Skip to content

Commit 0b34e5d

Browse files
authored
Merge pull request #6 from cfhamlet/develop
add: choice reactor
2 parents 236b63c + a7860f5 commit 0b34e5d

File tree

3 files changed

+55
-5
lines changed

3 files changed

+55
-5
lines changed

README.md

Lines changed: 26 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,12 +31,34 @@ os-scrapy -h
3131

3232
### CRAWLER_CLASS
3333

34-
os-scrapy support config ``CRAWLER_CLASS`` replace the default ``scrapy.crawler.Crawler``. It can be configured in ``settings.py`` file or ``os-scrapy crawl -c <your_crawler_class> <spider_name>``
34+
os-scrapy supports configuring ``CRAWLER_CLASS`` to replace the default ``scrapy.crawler.Crawler``
3535

36+
It can be configured in ``settings.py`` file or command line
3637

37-
## Enhanced ``startproject``
3838

39-
Thanks to [os-scrapy-cookiecutter](https://github.com/cfhamlet/os-scrapy-cookiecutter) a ``-p`` option can be used ``os-scrapy startproject -p <project_name>``, which indicate create project as well as python package.
39+
```
40+
os-scrapy crawl -c <your_crawler_class> <spider_name>
41+
```
42+
43+
44+
### Enhanced ``startproject``
45+
46+
Thanks to [os-scrapy-cookiecutter](https://github.com/cfhamlet/os-scrapy-cookiecutter), the ``-p`` option can be used to create a project that is also a Python package
47+
48+
49+
```
50+
os-scrapy startproject -p <project_name> [project_dir]
51+
```
52+
53+
### Set ``TWISTED_REACTOR`` on command line
54+
55+
56+
The ``-r`` option can be used to set ``TWISTED_REACTOR``
57+
58+
59+
```
60+
os-scrapy crawl -r asyncio <spider_name>
61+
```
4062

4163

4264
## Unit Tests
@@ -48,3 +70,4 @@ tox
4870
## License
4971

5072
MIT licensed.
73+

src/os_scrapy/VERSION

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
0.0.3
1+
0.0.4

src/os_scrapy/commands/crawl.py

Lines changed: 28 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,21 @@ def load_crawler_class(class_path):
1919

2020
DEFAULT_CRAWLER_CLASS = f"{Crawler.__module__}.{Crawler.__name__}"
2121

22+
def _reactor_setter(reactor_path):
    """Return a callable that records *reactor_path* as the TWISTED_REACTOR
    setting at "command" priority.

    A factory is used instead of inline lambdas so each entry captures its
    own path (no late-binding pitfall) and the common ``settings.set`` call
    is written exactly once.
    """
    def _apply(settings):
        settings.set("TWISTED_REACTOR", reactor_path, "command")
    return _apply


# Map of the short reactor names accepted by the crawl command's -r option
# to setters for the corresponding TWISTED_REACTOR value.  "twisted" stores
# None, i.e. fall back to the platform default reactor.
# NOTE(review): these use the "command" priority while CRAWLER_CLASS is set
# with "cmdline" elsewhere in this file — confirm the difference is intended.
REACTORS = {
    "twisted": _reactor_setter(None),
    "poll": _reactor_setter("twisted.internet.pollreactor.PollReactor"),
    "select": _reactor_setter("twisted.internet.selectreactor.SelectReactor"),
    "asyncio": _reactor_setter(
        "twisted.internet.asyncioreactor.AsyncioSelectorReactor"
    ),
}
36+
2237

2338
class Command(ScrapyCommand):
2439
default_settings = {
@@ -31,13 +46,25 @@ def add_options(self, parser):
3146
"-c",
3247
"--crawler-class",
3348
metavar="CRAWLER_CLASS",
34-
help=f"set crawler class. default {DEFAULT_CRAWLER_CLASS}",
49+
help=f"set crawler class (default: {self.settings['CRAWLER_CLASS']})",
50+
)
51+
reactor = self.settings.get("TWISTED_REACTOR")
52+
reactor_choices = list(REACTORS.keys())
53+
parser.add_option(
54+
"-r",
55+
"--reactor",
56+
metavar="REACTOR",
57+
type="choice",
58+
choices=reactor_choices,
59+
help=f"reactor type (default: {reactor if reactor else 'twisted'}). choices: {reactor_choices}",
3560
)
3661

3762
def process_options(self, args, opts):
    """Fold the parsed command-line options back into ``self.settings``.

    Extends the base Scrapy command handling with the two options this
    command adds: ``-c/--crawler-class`` and ``-r/--reactor``.
    """
    super(Command, self).process_options(args, opts)
    crawler_class = opts.crawler_class
    # -c overrides CRAWLER_CLASS at "cmdline" priority.
    if crawler_class:
        self.settings.set("CRAWLER_CLASS", crawler_class, "cmdline")
    reactor_name = opts.reactor
    # -r delegates to the matching setter registered in REACTORS.
    if reactor_name:
        REACTORS[reactor_name](self.settings)
4168

4269
def _create_crawler(self, spname):
4370
c = self.settings.get("CRAWLER_CLASS")

0 commit comments

Comments
 (0)