Commit 6236f5b

Merge pull request #4 from cfhamlet/develop

v0.0.2

2 parents 2a8e3a3 + 74b121e

6 files changed: +106 −40 lines

README.md

Lines changed: 14 additions & 2 deletions
@@ -23,10 +23,22 @@ pip install os-scrapy
 
 ### Command line
 
+The command line is the same as scrapy's:
+
 ```
-os-scrapy
+os-scrapy -h
 ```
 
+### CRAWLER_CLASS
+
+os-scrapy supports a ``CRAWLER_CLASS`` setting to replace the default ``scrapy.crawler.Crawler``. It can be configured in the ``settings.py`` file or on the command line: ``os-scrapy crawl -c <your_crawler_class> <spider_name>``.
+
+## Enhanced ``startproject``
+
+Thanks to [os-scrapy-cookiecutter](https://github.com/cfhamlet/os-scrapy-cookiecutter), a ``-p`` option can be used: ``os-scrapy startproject -p <project_name>``, which creates the project as a Python package as well.
+
 ## Unit Tests
 
 ```
@@ -35,4 +47,4 @@ tox
 
 ## License
 
-MIT licensed.
+MIT licensed.
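For readers of the new CRAWLER_CLASS section, here is a minimal sketch of how the setting might be wired up. Only the setting name and the ``-c`` flag come from this commit; the module path ``myproject.crawlers.MyCrawler`` and the dotted-string value format are assumptions for illustration.

```python
# myproject/crawlers.py -- a hypothetical replacement crawler.
# Subclassing scrapy.crawler.Crawler keeps default behavior; custom
# hooks would go in this class.
from scrapy.crawler import Crawler


class MyCrawler(Crawler):
    pass


# In settings.py (assumed value format: a dotted path string, as is
# usual for scrapy class settings such as DUPEFILTER_CLASS):
# CRAWLER_CLASS = "myproject.crawlers.MyCrawler"
```

The same class could be selected for a single run with ``os-scrapy crawl -c myproject.crawlers.MyCrawler <spider_name>``.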

requirements/requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-Scrapy==2.0.1
+os-scrapy-cookiecutter>=0.0.2

script/lint.sh renamed to scripts/lint.sh

Lines changed: 5 additions & 3 deletions
@@ -10,6 +10,8 @@ set -x
 
 pip install -r requirements/requirements-lint.txt
 
-${PREFIX}autoflake --in-place --recursive --remove-all-unused-imports --remove-unused-variables src tests
-${PREFIX}black --exclude=".pyi$" src tests
-${PREFIX}isort --multi-line=3 --trailing-comma --force-grid-wrap=0 --combine-as --line-width 88 --recursive --apply src tests
+FILES="src tests setup.py"
+
+${PREFIX}autoflake --in-place --recursive --remove-all-unused-imports --remove-unused-variables ${FILES}
+${PREFIX}black --exclude=".pyi$" ${FILES}
+${PREFIX}isort --multi-line=3 --trailing-comma --force-grid-wrap=0 --combine-as --line-width 88 --recursive --apply ${FILES}
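As a usage note, assuming the script is still invoked from the repository root, the renamed entry point would be run as:

```
sh scripts/lint.sh
```

Collecting the targets in a single ``FILES`` variable keeps the three tools (autoflake, black, isort) operating on the same file set, so setup.py can no longer drift out of the lint scope.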

setup.py

Lines changed: 31 additions & 33 deletions
@@ -1,11 +1,12 @@
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup
 
 
 def read(*filenames, **kwargs):
     import io
     from os.path import join, dirname
-    encoding = kwargs.get('encoding', 'utf-8')
-    sep = kwargs.get('sep', '\n')
+
+    encoding = kwargs.get("encoding", "utf-8")
+    sep = kwargs.get("sep", "\n")
     buf = []
     for filename in filenames:
         with io.open(join(dirname(__file__), filename), encoding=encoding) as f:
@@ -14,36 +15,33 @@ def read(*filenames, **kwargs):
 
 
 setup(
-    name='os-scrapy',
-    version=read('src/os_scrapy/VERSION').strip(),
-    packages=find_packages(where='src'),
-    package_dir={'': 'src'},
+    name="os-scrapy",
+    version=read("src/os_scrapy/VERSION").strip(),
+    packages=find_packages(where="src"),
+    package_dir={"": "src"},
     include_package_data=True,
-    license='MIT License',
-    description='Ozzy \'s Scrapy',
-    long_description=open('README.md').read(),
-    long_description_content_type='text/markdown',
-    author='Ozzy',
-    author_email='cfhamlet@gmail.com',
-    url='https://github.com/cfhamlet/os-scrapy',
-    install_requires=open('requirements/requirements.txt').read().split('\n'),
-    python_requires='>=3.6',
+    license="MIT License",
+    description="Ozzy 's Scrapy",
+    long_description=open("README.md").read(),
+    long_description_content_type="text/markdown",
+    author="Ozzy",
+    author_email="cfhamlet@gmail.com",
+    url="https://github.com/cfhamlet/os-scrapy",
+    install_requires=open("requirements/requirements.txt").read().split("\n"),
+    python_requires=">=3.6",
     zip_safe=False,
-    entry_points={
-        'console_scripts': ['os-scrapy = os_scrapy.cmdline:execute']
-    },
-    extras_require={
-        'ujson': ['ujson'],
-    },
+    entry_points={"console_scripts": ["os-scrapy = os_scrapy.cmdline:execute"]},
+    extras_require={"ujson": ["ujson"],},
     classifiers=[
-        'Development Status :: 2 - Pre-Alpha',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: MIT License',
-        'Natural Language :: English',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
-        'Programming Language :: Python :: 3 :: Only',
-        'Programming Language :: Python :: Implementation :: CPython',
-        'Programming Language :: Python :: Implementation :: PyPy',
-    ])
+        "Development Status :: 2 - Pre-Alpha",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: MIT License",
+        "Natural Language :: English",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: Implementation :: CPython",
+        "Programming Language :: Python :: Implementation :: PyPy",
+    ],
+)
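A side note on the reformatted ``extras_require`` block: the optional ujson dependency is a standard setuptools extra, so it can be pulled in with pip's extras syntax:

```
pip install "os-scrapy[ujson]"
```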

src/os_scrapy/VERSION

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-0.0.1
+0.0.2

Lines changed: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
+from os.path import abspath, exists, join
+
+import os_scrapy_cookiecutter
+from cookiecutter.main import cookiecutter
+from scrapy.commands.startproject import Command as ScrapyCommand
+from scrapy.exceptions import UsageError
+
+
+class Command(ScrapyCommand):
+    def add_options(self, parser):
+        super(Command, self).add_options(parser)
+        parser.add_option(
+            "-p",
+            "--package",
+            dest="package",
+            action="store_true",
+            help="create project as package",
+        )
+
+    def run(self, args, opts):
+        if not opts.package:
+            return super(Command, self).run(args, opts)
+
+        if len(args) not in (1, 2):
+            raise UsageError()
+
+        project_name = args[0]
+        project_dir = args[0]
+
+        if len(args) == 2:
+            project_dir = args[1]
+
+        if exists(join(project_dir, "scrapy.cfg")):
+            self.exitcode = 1
+            print("Error: scrapy.cfg already exists in %s" % abspath(project_dir))
+            return
+
+        if not self._is_valid_name(project_name):
+            self.exitcode = 1
+            return
+
+        try:
+            cookiecutter(
+                os_scrapy_cookiecutter.TEMPLATE_DIR,
+                no_input=True,
+                extra_context={
+                    "project_name": project_name,
+                    "project_dir": project_dir,
+                },
+            )
+        except Exception as e:
+            self.exitcode = 1
+            print(f"Error: create project with cookiecutter {e}")
+            return
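Tying this new command to the README's ``startproject`` section: when ``-p`` is passed, the command does not call scrapy's stock template machinery at all and instead delegates to cookiecutter with the os-scrapy-cookiecutter template. A usage sketch, with an illustrative project name (the code above also accepts an optional second argument for the target directory):

```
os-scrapy startproject -p myproject
os-scrapy startproject -p myproject path/to/dir
```

Without ``-p``, the command falls through to scrapy's default ``startproject`` behavior via ``super()``.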
