from os.path import dirname, join
from pkg_resources import parse_version
from setuptools import setup, find_packages, __version__ as setuptools_version

with open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:
    version = f.read().decode('ascii').strip()


def has_environment_marker_platform_impl_support():
    """Code extracted from 'pytest/setup.py'
    https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31

    The first known setuptools release to support environment markers
    with range operators is 18.5; see:
    https://setuptools.readthedocs.io/en/latest/history.html#id235
    """
    return parse_version(setuptools_version) >= parse_version('18.5')


install_requires = [
    'Twisted>=17.9.0',
    'cryptography>=2.0',
    'cssselect>=0.9.1',
    'itemloaders>=1.0.1',
    'parsel>=1.5.0',
    'pyOpenSSL>=16.2.0',
    'queuelib>=1.4.2',
    'service_identity>=16.0.0',
    'w3lib>=1.17.0',
    'zope.interface>=4.1.3',
    'protego>=0.1.15',
    'itemadapter>=0.1.0',
]
extras_require = {}
cpython_dependencies = [
    'lxml>=3.5.0',
    'PyDispatcher>=2.0.5',
]
if has_environment_marker_platform_impl_support():
    extras_require[':platform_python_implementation == "CPython"'] = cpython_dependencies
    extras_require[':platform_python_implementation == "PyPy"'] = [
        # Earlier lxml versions are affected by
        # https://foss.heptapod.net/pypy/pypy/-/issues/2498,
        # which was fixed in Cython 0.26, released on 2017-06-19, and used to
        # generate the C headers of lxml release tarballs published since
        # then, the first of which was:
        'lxml>=4.0.0',
        'PyPyDispatcher>=2.1.0',
    ]
else:
    install_requires.extend(cpython_dependencies)
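
# Illustrative note (not part of the original file): when setuptools supports
# environment markers, the ':platform_python_implementation == "..."' keys
# above are emitted as conditional requirements in the package metadata,
# roughly equivalent to:
#
#     Requires-Dist: lxml>=3.5.0; platform_python_implementation == "CPython"
#     Requires-Dist: PyPyDispatcher>=2.1.0; platform_python_implementation == "PyPy"
#
# so pip only installs each set on the matching interpreter. On older
# setuptools, the CPython dependencies fall back to plain install_requires.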


setup(
    name='Scrapy',
    version=version,
    url='https://scrapy.org',
    project_urls={
        'Documentation': 'https://docs.scrapy.org/',
        'Source': 'https://github.com/scrapy/scrapy',
        'Tracker': 'https://github.com/scrapy/scrapy/issues',
    },
    description='A high-level Web Crawling and Web Scraping framework',
    long_description=open('README.rst').read(),
    author='Scrapy developers',
    maintainer='Pablo Hoffman',
    maintainer_email='[email protected]',
    license='BSD',
    packages=find_packages(exclude=('tests', 'tests.*')),
    include_package_data=True,
    zip_safe=False,
    entry_points={
        'console_scripts': ['scrapy = scrapy.cmdline:execute']
    },
    classifiers=[
        'Framework :: Scrapy',
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    python_requires='>=3.6',
    install_requires=install_requires,
    extras_require=extras_require,
)
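
# Usage sketch (an assumption, not taken from the file itself): from a source
# checkout of this era, the package would typically be built or installed with
# commands such as:
#
#     pip install .          # install Scrapy plus the dependencies declared above
#     python setup.py sdist  # build a source distribution into dist/
#
# Exact commands depend on the local Python and pip setup.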