-
Notifications
You must be signed in to change notification settings - Fork 162
/
setup.py
52 lines (50 loc) · 1.64 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
#!/usr/bin/python
# -*- coding: utf-8 -*-
# NOTE: the shebang must be the FIRST line of the file to have any effect;
# the original had the coding comment first, which made the shebang dead.
# PEP 263 explicitly allows the coding declaration on line 2 after a shebang.
from setuptools import setup, find_packages
from os.path import join, dirname

# Single source of truth for the package version: the scrapyrt/VERSION file,
# which is also shipped in package_data so it is readable at runtime.
# Read as bytes and decode explicitly so the result is stable regardless of
# the platform's default encoding.
with open(join(dirname(__file__), 'scrapyrt/VERSION'), 'rb') as f:
    version = f.read().decode('ascii').strip()
# Read the long description up front inside a context manager: the original
# `open('README.rst').read()` leaked the file handle (closed only by GC,
# triggers ResourceWarning) and used the platform default encoding.
with open('README.rst', encoding='utf-8') as readme:
    long_description = readme.read()

setup(
    name="scrapyrt",
    version=version,
    author='Scrapinghub',
    author_email='[email protected]',
    url="https://github.com/scrapinghub/scrapyrt",
    maintainer='Scrapinghub',
    maintainer_email='[email protected]',
    description='Put Scrapy spiders behind an HTTP API',
    long_description=long_description,
    license='BSD',
    packages=find_packages(),
    # Installs the `scrapyrt` command-line entry point.
    entry_points={
        'console_scripts': ['scrapyrt = scrapyrt.cmdline:execute']
    },
    # Package data (the VERSION file) must be readable from the filesystem,
    # so the package cannot be installed as a zipped egg.
    zip_safe=False,
    classifiers=[
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
        'Operating System :: OS Independent',
        'Environment :: Console',
        'Environment :: No Input/Output (Daemon)',
        'Topic :: Internet :: WWW/HTTP',
        'License :: OSI Approved :: BSD License',
    ],
    project_urls={
        "Documentation": "https://scrapyrt.readthedocs.io/en/latest/index.html",
        "Source": "https://github.com/scrapinghub/scrapyrt",
        "Tracker": "https://github.com/scrapinghub/scrapyrt/issues"
    },
    install_requires=[
        'Scrapy>=2.10'
    ],
    # Ship the VERSION file so the version string is available at runtime.
    package_data={
        'scrapyrt': [
            'VERSION',
        ]
    },
    python_requires='>=3.8',
)