From b7cc2bc25f94febafe4f61187cf417d8ef88dad4 Mon Sep 17 00:00:00 2001
From: Juraj Variny
Date: Sat, 8 Nov 2014 21:43:04 +0100
Subject: [PATCH 01/23] fix - FEED_URI was always overridden by scrapyd

---
 scrapyd/environ.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scrapyd/environ.py b/scrapyd/environ.py
index 2874526c..6b074c4c 100644
--- a/scrapyd/environ.py
+++ b/scrapyd/environ.py
@@ -13,7 +13,7 @@ class Environment(object):
     def __init__(self, config, initenv=os.environ):
         self.dbs_dir = config.get('dbs_dir', 'dbs')
         self.logs_dir = config.get('logs_dir', 'logs')
-        self.items_dir = config.get('items_dir', 'items')
+        self.items_dir = config.get('items_dir', '')
         self.jobs_to_keep = config.getint('jobs_to_keep', 5)
         if config.cp.has_section('settings'):
             self.settings = dict(config.cp.items('settings'))

From bbd0b6c474f91ff1e1efe64192c2b73d9c53cc66 Mon Sep 17 00:00:00 2001
From: Juraj Variny
Date: Thu, 27 Nov 2014 18:33:39 +0100
Subject: [PATCH 02/23] fix test: FEED_URI is not compulsory

Conflicts:
	scrapyd/tests/test_environ.py
---
 scrapyd/tests/test_environ.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/scrapyd/tests/test_environ.py b/scrapyd/tests/test_environ.py
index bee8bfc4..08162a6d 100644
--- a/scrapyd/tests/test_environ.py
+++ b/scrapyd/tests/test_environ.py
@@ -30,8 +30,9 @@ def test_get_environment_with_eggfile(self):
         self.assertEqual(env['SCRAPY_SPIDER'], 'myspider')
         self.assertEqual(env['SCRAPY_JOB'], 'ID')
         self.assert_(env['SCRAPY_LOG_FILE'].endswith(os.path.join('mybot', 'myspider', 'ID.log')))
-        self.assert_(env['SCRAPY_FEED_URI'].startswith('file://{}'.format(os.getcwd())))
-        self.assert_(env['SCRAPY_FEED_URI'].endswith(os.path.join('mybot', 'myspider', 'ID.jl')))
+        if env.get('SCRAPY_FEED_URI'):  # not compulsory
+            self.assert_(env['SCRAPY_FEED_URI'].startswith('file://{}'.format(os.getcwd())))
+            self.assert_(env['SCRAPY_FEED_URI'].endswith(os.path.join('mybot', 'myspider', 'ID.jl')))
         self.failIf('SCRAPY_SETTINGS_MODULE' in env)

     def test_get_environment_with_no_items_dir(self):

From 396421a6bcffc6d821bcaf4df06355dc9069bea5 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sat, 10 Oct 2015 07:24:12 +0300
Subject: [PATCH 03/23] Update default config and docs for empty items_dir

---
 docs/config.rst              | 10 +++++-----
 scrapyd/default_scrapyd.conf |  2 +-
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/docs/config.rst b/docs/config.rst
index 97541149..4871a681 100644
--- a/docs/config.rst
+++ b/docs/config.rst
@@ -75,11 +75,11 @@ items_dir

 .. versionadded:: 0.15

-The directory where the Scrapy items will be stored. If you want to disable
-storing feeds of scraped items (perhaps, because you use a database or other
-storage) set this option empty, like this::
-
-    items_dir =
+The directory where the Scrapy items will be stored.
+This option is disabled by default
+because you are expected to use a database or a feed exporter.
+If set to a non-empty value, feeds of scraped items are stored
+in the specified directory by overriding the Scrapy setting ``FEED_URI``.

 .. _jobs_to_keep:

diff --git a/scrapyd/default_scrapyd.conf b/scrapyd/default_scrapyd.conf
index 14964f4a..bb4008d6 100644
--- a/scrapyd/default_scrapyd.conf
+++ b/scrapyd/default_scrapyd.conf
@@ -1,7 +1,7 @@
 [scrapyd]
 eggs_dir = eggs
 logs_dir = logs
-items_dir = items
+items_dir =
 jobs_to_keep = 5
 dbs_dir = dbs
 max_proc = 0

From 96aafef0f1d968b38ccb298922a2d4bddc1e8afb Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Tue, 6 Oct 2015 21:41:30 +0300
Subject: [PATCH 04/23] clarification on jobs_to_keep

---
 docs/config.rst | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/docs/config.rst b/docs/config.rst
index 4871a681..1d0fbf8b 100644
--- a/docs/config.rst
+++ b/docs/config.rst
@@ -88,8 +88,9 @@ jobs_to_keep

 .. versionadded:: 0.15

-The number of finished jobs to keep per spider. Defaults to ``5``. This
-includes logs and items.
+The number of finished jobs to keep per spider.
+Defaults to ``5``.
+This refers to both logs and items.

 This setting was named ``logs_to_keep`` in previous versions.

From dc1fea3c9bdadf1dbc732615c76de7027276ff91 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Tue, 6 Oct 2015 21:41:43 +0300
Subject: [PATCH 05/23] missing doc for finished_to_keep

---
 docs/config.rst | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/docs/config.rst b/docs/config.rst
index 1d0fbf8b..37511991 100644
--- a/docs/config.rst
+++ b/docs/config.rst
@@ -94,6 +94,17 @@
 This refers to both logs and items.

 This setting was named ``logs_to_keep`` in previous versions.

+.. _finished_to_keep:
+
+finished_to_keep
+----------------
+
+.. versionadded:: 0.14
+
+The number of finished processes to keep in the launcher.
+Defaults to ``100``.
+This only affects the website's /jobs endpoint and the relevant JSON webservices.
+
 poll_interval
 -------------

From 1a6670963cbcf755eb616c674b625cb818e0f2fa Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Fri, 13 May 2016 09:14:32 +0300
Subject: [PATCH 06/23] Note the priority argument for the spider queue.

I note this in the schedule.json webservice because:
* This is an old branch
* The spider queue itself is not documented
* I plan to move its "popping" to the webservice
---
 docs/api.rst | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/docs/api.rst b/docs/api.rst
index 306ca786..3cfde1f0 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -38,7 +38,10 @@ Schedule a spider run (also known as a job), returning the job id.

 * ``project`` (string, required) - the project name
 * ``spider`` (string, required) - the spider name
 * ``setting`` (string, optional) - a scrapy setting to use when running the spider
-* any other parameter is passed as spider argument
+* ``priority`` (float, optional, default ``0.0``) - the priority of the run
+  in the project's spider queue;
+  a greater number means higher priority
+* Any other parameter is passed as an argument to the spider.
 Example request::

From 905e01a152fbd187983a87696d76b8164bb629e6 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Wed, 27 Jan 2016 15:34:35 +0200
Subject: [PATCH 07/23] Dynamic requirements for different pythons

---
 setup.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index ac991162..319fd1f4 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,5 @@
 from os.path import join, dirname
+import sys

 with open(join(dirname(__file__), 'scrapyd/VERSION')) as f:
     version = f.read().strip()
@@ -38,6 +39,10 @@
 except ImportError:
     from distutils.core import setup
 else:
-    setup_args['install_requires'] = ['Twisted>=8.0', 'Scrapy>=0.17']
+    if sys.version_info < (2, 7):
+        setup_args['install_requires'] = ['Twisted>=8.0,<=15.1', 'Scrapy>=0.17,<0.19', 'w3lib<1.9']
+    else:
+        setup_args['install_requires'] = ['Twisted>=8.0', 'Scrapy>=0.17']
+

 setup(**setup_args)

From f7df6305f6136a171b261fbff6860fcd1e0702ea Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Tue, 23 Feb 2016 15:11:37 +0200
Subject: [PATCH 08/23] Disable bdist_wheel command

---
 setup.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/setup.py b/setup.py
index 319fd1f4..b2aec73b 100644
--- a/setup.py
+++ b/setup.py
@@ -45,4 +45,21 @@
         setup_args['install_requires'] = ['Twisted>=8.0', 'Scrapy>=0.17']


+try:
+    import wheel
+except ImportError:
+    pass
+else:
+    from wheel.bdist_wheel import bdist_wheel as _bdist_wheel
+    class bdist_wheel(_bdist_wheel):
+        description = (
+            'Building wheels is disabled for this unsupported version of scrapyd'
+            ' because of dynamic dependencies.'
+            ' If you need to build a wheel, try a newer version of scrapyd.'
+        )
+        def run(self):
+            raise SystemExit(self.description)
+    setup_args.setdefault('cmdclass', {}).update(bdist_wheel=bdist_wheel)
+
+
 setup(**setup_args)

From 2fced1905dcdd9fc838c66a3d5ab8fadf8ddcb73 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sat, 21 May 2016 10:26:48 +0300
Subject: [PATCH 09/23] Revert "Removed python 2.6/lucid env from travis."

This reverts commit 5277755e95ba91f4f27629750c25a305274ecc05.
Because the requirements and trove classifiers were not updated when the
release was made, 1.1 ended up accidentally supporting Python 2.6.
---
 .travis.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.travis.yml b/.travis.yml
index 6cb92f6b..15ad9fdb 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,6 +5,8 @@ matrix:
   exclude:
     - env: TRAVISBUG="#1027"
   include:
+    - python: "2.6"
+      env: BUILDENV=lucid
     - python: "2.7"
      env: BUILDENV=precise
    - python: "2.7"

From f75797ebd6c65f12e099a290be9489a37f9b528b Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Fri, 20 May 2016 18:04:14 +0300
Subject: [PATCH 10/23] missing tests for some sqlite3 queues

---
 scrapyd/tests/test_spiderqueue.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/scrapyd/tests/test_spiderqueue.py b/scrapyd/tests/test_spiderqueue.py
index 8a400a0e..72e7bc58 100644
--- a/scrapyd/tests/test_spiderqueue.py
+++ b/scrapyd/tests/test_spiderqueue.py
@@ -4,7 +4,7 @@
 from zope.interface.verify import verifyObject

 from scrapyd.interfaces import ISpiderQueue
-from scrapyd.spiderqueue import SqliteSpiderQueue
+from scrapyd import spiderqueue

 class SpiderQueueTest(unittest.TestCase):
     """This test case can be used easily for testing other SpiderQueue's by
@@ -15,12 +15,16 @@ class SpiderQueueTest(unittest.TestCase):
     def setUp(self):
         self.q = self._get_queue()
         self.name = 'spider1'
-        self.args = {'arg1': 'val1', 'arg2': 2}
+        self.args = {
+            'arg1': 'val1',
+            'arg2': 2,
+            'arg3': u'\N{SNOWMAN}',
+        }
         self.msg = self.args.copy()
         self.msg['name'] = self.name

     def _get_queue(self):
-        return SqliteSpiderQueue(':memory:')
+        return spiderqueue.SqliteSpiderQueue(':memory:')

     def test_interface(self):
         verifyObject(ISpiderQueue, self.q)
@@ -64,3 +68,13 @@ def test_clear(self):

         c = yield maybeDeferred(self.q.count)
         self.assertEqual(c, 0)
+
+
+class JsonSpiderQueueTest(SpiderQueueTest):
+    def _get_queue(self):
+        return spiderqueue.JsonSqliteSpiderQueue(':memory:')
+
+
+class PickleSpiderQueueTest(SpiderQueueTest):
+    def _get_queue(self):
+        return spiderqueue.PickleSqliteSpiderQueue(':memory:')

From 07e4b8dea838bef1b96d3d906f1d85975558561b Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Mon, 23 May 2016 10:04:40 +0300
Subject: [PATCH 11/23] update project version in sphinx conf

---
 docs/conf.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index b31a8d61..da742182 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -48,9 +48,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '0.18'
+version = '1.1'
 # The full version, including alpha/beta/rc tags.
-release = '0.18'
+release = '1.1'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
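Editor's note: for context on PATCH 10 above — the shared ``SpiderQueueTest``
base class drives any ``ISpiderQueue`` implementation through the same
add/pop/count round-trip, so a subclass only has to swap the backend in
``_get_queue``. A minimal sketch of that round-trip, assuming the 1.1-era
queue API in which ``add(name, **args)`` treats an optional ``priority`` key
specially (the argument PATCH 06 documents)::

    from scrapyd.spiderqueue import JsonSqliteSpiderQueue

    q = JsonSqliteSpiderQueue(':memory:')      # in-memory sqlite backend
    q.add('spider1', priority=5, arg1='val1')  # 'priority' orders the queue
    msg = q.pop()                              # highest-priority message first
    assert msg['name'] == 'spider1'
    assert msg['arg1'] == 'val1'
    assert q.count() == 0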
From 03439c454ef51e1a374f733c28ee7010d7dfe5de Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Fri, 20 May 2016 18:05:19 +0300
Subject: [PATCH 12/23] Type adapter for sqlite3 BLOBs is sqlite3.Binary

---
 scrapyd/sqlite.py | 32 +++++++++++++++-----------------
 1 file changed, 15 insertions(+), 17 deletions(-)

diff --git a/scrapyd/sqlite.py b/scrapyd/sqlite.py
index 69f44a66..602ec0a8 100644
--- a/scrapyd/sqlite.py
+++ b/scrapyd/sqlite.py
@@ -12,7 +12,7 @@ def __init__(self, database=None, table="dict"):
         self.table = table
         # about check_same_thread: http://twistedmatrix.com/trac/ticket/4040
         self.conn = sqlite3.connect(self.database, check_same_thread=False)
-        q = "create table if not exists %s (key text primary key, value blob)" \
+        q = "create table if not exists %s (key blob primary key, value blob)" \
            % table
         self.conn.execute(q)

@@ -60,26 +60,26 @@ def items(self):

     def encode(self, obj):
         return obj

-    def decode(self, text):
-        return text
+    def decode(self, obj):
+        return obj


 class PickleSqliteDict(SqliteDict):

     def encode(self, obj):
-        return buffer(cPickle.dumps(obj, protocol=2))
+        return sqlite3.Binary(cPickle.dumps(obj, protocol=2))

-    def decode(self, text):
-        return cPickle.loads(str(text))
+    def decode(self, obj):
+        return cPickle.loads(bytes(obj))


 class JsonSqliteDict(SqliteDict):

     def encode(self, obj):
-        return json.dumps(obj)
+        return sqlite3.Binary(json.dumps(obj))

-    def decode(self, text):
-        return json.loads(text)
+    def decode(self, obj):
+        return json.loads(bytes(obj))

@@ -155,18 +155,16 @@ def decode(self, text):

 class PickleSqlitePriorityQueue(SqlitePriorityQueue):

     def encode(self, obj):
-        return buffer(cPickle.dumps(obj, protocol=2))
+        return sqlite3.Binary(cPickle.dumps(obj, protocol=2))

-    def decode(self, text):
-        return cPickle.loads(str(text))
+    def decode(self, obj):
+        return cPickle.loads(bytes(obj))


 class JsonSqlitePriorityQueue(SqlitePriorityQueue):

     def encode(self, obj):
-        return json.dumps(obj)
-
-    def decode(self, text):
-        return json.loads(text)
-
+        return sqlite3.Binary(json.dumps(obj))
+    def decode(self, obj):
+        return json.loads(bytes(obj))

From 528f2ac42e207381b17e16be57a7d9a47d1f349c Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Fri, 27 May 2016 19:49:11 +0300
Subject: [PATCH 13/23] portability: fall back to pure python pickle

---
 scrapyd/sqlite.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/scrapyd/sqlite.py b/scrapyd/sqlite.py
index 602ec0a8..3fcfb41a 100644
--- a/scrapyd/sqlite.py
+++ b/scrapyd/sqlite.py
@@ -1,5 +1,8 @@
 import sqlite3
-import cPickle
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
 import json
 from UserDict import DictMixin

@@ -67,10 +70,10 @@ def decode(self, obj):

 class PickleSqliteDict(SqliteDict):

     def encode(self, obj):
-        return sqlite3.Binary(cPickle.dumps(obj, protocol=2))
+        return sqlite3.Binary(pickle.dumps(obj, protocol=2))

     def decode(self, obj):
-        return cPickle.loads(bytes(obj))
+        return pickle.loads(bytes(obj))


 class JsonSqliteDict(SqliteDict):

@@ -155,10 +158,10 @@ def decode(self, obj):

 class PickleSqlitePriorityQueue(SqlitePriorityQueue):

     def encode(self, obj):
-        return sqlite3.Binary(cPickle.dumps(obj, protocol=2))
+        return sqlite3.Binary(pickle.dumps(obj, protocol=2))

     def decode(self, obj):
-        return cPickle.loads(bytes(obj))
+        return pickle.loads(bytes(obj))


 class JsonSqlitePriorityQueue(SqlitePriorityQueue):

From 682585ce0469398c960ddf3e3bf73cd855d6ba32 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Tue, 25 Oct 2016 19:59:14 +0300
Subject: [PATCH 14/23] Fix travis: py26 compatible w3lib & non-egg scrapy

---
 .travis/requirements-lucid.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.travis/requirements-lucid.txt b/.travis/requirements-lucid.txt
index 6f9f8e44..ce5fafa1 100644
--- a/.travis/requirements-lucid.txt
+++ b/.travis/requirements-lucid.txt
@@ -1,2 +1,3 @@
-Scrapy
+Scrapy<0.19 --install-option=--single-version-externally-managed
+w3lib<1.9
 twisted==10.0.0

From 1788487ce12be5d66aedaf010244371589074327 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 19:48:46 +0300
Subject: [PATCH 15/23] 1.0.2 notes

---
 docs/news.rst | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/docs/news.rst b/docs/news.rst
index 798f740a..fad4bbd5 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -3,6 +3,21 @@
 Release notes
 =============

+1.0.2
+-----
+
+setup script
+~~~~~~~~~~~~
+
+- Specified maximum versions for requirements that became incompatible.
+- Marked package as zip-unsafe because twistd requires a plain ``txapp.py``
+
+documentation
+~~~~~~~~~~~~~
+
+- Updated broken links and references to wrong versions of Scrapy
+- Warn that scrapyd 1.0 is falling out of support
+
 1.0
 ---

From 4247050d12427f74cdbbcc05cb2a08d2c5c76916 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 19:54:52 +0300
Subject: [PATCH 16/23] import 1.1 release notes from github

---
 docs/news.rst | 39 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/docs/news.rst b/docs/news.rst
index fad4bbd5..ec702b49 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -3,6 +3,45 @@
 Release notes
 =============

+1.1.0
+-----
+
+Features & Enhancements
+~~~~~~~~~~~~~~~~~~~~~~~
+
+- Outsource scrapyd-deploy command to scrapyd-client (#92, #90)
+- Look for a .scrapyd.conf file in the user's home (~/.scrapyd.conf) (#58)
+- Adding the nodename to identify the process that is working on the job (#42)
+- Allow remote items store (#48)
+- Debian sysvinit script (#41)
+- Add 'start_time' field in webservice for running jobs (#24)
+
+Bugfixes
+~~~~~~~~
+
+- Updating integration test script (#98)
+- Changed scripts to be installed using entry_points (#89)
+- Fix bug with --list-projects option in scrapyd-deploy (#88)
+- Update api.rst (#79)
+- Renovate scrapy upstart job a bit (#57)
+- Sanitize version names when creating egg paths (#72)
+- Use w3lib to generate feed uris (#73)
+- Copy txweb/JsonResource import from scrapy (#62)
+- Travis.yml: remove deprecated --use-mirrors pip option (b3cdc61)
+- Make scrapyd package zip unsafe because the scrapyd command requires the txapp.py unpacked to run (f27c054, #49)
+- Check if a spider exists before scheduling it (with sqlite cache) (#8, #17)
+- Fixing typo "mulitplied" (#51)
+- Fix GIT versioning for projects without annotated tags (#47)
+- Fix release notes: 1.0 is already released (6c8dcfb)
+- Correcting HTML tags in scrapyd website monitor (#38)
+- Update index.rst (#37)
+- Added missing anchor closing tags (#35)
+- Removed python 2.6/lucid env from travis (#32)
+- Changed the links to the new documentation page (#33)
+- Fix (at least) windows problem (#19)
+- Remove reference to 'scrapy server' command (f599b60, #25)
+- Made Scrapyd package name lowercase (1adfc31)
+
 1.0.2
 -----

From 69e5e76c11f25bac0d09ce027c3a2d83c910f272 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 20:10:16 +0300
Subject: [PATCH 17/23] split 1.1 notes into sections

---
 docs/news.rst | 32 ++++++++++++++++++++------------
 1 file changed, 20 insertions(+), 12 deletions(-)

diff --git a/docs/news.rst b/docs/news.rst
index ec702b49..1b0c558f 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -15,32 +15,40 @@ Features & Enhancements
 - Allow remote items store (#48)
 - Debian sysvinit script (#41)
 - Add 'start_time' field in webservice for running jobs (#24)
+- Check if a spider exists before scheduling it (with sqlite cache) (#8, #17)

 Bugfixes
 ~~~~~~~~

-- Updating integration test script (#98)
-- Changed scripts to be installed using entry_points (#89)
 - Fix bug with --list-projects option in scrapyd-deploy (#88)
-- Update api.rst (#79)
-- Renovate scrapy upstart job a bit (#57)
 - Sanitize version names when creating egg paths (#72)
-- Use w3lib to generate feed uris (#73)
 - Copy txweb/JsonResource import from scrapy (#62)
+- Use w3lib to generate feed uris (#73)
+- Fix GIT versioning for projects without annotated tags (#47)
+- Correcting HTML tags in scrapyd website monitor (#38)
+- Added missing anchor closing tags (#35)
+- Fix (at least) windows problem (#19)
+
+Setup script and Tests/CI
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+- Updating integration test script (#98)
+- Changed scripts to be installed using entry_points (#89)
+- Renovate scrapy upstart job a bit (#57)
 - Travis.yml: remove deprecated --use-mirrors pip option (b3cdc61)
 - Make scrapyd package zip unsafe because the scrapyd command requires the txapp.py unpacked to run (f27c054, #49)
-- Check if a spider exists before scheduling it (with sqlite cache) (#8, #17)
+- Removed python 2.6/lucid env from travis (#32)
+- Made Scrapyd package name lowercase (1adfc31)
+
+Documentation
+~~~~~~~~~~~~~
+
+- Update api.rst (#79)
 - Fixing typo "mulitplied" (#51)
-- Fix GIT versioning for projects without annotated tags (#47)
 - Fix release notes: 1.0 is already released (6c8dcfb)
-- Correcting HTML tags in scrapyd website monitor (#38)
 - Update index.rst (#37)
-- Added missing anchor closing tags (#35)
-- Removed python 2.6/lucid env from travis (#32)
 - Changed the links to the new documentation page (#33)
-- Fix (at least) windows problem (#19)
 - Remove reference to 'scrapy server' command (f599b60, #25)
-- Made Scrapyd package name lowercase (1adfc31)

From 354fd79d11efcbc18b9beaa5a1c4ce23694d44d4 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 20:54:34 +0300
Subject: [PATCH 18/23] summarize release notes

Replace PR ids with commit ids.
---
 docs/news.rst | 50 ++++++++++++++++++++++++--------------------------
 1 file changed, 24 insertions(+), 26 deletions(-)

diff --git a/docs/news.rst b/docs/news.rst
index 1b0c558f..22b9399f 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -9,46 +9,44 @@ Release notes

 Features & Enhancements
 ~~~~~~~~~~~~~~~~~~~~~~~

-- Outsource scrapyd-deploy command to scrapyd-client (#92, #90)
-- Look for a .scrapyd.conf file in the user's home (~/.scrapyd.conf) (#58)
-- Adding the nodename to identify the process that is working on the job (#42)
-- Allow remote items store (#48)
-- Debian sysvinit script (#41)
-- Add 'start_time' field in webservice for running jobs (#24)
-- Check if a spider exists before scheduling it (with sqlite cache) (#8, #17)
+- Outsource scrapyd-deploy command to scrapyd-client (c1358dc, c9d66ca..191353e)
+- Look for a ``~/.scrapyd.conf`` file in the user's home (1fce99b)
+- Adding the nodename to identify the process that is working on the job (fac3a5c..4aebe1c)
+- Allow remote items store (e261591..35a21db)
+- Debian sysvinit script (a54193a, ff457a9)
+- Add 'start_time' field in webservice for running jobs (6712af9, acd460b)
+- Check if a spider exists before scheduling it (with sqlite cache) (#8, 288afef..a185ff2)

 Bugfixes
 ~~~~~~~~

-- Fix bug with --list-projects option in scrapyd-deploy (#88)
-- Sanitize version names when creating egg paths (#72)
-- Copy txweb/JsonResource import from scrapy (#62)
-- Use w3lib to generate feed uris (#73)
-- Fix GIT versioning for projects without annotated tags (#47)
-- Correcting HTML tags in scrapyd website monitor (#38)
-- Added missing anchor closing tags (#35)
-- Fix (at least) windows problem (#19)
+- F̶i̶x̶ ̶s̶c̶r̶a̶p̶y̶d̶-̶d̶e̶p̶l̶o̶y̶ ̶-̶-̶l̶i̶s̶t̶-̶p̶r̶o̶j̶e̶c̶t̶s̶ ̶(̶9̶4̶2̶a̶1̶b̶2̶)̶ → moved to scrapyd-client
+- Sanitize version names when creating egg paths (8023720)
+- Copy txweb/JsonResource from scrapy which no longer provides it (99ea920)
+- Use w3lib to generate correct feed uris (9a88ea5)
+- Fix GIT versioning for projects without annotated tags (e91dcf4, #34)
+- Correcting HTML tags in scrapyd website monitor (da5664f, 26089cd)
+- Fix FEED_URI path on windows (4f0060a)

 Setup script and Tests/CI
 ~~~~~~~~~~~~~~~~~~~~~~~~~

-- Updating integration test script (#98)
-- Changed scripts to be installed using entry_points (#89)
-- Renovate scrapy upstart job a bit (#57)
-- Travis.yml: remove deprecated --use-mirrors pip option (b3cdc61)
-- Make scrapyd package zip unsafe because the scrapyd command requires the txapp.py unpacked to run (f27c054, #49)
-- Removed python 2.6/lucid env from travis (#32)
+- Restore integration test script (66de25d)
+- Changed scripts to be installed using entry_points (b670f5e)
+- Renovate scrapy upstart job (d130770)
+- Travis.yml: remove deprecated ``--use-mirrors`` pip option (b3cdc61)
+- Mark package as zip unsafe because twistd requires a plain ``txapp.py`` (f27c054)
+- Removed python 2.6/lucid env from travis (5277755)
 - Made Scrapyd package name lowercase (1adfc31)

 Documentation
 ~~~~~~~~~~~~~

-- Update api.rst (#79)
-- Fixing typo "mulitplied" (#51)
+- Spiders should allow for arbitrary keyword arguments (696154)
+- Various typos (51f1d69, 0a4a77a)
 - Fix release notes: 1.0 is already released (6c8dcfb)
-- Update index.rst (#37)
-- Changed the links to the new documentation page (#33)
-- Remove reference to 'scrapy server' command (f599b60, #25)
+- Point website module's links to readthedocs (215c700)
+- Remove reference to 'scrapy server' command (f599b60)

 1.0.2
 -----

From 2de2a6900a24eaedd21daf01fadec2ecd3b3b41d Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 23:11:59 +0300
Subject: [PATCH 19/23] Highlight breaking change

---
 docs/news.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/news.rst b/docs/news.rst
index 22b9399f..06d59385 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -10,6 +10,7 @@ Features & Enhancements
 ~~~~~~~~~~~~~~~~~~~~~~~

 - Outsource scrapyd-deploy command to scrapyd-client (c1358dc, c9d66ca..191353e)
+  **If you rely on this command, install the scrapyd-client package from PyPI.**
 - Look for a ``~/.scrapyd.conf`` file in the user's home (1fce99b)
 - Adding the nodename to identify the process that is working on the job (fac3a5c..4aebe1c)
 - Allow remote items store (e261591..35a21db)

From bf9fbfdb6de5419585d7c48186cdb17db5d189e0 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 23:19:25 +0300
Subject: [PATCH 20/23] add release dates

---
 docs/news.rst | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/news.rst b/docs/news.rst
index 06d59385..9645eebe 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -5,6 +5,7 @@ Release notes

 1.1.0
 -----
+*Release date: 2015-06-29*

 Features & Enhancements
 ~~~~~~~~~~~~~~~~~~~~~~~
@@ -51,6 +52,7 @@ Documentation

 1.0.2
 -----
+*Release date: 2016-03-28*

 setup script
 ~~~~~~~~~~~~
@@ -66,5 +68,6 @@ documentation

 1.0
 ---
+*Release date: 2013-09-02*

 First standalone release (it was previously shipped with Scrapy until Scrapy 0.16).

From 5e9f27bdb2e906d5a5b90df28b6c0bcadafcad55 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 23:20:51 +0300
Subject: [PATCH 21/23] skipping versions can cause confusion

---
 docs/news.rst | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/docs/news.rst b/docs/news.rst
index 9645eebe..ca50899f 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -66,8 +66,13 @@ documentation
 ~~~~~~~~~~~~~

 - Updated broken links and references to wrong versions of Scrapy
 - Warn that scrapyd 1.0 is falling out of support

-1.0
----
+1.0.1
+-----
+*Release date: 2013-09-02*
+*Trivial update*
+
+1.0.0
+-----
 *Release date: 2013-09-02*

 First standalone release (it was previously shipped with Scrapy until Scrapy 0.16).
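Editor's note: the "Use w3lib to generate correct feed uris" and "Fix
FEED_URI path on windows" entries in the 1.1.0 notes above are two sides of
the same fix — building the feed URI by string concatenation produced
malformed ``file://`` URIs for Windows paths with drive letters. A sketch of
the w3lib helper scrapyd switched to (path illustrative; output as produced
on a Windows host)::

    from w3lib.url import path_to_file_uri

    # Percent-encodes and normalizes separators instead of gluing strings.
    print(path_to_file_uri(r'C:\items\mybot\myspider\ID.jl'))
    # file:///C:/items/mybot/myspider/ID.jl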
From 5afaa0136e63dac5ef2f1399cd48d264a8b2695e Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Sun, 22 May 2016 23:57:36 +0300
Subject: [PATCH 22/23] add 1.1.1 release notes

---
 docs/news.rst | 27 +++++++++++++++++++++++++++
 1 file changed, 27 insertions(+)

diff --git a/docs/news.rst b/docs/news.rst
index ca50899f..7df14fcd 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -3,6 +3,33 @@
 Release notes
 =============

+1.1.1 - Unreleased
+------------------
+
+Removed
+~~~~~~~
+
+- Disabled the bdist_wheel command in setup so that requirements can stay
+  dynamic despite the pip 7 wheel caching bug.
+
+Fixed
+~~~~~
+
+- FEED_URI was always overridden by scrapyd
+- Specified maximum versions for requirements that became incompatible.
+- Marked package as zip-unsafe because twistd requires a plain ``txapp.py``
+- Don't install zipped scrapy in the py26 CI env
+  because its setup doesn't include the ``scrapy/VERSION`` file.
+
+Added
+~~~~~
+
+- Enabled some missing tests for the sqlite queues.
+- Enabled CI tests for python 2.6, which was still supported by the 1.1 release.
+- Document missing config options and include them in default_scrapyd.conf
+- Note the spider queue's ``priority`` argument in the schedule.json docs.
+
 1.1.0
 -----

From 4ef975b9c29a724cf638552414f1afa1e222d7d5 Mon Sep 17 00:00:00 2001
From: Νικόλαος-Διγενής Καραγιάννης
Date: Wed, 2 Nov 2016 11:24:42 +0200
Subject: [PATCH 23/23] Release 1.1.1

---
 docs/news.rst   | 2 +-
 scrapyd/VERSION | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/news.rst b/docs/news.rst
index 7df14fcd..152980a7 100644
--- a/docs/news.rst
+++ b/docs/news.rst
@@ -3,7 +3,7 @@
 Release notes
 =============

-1.1.1 - Unreleased
+1.1.1 - 2016-11-03
 ------------------

 Removed
diff --git a/scrapyd/VERSION b/scrapyd/VERSION
index 9084fa2f..524cb552 100644
--- a/scrapyd/VERSION
+++ b/scrapyd/VERSION
@@ -1 +1 @@
-1.1.0
+1.1.1
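Editor's note: the FEED_URI fix (PATCH 01) and the empty ``items_dir``
default (PATCH 03) work together — with ``items_dir`` empty, scrapyd no
longer exports ``SCRAPY_FEED_URI``, so a ``FEED_URI`` from the project's own
settings (or an item pipeline writing to a database) takes effect. A sketch
of the two configurations (paths illustrative)::

    # scrapyd.conf -- the new 1.1.1 default: scrapyd leaves FEED_URI alone,
    # so the project's own feed or database configuration wins.
    [scrapyd]
    items_dir =

    # Opting back in: scrapyd overrides FEED_URI and writes feeds
    # under items/<project>/<spider>/<jobid>.jl
    [scrapyd]
    items_dir = items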