diff --git a/.travis.yml b/.travis.yml
index c7fce74..bfbf49f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,17 +1,18 @@
 language: python
 sudo: false
-env:
-  - TOXENV=py27
-  - TOXENV=py34
-  - TOXENV=pypy
-  - TOXENV=flake8
+python:
+  - '2.7'
+  - '3.4'
+  - '3.5'
+  - pypy
 install:
-  - travis_retry pip install tox coveralls
+  - pip install tox-travis codecov coveralls
 script:
-  - travis_retry tox
+  - tox
 after_success:
+  - codecov
   - coveralls
diff --git a/CHANGES.txt b/CHANGES.txt
index 94b043f..eb32ce5 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,3 +1,16 @@
+4.1.0
+================
+
+- Prevent duplicate calls to setup_app.
+
+
+4.0.0
+================
+
+- Add support for new lower-case settings in Celery 4.
+- Support multiple --ini-var command-line arguments.
+
+
 3.0.0
 ================
 
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b2a04c9
--- /dev/null
+++ b/README.md
@@ -0,0 +1,233 @@
+# Getting Started
+
+[![image](https://img.shields.io/travis/aarki/pyramid_celery/master.svg)](https://travis-ci.org/aarki/pyramid_celery)
+[![image](https://img.shields.io/codecov/c/gh/aarki/pyramid_celery/master.svg)](https://codecov.io/gh/aarki/pyramid_celery)
+
+Include `pyramid_celery`, either in your `.ini`:
+
+``` ini
+pyramid.includes = pyramid_celery
+```
+
+or, equivalently, using `config.include`:
+
+``` python
+config.include('pyramid_celery')
+```
+
+Then you just need to tell `pyramid_celery` where to find the `[celery]`
+section:
+
+``` python
+config.configure_celery('development.ini')
+```
+
+Then you are free to use Celery, class-based:
+
+``` python
+from pyramid_celery import celery_app as app
+
+class AddTask(app.Task):
+    def run(self, x, y):
+        print(x + y)
+```
+
+or decorator-based:
+
+``` python
+from pyramid_celery import celery_app as app
+
+@app.task
+def add(x, y):
+    print(x + y)
+```
+
+To get pyramid settings you may access them in
+`app.conf.pyramid_registry`.
+
+# Configuration
+
+**Note on lower-case settings**: Celery version 4.0 introduced new
+lower-case settings and setting organization. Examples in this
+documentation use the new lower-case settings, but `pyramid_celery`
+continues to support the old setting names, as does Celery.
+
+By default, `pyramid_celery` assumes you want to configure Celery via
+`.ini` file settings. You can do this by calling
+
+``` python
+config.configure_celery('development.ini')
+```
+
+but if you are already in the `main` entry point of your application,
+and want to use the `.ini` used to configure the app, you can do the
+following:
+
+``` python
+config.configure_celery(global_config['__file__'])
+```
+
+If you want to configure Celery from the standard `celeryconfig` Python
+file, you can specify
+
+``` ini
+[celery]
+use_celeryconfig = True
+```
+
+You can get more information on `celeryconfig.py`
+[here](http://celery.readthedocs.io/en/latest/userguide/configuration.html).
+
+An example `.ini` configuration looks like this:
+
+``` ini
+[celery]
+broker_url = redis://localhost:1337/0
+imports = app1.tasks
+          app2.tasks
+
+[celerybeat:task1]
+task = app1.tasks.Task1
+type = crontab
+schedule = {"minute": 0}
+```
+
+## Scheduled/Periodic Tasks
+
+To use celery beat (periodic tasks), declare one `[celerybeat:...]`
+config section per task. Within each section, the following settings
+are available:
+
+  - `task`: the Python task you need executed.
+  - `type`: the type of scheduling your configuration uses, one of
+    `crontab`, `timedelta`, or `integer`.
+  - `schedule`: the actual schedule for your `type` of configuration,
+    parsed as JSON.
+  - `args`: additional positional arguments, parsed as JSON.
+  - `kwargs`: additional keyword arguments, parsed as JSON.
+
+Example configuration:
+
+``` ini
+[celerybeat:task1]
+task = app1.tasks.Task1
+type = crontab
+schedule = {"minute": 0}
+
+[celerybeat:task2]
+task = app1.tasks.Task2
+type = timedelta
+schedule = {"seconds": 30}
+args = [16, 16]
+
+[celerybeat:task3]
+task = app2.tasks.Task1
+type = crontab
+schedule = {"hour": 0, "minute": 0}
+kwargs = {"boom": "shaka"}
+
+[celerybeat:task4]
+task = myapp.tasks.Task4
+type = integer
+schedule = 30
+```
+
+Tasks are scheduled in UTC by default. If you want to schedule at a
+specific date/time in a different time zone, use the
+[`timezone` setting](https://celery.readthedocs.io/en/latest/userguide/configuration.html#std:setting-timezone):
+
+``` ini
+[celery]
+timezone = US/Pacific
+```
+
+To get a list of available time zones, do
+
+``` python
+from pprint import pprint
+from pytz import all_timezones
+pprint(all_timezones)
+```
+
+## Routing
+
+If you would like to route a task to a specific queue you can define a
+route per task by declaring their `queue` and/or `routing_key` in a
+`[celeryroute:...]` section.
+
+An example configuration for this:
+
+``` ini
+[celeryroute:otherapp.tasks.Task3]
+queue = slow_tasks
+routing_key = turtle
+
+[celeryroute:myapp.tasks.Task1]
+queue = fast_tasks
+```
+
+# Running the worker
+
+To run the worker, use the `celery worker` command and pass an
+additional `--ini` argument.
+
+``` bash
+celery worker -A pyramid_celery.celery_app --ini development.ini
+```
+
+To run the celery beat task scheduler, use the `--beat` option (during
+development) or the `celery beat` command (in production).
+
+``` bash
+celery beat -A pyramid_celery.celery_app --ini development.ini
+```
+
+To expand variables in your `.ini` (e.g. `%(database_username)s`), pass
+one `--ini-var` option per `key=value` pair; the option may be repeated
+as many times as needed.
+
+``` bash
+celery worker -A pyramid_celery.celery_app \
+    --ini development.ini \
+    --ini-var database_username=sontek \
+    --ini-var database_password=OhYeah!
+```
+
+# Logging
+
+If you use `.ini` configuration (rather than `celeryconfig.py`), then
+the logging configuration will be loaded from the `.ini`, and the
+default Celery loggers will not be used.
+
+You most likely want to add a `[logger_celery]` section to your `.ini`.
+
+``` ini
+[logger_celery]
+level = INFO
+handlers =
+qualname = celery
+```
+
+and then update your `[loggers]` section to include it.
+
+If you want to use the default Celery loggers, use the
+[`worker_hijack_root_logger` setting](https://celery.readthedocs.io/en/latest/userguide/configuration.html#std:setting-worker_hijack_root_logger).
+
+``` ini
+[celery]
+worker_hijack_root_logger = True
+```
+
+Celery worker processes do not propagate exceptions raised inside
+tasks; by default they are swallowed silently. To fix this, configure
+the `celery.worker.job` logger to propagate exceptions:
+
+``` ini
+# Make sure Celery worker doesn't silently swallow exceptions
+# See http://stackoverflow.com/a/20719461/315168
+# https://github.com/celery/celery/issues/2437
+[logger_celery_worker_job]
+level = ERROR
+handlers =
+qualname = celery.worker.job
+propagate = 1
+```
diff --git a/README.rst b/README.rst
deleted file mode 100644
index a7ccf46..0000000
--- a/README.rst
+++ /dev/null
@@ -1,259 +0,0 @@
-Getting Started
-=====================
-.. 
image:: https://travis-ci.org/sontek/pyramid_celery.png?branch=master - :target: https://travis-ci.org/sontek/pyramid_celery - -.. image:: https://coveralls.io/repos/sontek/pyramid_celery/badge.png?branch=master - :target: https://coveralls.io/r/sontek/pyramid_celery?branch=master - -.. image:: https://img.shields.io/pypi/v/pyramid_celery.svg - :target: https://pypi.python.org/pypi/pyramid_celery - -Include pyramid_celery either by setting your includes in your .ini, -or by calling ``config.include('pyramid_celery')``: - -.. code-block:: ini - - pyramid.includes = pyramid_celery - - -Then you just need to tell **pyramid_celery** what ini file your **[celery]** -section is in: - -.. code-block:: python - - config.configure_celery('development.ini') - -Then you are free to use celery, for example class based: - -.. code-block:: python - - from pyramid_celery import celery_app as app - - class AddTask(app.Task): - def run(self, x, y): - print x+y - -or decorator based: - -.. code-block:: python - - from pyramid_celery import celery_app as app - - @app.task - def add(x, y): - print x+y - -To get pyramid settings you may access them in ``app.conf['PYRAMID_REGISTRY']``. - -Configuration -===================== -By default **pyramid_celery** assumes you want to configure celery via an ini -settings. You can do this by calling **config.configure_celery('development.ini')** -but if you are already in the **main** of your application and want to use the ini -used to configure the app you can do the following: - -.. code-block:: python - - config.configure_celery(global_config['__file__']) - -If you want to use the standard **celeryconfig** python file you can set the -**USE_CELERYCONFIG = True** like this: - -.. code-block:: ini - - [celery] - USE_CELERYCONFIG = True - -You can get more information for celeryconfig.py here: - -http://celery.readthedocs.io/en/latest/userguide/configuration.html - -An example ini configuration looks like this: - -.. code-block:: ini - - [celery] - BROKER_URL = redis://localhost:1337/0 - CELERY_IMPORTS = app1.tasks - app2.tasks - - [celerybeat:task1] - task = app1.tasks.Task1 - type = crontab - schedule = {"minute": 0} - -Scheduled/Periodic Tasks ------------------------------ -To use celerybeat (periodic tasks) you need to declare 1 ``celerybeat`` config -section per task. The options are: - -- **task** - The python task you need executed. -- **type** - The type of scheduling your configuration uses, options are - ``crontab``, ``timedelta``, and ``integer``. -- **schedule** - The actual schedule for your ``type`` of configuration. -- **args** - Additional positional arguments. -- **kwargs** - Additional keyword arguments. - -Example configuration for this: - -.. code-block:: ini - - [celerybeat:task1] - task = app1.tasks.Task1 - type = crontab - schedule = {"minute": 0} - - [celerybeat:task2] - task = app1.tasks.Task2 - type = timedelta - schedule = {"seconds": 30} - args = [16, 16] - - [celerybeat:task3] - task = app2.tasks.Task1 - type = crontab - schedule = {"hour": 0, "minute": 0} - kwargs = {"boom": "shaka"} - - [celerybeat:task4] - task = myapp.tasks.Task4 - type = integer - schedule = 30 - -A gotcha you want to watchout for is that the date/time in scheduled tasks -is UTC by default. If you want to schedule for an exact date/time for your -local timezone you need to set ``CELERY_TIMEZONE``. 
Documentation for that -can be found here: - -http://celery.readthedocs.org/en/latest/userguide/periodic-tasks.html#time-zones - -If you need to find out what timezones are available you can do the following: - -.. code-block:: python - - from pprint import pprint - from pytz import all_timezones - pprint(all_timezones) - -Worker Execution ----------------- -The celerybeat worker will read your configuration and schedule tasks in the -queue to be executed at the time defined. This means if you are using -celerybeat you will end up running *2* workers: - -.. code-block:: bash - - $ celery worker -A pyramid_celery.celery_app --ini development.ini - $ celery beat -A pyramid_celery.celery_app --ini development.ini - -The first command is the standard worker command that will read messages off -of the queue and run the task. The second command will read the celerybeat -configuration and periodically schedule tasks on the queue. - - -Routing ------------------------------ -If you would like to route a task to a specific queue you can define a route -per task by declaring their ``queue`` and/or ``routing_key`` in a -``celeryroute`` section. - -An example configuration for this: - -.. code-block:: ini - - [celeryroute:otherapp.tasks.Task3] - queue = slow_tasks - routing_key = turtle - - [celeryroute:myapp.tasks.Task1] - queue = fast_tasks - -Running the worker -============================= -To run the worker we just use the standard celery command with an additional -argument: - -.. code-block:: bash - - celery worker -A pyramid_celery.celery_app --ini development.ini - -If you've defined variables in your .ini like %(database_username)s you can use -the *--ini-var* argument, which is a comma separated list of key value pairs: - -.. code-block:: bash - - celery worker -A pyramid_celery.celery_app --ini development.ini --ini-var=database_username=sontek,database_password=OhYeah! - -The values in *ini-var* cannot have spaces in them, this will break celery's -parser. - -The reason it is a csv instead of using *--ini-var* multiple times is because of -a bug in celery itself. When they fix the bug we will re-work the API. Ticket -is here: - -https://github.com/celery/celery/pull/2435 - -If you use celerybeat scheduler you need to run with the *--beat* flag to run -beat and the worker at the same time. - -.. code-block:: bash - - celery worker --beat -A pyramid_celery.celery_app --ini development.ini - -Or you can launch it separately like this: - -.. code-block:: bash - - celery beat -A pyramid_celery.celery_app --ini development.ini - -Logging -===================== -If you use the **.ini** configuration (i.e don't use celeryconfig.py) then the -logging configuration will be loaded from the .ini and will not use the default -celery loggers. - -You most likely want to add a logging section to your ini for celery as well: - -.. code-block:: ini - - [logger_celery] - level = INFO - handlers = - qualname = celery - -and then update your ``[loggers]`` section to include it. - -If you want use the default celery loggers then you can set -**CELERYD_HIJACK_ROOT_LOGGER=True** in the [celery] section of your .ini. - -Celery worker processes do not propagade exceptions inside tasks, but swallow them -silently by default. This is related to the behavior of reading asynchronous -task results back. To see if your tasks fail you might need to configure -``celery.worker.job`` logger to propagate exceptions: - -.. 
code-block:: ini - - # Make sure Celery worker doesn't silently swallow exceptions - # See http://stackoverflow.com/a/20719461/315168 - # https://github.com/celery/celery/issues/2437 - [logger_celery_worker_job] - level = ERROR - handlers = - qualname = celery.worker.job - propagate = 1 - -If you want use the default celery loggers then you can set -**CELERYD_HIJACK_ROOT_LOGGER=True** in the [celery] section of your .ini - -Demo -===================== -To see it all in action check out examples/long_running_with_tm, run -redis-server and then do: - -.. code-block:: bash - - $ python setup.py develop - $ populate_long_running_with_tm development.ini - $ pserve ./development.ini - $ celery worker -A pyramid_celery.celery_app --ini development.ini diff --git a/examples/long_running_with_tm/long_running_with_tm/tasks.py b/examples/long_running_with_tm/long_running_with_tm/tasks.py index 92927e9..a807c39 100644 --- a/examples/long_running_with_tm/long_running_with_tm/tasks.py +++ b/examples/long_running_with_tm/long_running_with_tm/tasks.py @@ -30,7 +30,7 @@ def delete_task(self, task_pk): @app.task def add_task(task): time.sleep(random.choice([2, 4, 6, 8, 10])) - print 'creating task %s' % task + print('creating task %s' % task) task = TaskItem(task=task) DBSession.add(task) transaction.commit() diff --git a/pyramid_celery/__init__.py b/pyramid_celery/__init__.py index 3fe281d..377eeb7 100644 --- a/pyramid_celery/__init__.py +++ b/pyramid_celery/__init__.py @@ -3,8 +3,8 @@ from celery import VERSION as celery_version from celery.bin import Option from pyramid.paster import bootstrap, setup_logging -from pyramid_celery.loaders import INILoader -from pyramid.settings import asbool +from pyramid_celery.loaders import INILoader, get_any, set_all +from pyramid_celery.bootsteps import DeadlockDetection def add_preload_arguments(parser): @@ -13,61 +13,45 @@ def add_preload_arguments(parser): help='Paste ini configuration file.' ) parser.add_argument( - '--ini-var', default=None, + '--ini-var', default=None, action='append', help='Comma separated list of key=value to pass to ini.' ) -celery_app = Celery() -if celery_version.major > 3: - celery_app.user_options['preload'].add(add_preload_arguments) -else: - celery_app.user_options['preload'].add(Option( - '-i', '--ini', default=None, - help='Paste ini configuration file.' - )) - celery_app.user_options['preload'].add(Option( - '--ini-var', default=None, - help='Comma separated list of key=value to pass to ini.' - )) +def make_app(): + app = Celery() + if celery_version.major > 3: + app.user_options['preload'].add(add_preload_arguments) + else: + app.user_options['preload'].add(Option( + '-i', '--ini', default=None, + help='Paste ini configuration file.' + )) + app.user_options['preload'].add(Option( + '--ini-var', default=None, action='append', + help='Comma separated list of key=value to pass to ini.' + )) + return app + + +celery_app = make_app() ini_file = None -def boolify(config, *names): - """Make config variables boolean. - - Celery wants ``False`` instead of ``"false"`` for CELERY_ALWAYS_EAGER. 
- """ - - for n in names: - if n in config: - config[n] = asbool(config[n]) - - def configure_logging(*args, **kwargs): setup_logging(ini_file) -def setup_app(app, root, request, registry, closer, ini_location): +def setup_app(ini_location): loader = INILoader(celery_app, ini_file=ini_location) celery_config = loader.read_configuration() - #: TODO: There might be other variables requiring special handling - boolify( - celery_config, 'CELERY_ALWAYS_EAGER', 'CELERY_ENABLE_UTC', - 'CELERY_RESULT_PERSISTENT' - ) - - if asbool(celery_config.get('USE_CELERYCONFIG', False)) is True: - config_path = 'celeryconfig' - celery_app.config_from_object(config_path) + if get_any(celery_config, ('use_celeryconfig', 'USE_CELERYCONFIG')): + celery_app.config_from_object('celeryconfig') else: # TODO: Couldn't find a way with celery to do this - hijack_logger = asbool( - celery_config.get('CELERYD_HIJACK_ROOT_LOGGER', False) - ) - - celery_config['CELERYD_HIJACK_ROOT_LOGGER'] = hijack_logger + hijack_logger = get_any(celery_config, ( + 'worker_hijack_root_logger', 'CELERYD_HIJACK_ROOT_LOGGER'), False) if hijack_logger is False: global ini_file @@ -76,11 +60,18 @@ def setup_app(app, root, request, registry, closer, ini_location): celery_app.config_from_object(celery_config) - celery_app.conf.update({'PYRAMID_APP': app}) - celery_app.conf.update({'PYRAMID_ROOT': root}) - celery_app.conf.update({'PYRAMID_REQUEST': request}) - celery_app.conf.update({'PYRAMID_REGISTRY': registry}) - celery_app.conf.update({'PYRAMID_CLOSER': closer}) + +def update_app(app=None, root=None, request=None, registry=None, closer=None): + # include custom pyramid_* settings + pyramid_conf = ( + ('pyramid_app', app), + ('pyramid_root', root), + ('pyramid_request', request), + ('pyramid_registry', registry), + ('pyramid_closer', closer), + ) + for k, v in pyramid_conf: + set_all(celery_app.conf, (k, k.upper()), v) @signals.user_preload_options.connect @@ -93,8 +84,8 @@ def on_preload_parsed(options, **kwargs): exit(-1) options = {} - if ini_vars is not None: - for pairs in ini_vars.split(','): + if ini_vars: + for pairs in ini_vars: key, value = pairs.split('=') options[key] = value @@ -107,19 +98,17 @@ def on_preload_parsed(options, **kwargs): root = env['root'] request = env['request'] closer = env['closer'] - setup_app(app, root, request, registry, closer, ini_location) + update_app(app, root, request, registry, closer) def configure(config, ini_location): - setup_app( - None, - None, - None, - config.registry, - None, - ini_location - ) + setup_app(ini_location) + + +def configure_bootsteps(config): + celery_app.steps['worker'].add(DeadlockDetection) def includeme(config): config.add_directive('configure_celery', configure) + config.add_directive('configure_celery_bootsteps', configure_bootsteps) diff --git a/pyramid_celery/bootsteps.py b/pyramid_celery/bootsteps.py new file mode 100644 index 0000000..6f08f0a --- /dev/null +++ b/pyramid_celery/bootsteps.py @@ -0,0 +1,28 @@ +from celery import bootsteps +import time + + +class DeadlockDetection(bootsteps.StartStopStep): + requires = {'celery.worker.components:Timer'} + + def __init__(self, worker, deadlock_timeout=600): + self.timeout = deadlock_timeout + self.requests = [] + self.tref = None + + def start(self, worker): + # run every 30 seconds. 
+        self.tref = worker.timer.call_repeatedly(
+            30.0, self.detect, (worker,), priority=10,
+        )
+
+    def stop(self, worker):
+        if self.tref:
+            self.tref.cancel()
+            self.tref = None
+
+    def detect(self, worker):
+        # exit the worker if any active request has been running
+        # longer than the deadlock timeout
+        for req in worker.active_requests:
+            if req.time_start and time.time() - req.time_start > self.timeout:
+                raise SystemExit()
diff --git a/pyramid_celery/loaders.py b/pyramid_celery/loaders.py
index 71549a8..8366cc3 100644
--- a/pyramid_celery/loaders.py
+++ b/pyramid_celery/loaders.py
@@ -8,6 +8,20 @@ from functools import partial
+def get_any(dict_, keys, default=None):
+    for key in keys:
+        try:
+            return dict_[key]
+        except KeyError:
+            pass
+    return default
+
+
+def set_all(dict_, keys, value):
+    for key in keys:
+        dict_[key] = value
+
+
 def crontab(value):
     return celery.schedules.crontab(**value)
 
@@ -83,53 +97,93 @@ def get_route_config(parser, section):
     return config
 
-class INILoader(celery.loaders.base.BaseLoader):
-    ConfigParser = configparser.SafeConfigParser
+#: TODO: There might be other variables requiring special handling
+bool_settings = [
+    'always_eager', 'CELERY_ALWAYS_EAGER',
+    'enable_utc', 'CELERY_ENABLE_UTC',
+    'result_persistent', 'CELERY_RESULT_PERSISTENT',
+    'worker_hijack_root_logger', 'CELERYD_HIJACK_ROOT_LOGGER',
+    'use_celeryconfig', 'USE_CELERYCONFIG',
+]
 
-    def __init__(self, app, **kwargs):
-        self.celery_conf = kwargs.pop('ini_file')
-        self.parser = self.ConfigParser()
+list_settings = [
+    'imports', 'CELERY_IMPORTS',
+    'accept_content', 'CELERY_ACCEPT_CONTENT',
+]
 
-        super(INILoader, self).__init__(app, **kwargs)
+tuple_list_settings = [
+    'admins', 'ADMINS',
+]
+
+dict_settings = [
+    'broker_transport_options', 'BROKER_TRANSPORT_OPTIONS',
+]
 
-    def read_configuration(self, fail_silently=True):
-        self.parser.read(self.celery_conf)
-        config_dict = {}
 
+def parse_list_setting(setting):
+    split_setting = setting.split()
+    return split_setting
 
-        for key, value in self.parser.items('celery'):
-            config_dict[key.upper()] = value
-        list_settings = ['CELERY_IMPORTS', 'CELERY_ACCEPT_CONTENT']
 
+def parse_tuple_list_setting(setting):
+    items = setting.split('\n')
+    tuple_settings = [tuple(item.split(',')) for item in items]
+    return tuple_settings
 
-        for setting in list_settings:
-            if setting in config_dict:
-                split_setting = config_dict[setting].split()
-                config_dict[setting] = split_setting
-        tuple_list_settings = ['ADMINS']
 
+def parse_dict_setting(setting):
+    return json.loads(setting.encode('ascii'))
 
-        for setting in tuple_list_settings:
-            if setting in config_dict:
-                items = config_dict[setting].split()
-                tuple_settings = [tuple(item.split(',')) for item in items]
-                config_dict[setting] = tuple_settings
+
+class INILoader(celery.loaders.base.BaseLoader):
+    def __init__(self, app, **kwargs):
+        self.celery_conf = kwargs.pop('ini_file')
+        self.parser = configparser.SafeConfigParser()
+        self.parser.optionxform = str
+        super(INILoader, self).__init__(app, **kwargs)
+
+    def read_configuration(self, fail_silently=False):
+        self.parser.read(self.celery_conf)
+        config_dict = dict(self.parser.items('celery'))
+
+        for setting in config_dict.keys():
+            try:
+                if setting in bool_settings:
+                    config_dict[setting] = self.parser.getboolean('celery', setting)
+                if setting in list_settings:
+                    config_dict[setting] = parse_list_setting(config_dict[setting])
+                if setting in tuple_list_settings:
+                    config_dict[setting] = parse_tuple_list_setting(config_dict[setting])
+                if setting in dict_settings:
+                    config_dict[setting] = parse_dict_setting(config_dict[setting])
+            except 
Exception as exc: + if not fail_silently: + raise ConfigurationError('Can\'t parse value for {}. {}'.format(setting, exc.message)) + del config_dict[setting] beat_config = {} route_config = {} for section in self.parser.sections(): if section.startswith('celerybeat:'): - name = section.split(':', 1)[1] - beat_config[name] = get_beat_config(self.parser, section) + try: + name = section.split(':', 1)[1] + beat_config[name] = get_beat_config(self.parser, section) + if beat_config: + set_all(config_dict, ( + 'beat_schedule', 'CELERYBEAT_SCHEDULE'), beat_config) + except Exception as exc: + if not fail_silently: + raise ConfigurationError('Can\'t parse celerybeat config. {}'.format(exc.message)) elif section.startswith('celeryroute:'): - name = section.split(':', 1)[1] - route_config[name] = get_route_config(self.parser, section) - - if beat_config: - config_dict['CELERYBEAT_SCHEDULE'] = beat_config - - if route_config: - config_dict['CELERY_ROUTES'] = route_config + try: + name = section.split(':', 1)[1] + route_config[name] = get_route_config(self.parser, section) + if route_config: + set_all(config_dict, ( + 'task_routes', 'CELERY_ROUTES'), route_config) + except Exception as exc: + if not fail_silently: + raise ConfigurationError('Can\'t parse celeryroute config', exc.message) return config_dict diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 4f34d3e..0000000 --- a/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -pyramid -celery -pytest -mock diff --git a/setup.cfg b/setup.cfg index f7aea29..03f770c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,20 +1,8 @@ -[pytest] -norecursedirs = - .git - .tox - dist - build - *.egg - examples/ - +[tool:pytest] addopts = -rxEfs --strict - --doctest-modules - --doctest-glob=\*.rst --tb=short --ignore=setup.py - - tests/ markers = unit diff --git a/setup.py b/setup.py index ff8db75..3e80b11 100644 --- a/setup.py +++ b/setup.py @@ -3,17 +3,16 @@ from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) -README = open(os.path.join(here, 'README.rst')).read() +README = open(os.path.join(here, 'README.md')).read() CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() requires = ['pyramid', 'celery'] - if sys.version_info < (2, 7): requires.append('argparse') setup(name='pyramid_celery', - version='3.0.0', + version='4.1.0', description='Celery integration with pyramid', long_description=README + "\n" + CHANGES, classifiers=[ @@ -40,6 +39,7 @@ include_package_data=True, zip_safe=False, install_requires=requires, - tests_require=requires + ['pytest', 'mock'], - test_suite="pyramid_celery", + extras_require={ + 'dev': ['tox'] + } ) diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 9b090cd..0000000 --- a/test-requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -pytest -pytest-cov -mock -tox -redis diff --git a/tests/conftest.py b/tests/conftest.py index 21a19f0..af57a8c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,9 @@ import pyramid_celery import pytest -from celery import Celery - @pytest.fixture(autouse=True) def setup_celery_app(monkeypatch): - app = Celery() + # use a fresh app instance for each test + app = pyramid_celery.make_app() monkeypatch.setattr(pyramid_celery, 'celery_app', app) diff --git a/tests/test_celery.py b/tests/test_celery.py index deba4a0..fb790a8 100644 --- a/tests/test_celery.py +++ b/tests/test_celery.py @@ -69,7 +69,26 @@ def test_preload_ini(): with mock.patch('pyramid_celery.bootstrap') as boot: 
on_preload_parsed(options) - assert boot.called_with('dev.ini') + boot.assert_called_with('tests/configs/dev.ini') + + +@pytest.mark.unit +def test_preload_options(): + from pyramid_celery import celery_app + from celery.bin.celery import Command + + with mock.patch('pyramid_celery.bootstrap') as boot: + cmd = Command(celery_app) + cmd.setup_app_from_commandline(['--ini', 'tests/configs/dev.ini']) + boot.assert_called_with('tests/configs/dev.ini') + + with mock.patch('pyramid_celery.bootstrap') as boot: + cmd = Command(celery_app) + cmd.setup_app_from_commandline([ + '--ini', 'tests/configs/dev.ini', + '--ini-var', 'foo=bar', + '--ini-var=bar=baz', + ]) @pytest.mark.unit @@ -95,13 +114,13 @@ def test_preload_with_ini_vars(): from pyramid_celery import on_preload_parsed options = { 'ini': 'tests/configs/dev.ini', - 'ini_var': 'database=foo,password=bar', + 'ini_var': ['database=foo', 'password=bar'], } with mock.patch('pyramid_celery.bootstrap') as boot: on_preload_parsed(options) expected_vars = {'database': 'foo', 'password': 'bar'} - assert boot.called_with('dev.ini', expected_vars) + boot.assert_called_with('tests/configs/dev.ini', options=expected_vars) @pytest.mark.unit @@ -121,8 +140,7 @@ def test_ini_logging(): sender=None, loglevel='INFO', logfile=None, format='', colorize=False, ) - - assert setup_logging.called_with('tests/configs/dev.ini') + setup_logging.assert_called_with('tests/configs/dev.ini') @pytest.mark.unit diff --git a/tox.ini b/tox.ini index b62ffdb..ad182b8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,31 +1,39 @@ [tox] -skipsdist = True -envlist = py27, py34, py35, pypy, flake8, celery3, celery4 - -[base] -commands = - pip install -e . -r test-requirements.txt +skipsdist = true +envlist = clean, py{27,34,35,37,py}-celery{3,4}, flake8, report [testenv] -pip_pre=False +usedevelop = true +deps = + mock + pytest + pytest-cov + celery3: celery<4 + celery4: celery>=4 commands = - {[base]commands} - py.test {posargs} py.test {posargs} \ --cov=pyramid_celery \ - --cov-report=xml \ - --cov-report=term-missing + --cov-report= \ + --cov-append [testenv:flake8] +deps = + flake8 +skip_install = true basepython = python3 commands = - pip install flake8 flake8 pyramid_celery/ tests/ -[testenv:celery4] -commands = - pip install 'celery<4' +[testenv:clean] +deps = + coverage +skip_install = true +commands = + coverage erase -[testenv:celery3] -commands = - pip install 'celery>=4' \ No newline at end of file +[testenv:report] +deps = + coverage +skip_install = true +commands = + coverage report
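
For reference, here is a minimal sketch of how the tox matrix above can be exercised locally. It assumes only that `tox` is installed; the environment names come from the `envlist` in `tox.ini`, where the `clean` and `report` environments wrap the per-interpreter runs so the `--cov-append` data is combined into a single coverage report.

``` bash
# Run the whole matrix: erase old coverage data, test every
# python/celery combination, then print the combined report.
pip install tox
tox

# Or run a single interpreter/Celery combination (plus lint)
# and still get a coverage report at the end.
tox -e clean,py27-celery3,flake8,report
```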