From da83647b9d11076936128e80ceafce1176d35a2a Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Tue, 6 Oct 2015 16:43:25 +0200 Subject: [PATCH 01/43] Allow to define seconds when raising RetryableJobError The job will be retried after the given number of seconds if the exception is raised with a defined 'seconds' argument. --- connector/CHANGES.rst | 5 +++++ connector/controllers/main.py | 2 +- connector/exception.py | 12 +++++++++++- connector/queue/worker.py | 2 +- 4 files changed, 18 insertions(+), 3 deletions(-) diff --git a/connector/CHANGES.rst b/connector/CHANGES.rst index 50ec88a77..1f2fce886 100644 --- a/connector/CHANGES.rst +++ b/connector/CHANGES.rst @@ -1,6 +1,11 @@ Changelog --------- +Future (?) +~~~~~~~~~~ + +* Allow to define seconds when raising a RetryableJobError (https://github.com/OCA/connector/pull/124) + 3.2.0 (2015-09-10) ~~~~~~~~~~~~~~~~~~ diff --git a/connector/controllers/main.py b/connector/controllers/main.py index 23ed6b1ef..16c46007d 100644 --- a/connector/controllers/main.py +++ b/connector/controllers/main.py @@ -94,7 +94,7 @@ def retry_postpone(job, message, seconds=None): except RetryableJobError as err: # delay the job later, requeue - retry_postpone(job, unicode(err)) + retry_postpone(job, unicode(err), seconds=err.seconds) _logger.debug('%s postponed', job) except OperationalError as err: diff --git a/connector/exception.py b/connector/exception.py index 957f14dca..3a1c78d2b 100644 --- a/connector/exception.py +++ b/connector/exception.py @@ -53,7 +53,17 @@ class FailedJobError(JobError): class RetryableJobError(JobError): - """ A job had an error but can be retried. """ + """ A job had an error but can be retried. + + The job will be retried after the given number of seconds. + If seconds is empty, it will be retried according to the ``retry_pattern`` + of the job or by :const:`connector.queue.job.RETRY_INTERVAL` if nothing + is defined. + """ + + def __init__(self, msg, seconds=None): + super(RetryableJobError, self).__init__(msg) + self.seconds = seconds class NetworkRetryableError(RetryableJobError): diff --git a/connector/queue/worker.py b/connector/queue/worker.py index e6a47b220..3de7786ae 100644 --- a/connector/queue/worker.py +++ b/connector/queue/worker.py @@ -137,7 +137,7 @@ def retry_postpone(job, message, seconds=None): except RetryableJobError as err: # delay the job later, requeue - retry_postpone(job, unicode(err)) + retry_postpone(job, unicode(err), seconds=err.seconds) _logger.debug('%s postponed', job) except OperationalError as err: From dde980b7c79bb51978c0257ca8637bd5531c048e Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Tue, 6 Oct 2015 17:01:18 +0200 Subject: [PATCH 02/43] Allow to ignore the retry counter when raising RetryableJobError --- connector/CHANGES.rst | 1 + connector/exception.py | 5 ++++- connector/queue/job.py | 7 +++++-- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/connector/CHANGES.rst b/connector/CHANGES.rst index 1f2fce886..3a7947ef9 100644 --- a/connector/CHANGES.rst +++ b/connector/CHANGES.rst @@ -5,6 +5,7 @@ Future (?) 
~~~~~~~~~~ * Allow to define seconds when raising a RetryableJobError (https://github.com/OCA/connector/pull/124) +* Allow to ignore the retry counter when raising a RetryableJobError (https://github.com/OCA/connector/pull/124) 3.2.0 (2015-09-10) ~~~~~~~~~~~~~~~~~~ diff --git a/connector/exception.py b/connector/exception.py index 3a1c78d2b..d83dc2f68 100644 --- a/connector/exception.py +++ b/connector/exception.py @@ -59,11 +59,14 @@ class RetryableJobError(JobError): If seconds is empty, it will be retried according to the ``retry_pattern`` of the job or by :const:`connector.queue.job.RETRY_INTERVAL` if nothing is defined. + + If ``ignore_retry`` is True, the retry counter will not be increased. """ - def __init__(self, msg, seconds=None): + def __init__(self, msg, seconds=None, ignore_retry=False): super(RetryableJobError, self).__init__(msg) self.seconds = seconds + self.ignore_retry = ignore_retry class NetworkRetryableError(RetryableJobError): diff --git a/connector/queue/job.py b/connector/queue/job.py index 2558cc11d..93a7e4d97 100644 --- a/connector/queue/job.py +++ b/connector/queue/job.py @@ -465,8 +465,11 @@ def perform(self, session): self.retry += 1 try: self.result = self.func(session, *self.args, **self.kwargs) - except RetryableJobError: - if not self.max_retries: # infinite retries + except RetryableJobError as err: + if err.ignore_retry: + self.retry -= 1 + raise + elif not self.max_retries: # infinite retries raise elif self.retry >= self.max_retries: type_, value, traceback = sys.exc_info() From 51ee1c42b72bc291c2f849f82cd88dc9f8042b52 Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Wed, 7 Oct 2015 08:59:16 +0200 Subject: [PATCH 03/43] Exception should be raised with arguments --- connector/queue/job.py | 2 +- connector/tests/test_job.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/connector/queue/job.py b/connector/queue/job.py index 93a7e4d97..4dcc00a6e 100644 --- a/connector/queue/job.py +++ b/connector/queue/job.py @@ -694,7 +694,7 @@ def retryable_example(session): # retries 5 to 10 postponed 20 minutes later # retries 10 to 15 postponed 30 minutes later # all subsequent retries postponed 12 hours later - raise RetryableJobError + raise RetryableJobError('Must be retried later') retryable_example.delay(session) diff --git a/connector/tests/test_job.py b/connector/tests/test_job.py index 0d0e072cf..f9f3841de 100644 --- a/connector/tests/test_job.py +++ b/connector/tests/test_job.py @@ -48,7 +48,7 @@ def dummy_task_args(session, model_name, a, b, c=None): def retryable_error_task(session): - raise RetryableJobError + raise RetryableJobError('Must be retried later') class TestJobs(unittest2.TestCase): From 2790b8e4189de6e95d5b14596486fa553519e1bf Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Thu, 15 Oct 2015 14:42:01 +0200 Subject: [PATCH 04/43] Check if a module is installed from the registry Instead of `ir.module.module`. Simplifies as we never hit the database nor have to maintain a cache. Also, it fixes an issue when we run tests directly at the installation of a module, during the tests the previous implementation was considering the module as uninstalled instead of installed. Change proposed @bealdav. Thanks! 
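As an aside for readers of this log: the new check is just a set-membership
test on the registry's list of fully loaded modules. A minimal, hypothetical
usage sketch (`is_module_installed` is the helper kept in
connector/connector.py in the diff below; the surrounding function is made
up for illustration):

    from openerp.addons.connector.connector import is_module_installed

    def configure_optional_features(env):
        # sketch only: env.registry._init_modules holds the names of all
        # fully loaded modules, so this is a cheap in-memory check (no SQL)
        if is_module_installed(env, 'connector'):
            # safe to enable behaviour that depends on the connector addon
            pass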
Fixes #85 --- connector/__init__.py | 1 - connector/connector.py | 22 +++------------ connector/ir_module_module.py | 48 --------------------------------- connector/tests/test_session.py | 17 ------------ 4 files changed, 4 insertions(+), 84 deletions(-) delete mode 100644 connector/ir_module_module.py diff --git a/connector/__init__.py b/connector/__init__.py index f01bd9db3..e833791c4 100644 --- a/connector/__init__.py +++ b/connector/__init__.py @@ -7,4 +7,3 @@ from . import checkpoint from . import controllers from . import jobrunner -from . import ir_module_module diff --git a/connector/connector.py b/connector/connector.py index 8f9118a4e..8e74898c5 100644 --- a/connector/connector.py +++ b/connector/connector.py @@ -55,25 +55,11 @@ def install_in_connector(): def is_module_installed(env, module_name): """ Check if an Odoo addon is installed. - The function might be called before `connector` is even installed; - in such case, `ir_module_module.is_module_installed()` is not available yet - and this is why we first check the installation of `connector` by looking - up for a model in the registry. - - :param module_name: name of the addon to check being 'connector' or - an addon depending on it - + :param module_name: name of the addon """ - if env.registry.get('connector.backend'): - if module_name == 'connector': - # fast-path: connector is necessarily installed because - # the model is in the registry - return True - # for another addon, check in ir.module.module - return env['ir.module.module'].is_module_installed(module_name) - - # connector module is not installed neither any sub-addons - return False + # the registry maintains a set of fully loaded modules so we can + # lookup for our module there + return module_name in env.registry._init_modules def get_openerp_module(cls_or_func): diff --git a/connector/ir_module_module.py b/connector/ir_module_module.py deleted file mode 100644 index cc1d07444..000000000 --- a/connector/ir_module_module.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# Author: Guewen Baconnier -# Copyright 2012-2013 Camptocamp SA -# Copyright 2015 anybox SA -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -############################################################################## - -from openerp import models, api -from openerp.tools.cache import ormcache - - -class IrModuleModule(models.Model): - """Overwrite ir.module.module to add cached method 'is_module_installed'. - This method is cached, because connector will always check if a module is - installed before do action. 
- - All update (write method) on the field state, invalidate the cache - """ - _inherit = 'ir.module.module' - - @ormcache(skiparg=1) - def is_module_installed(self, module_name): - states = ('installed', 'to upgrade') - return bool(len(self.search([('name', '=', module_name), - ('state', 'in', states)]))) - - @api.multi - def write(self, values): - res = super(IrModuleModule, self).write(values) - if 'state' in values: - self.clear_caches() - - return res diff --git a/connector/tests/test_session.py b/connector/tests/test_session.py index 27a389b48..8ded6877a 100644 --- a/connector/tests/test_session.py +++ b/connector/tests/test_session.py @@ -149,20 +149,3 @@ def test_is_module_installed_cache_not_propagated(self): """ Test if the cache is well different for the different modules """ self.assertTrue(self.session.is_module_installed('connector')) self.assertFalse(self.session.is_module_installed('#dummy#')) - - def test_is_module_installed_cache_invalidation(self): - """ Test on an invalidation of cache about installed modules """ - module = self.env['ir.module.module'] - domain = [('name', '=', 'base')] - self.assertTrue(self.session.is_module_installed('base')) - # only to check that the cache works, the in validation is done only - # if the field state is modified by write method, UGLY but no other - # solution - self.env.cr.execute("UPDATE ir_module_module " - "SET state='uninstalled' " - "WHERE name='base'") - self.assertTrue(self.session.is_module_installed('base')) - module.search(domain).state = 'uninstalled' - self.assertFalse(self.session.is_module_installed('base')) - module.search(domain).state = 'installed' - self.assertTrue(self.session.is_module_installed('base')) From 1d5e183817a4f5121ea0da353f353f26f72f82da Mon Sep 17 00:00:00 2001 From: OCA Transbot Date: Sat, 17 Oct 2015 22:16:12 -0400 Subject: [PATCH 05/43] OCA Transbot updated translations from Transifex --- connector/i18n/ca.po | 11 +++-------- connector/i18n/en.po | 11 +++-------- connector/i18n/es_MX.po | 13 ++++--------- connector/i18n/fr.po | 11 +++-------- connector/i18n/pt_BR.po | 11 +++-------- connector/i18n/sl.po | 11 +++-------- 6 files changed, 19 insertions(+), 49 deletions(-) diff --git a/connector/i18n/ca.po b/connector/i18n/ca.po index 988d538db..784291457 100644 --- a/connector/i18n/ca.po +++ b/connector/i18n/ca.po @@ -9,8 +9,8 @@ msgid "" msgstr "" "Project-Id-Version: connector (8.0)\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-09-11 07:47+0000\n" -"PO-Revision-Date: 2015-09-10 13:20+0000\n" +"POT-Creation-Date: 2015-10-16 06:23+0000\n" +"PO-Revision-Date: 2015-10-15 13:38+0000\n" "Last-Translator: OCA Transbot \n" "Language-Team: Catalan (http://www.transifex.com/oca/OCA-connector-8-0/language/ca/)\n" "MIME-Version: 1.0\n" @@ -92,7 +92,7 @@ msgid "Cancel" msgstr "Cancel·la" #. module: connector -#: code:addons/connector/queue/job.py:571 +#: code:addons/connector/queue/job.py:574 #, python-format msgid "Canceled. Nothing to do." msgstr "Cancel·lat, res a fer." @@ -463,11 +463,6 @@ msgstr "Historial de missatges i comunicacións" msgid "Model" msgstr "Model" -#. module: connector -#: model:ir.model,name:connector.model_ir_module_module -msgid "Module" -msgstr "Mòdul" - #. 
module: connector #: field:connector.backend,name:0 field:queue.job.channel,name:0 #: field:queue.job.function,name:0 diff --git a/connector/i18n/en.po b/connector/i18n/en.po index 9d3e205c1..f45609171 100644 --- a/connector/i18n/en.po +++ b/connector/i18n/en.po @@ -7,8 +7,8 @@ msgid "" msgstr "" "Project-Id-Version: connector (8.0)\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-09-11 07:47+0000\n" -"PO-Revision-Date: 2015-09-10 13:20+0000\n" +"POT-Creation-Date: 2015-10-16 06:23+0000\n" +"PO-Revision-Date: 2015-10-15 13:38+0000\n" "Last-Translator: OCA Transbot \n" "Language-Team: English (http://www.transifex.com/oca/OCA-connector-8-0/language/en/)\n" "MIME-Version: 1.0\n" @@ -90,7 +90,7 @@ msgid "Cancel" msgstr "Cancel" #. module: connector -#: code:addons/connector/queue/job.py:571 +#: code:addons/connector/queue/job.py:574 #, python-format msgid "Canceled. Nothing to do." msgstr "Canceled. Nothing to do." @@ -461,11 +461,6 @@ msgstr "Messages and communication history" msgid "Model" msgstr "Model" -#. module: connector -#: model:ir.model,name:connector.model_ir_module_module -msgid "Module" -msgstr "Module" - #. module: connector #: field:connector.backend,name:0 field:queue.job.channel,name:0 #: field:queue.job.function,name:0 diff --git a/connector/i18n/es_MX.po b/connector/i18n/es_MX.po index cdda996bc..1939da97b 100644 --- a/connector/i18n/es_MX.po +++ b/connector/i18n/es_MX.po @@ -8,9 +8,9 @@ msgid "" msgstr "" "Project-Id-Version: connector (8.0)\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-10-09 10:06+0000\n" -"PO-Revision-Date: 2015-10-09 16:14+0000\n" -"Last-Translator: Jesus Alan Ramos Rodriguez \n" +"POT-Creation-Date: 2015-10-16 06:23+0000\n" +"PO-Revision-Date: 2015-10-15 13:38+0000\n" +"Last-Translator: OCA Transbot \n" "Language-Team: Spanish (Mexico) (http://www.transifex.com/oca/OCA-connector-8-0/language/es_MX/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -91,7 +91,7 @@ msgid "Cancel" msgstr "Cancelar" #. module: connector -#: code:addons/connector/queue/job.py:571 +#: code:addons/connector/queue/job.py:574 #, python-format msgid "Canceled. Nothing to do." msgstr "" @@ -462,11 +462,6 @@ msgstr "" msgid "Model" msgstr "Modelo" -#. module: connector -#: model:ir.model,name:connector.model_ir_module_module -msgid "Module" -msgstr "Módulo" - #. module: connector #: field:connector.backend,name:0 field:queue.job.channel,name:0 #: field:queue.job.function,name:0 diff --git a/connector/i18n/fr.po b/connector/i18n/fr.po index c24ace5c4..f83d30dd7 100644 --- a/connector/i18n/fr.po +++ b/connector/i18n/fr.po @@ -9,8 +9,8 @@ msgid "" msgstr "" "Project-Id-Version: connector (8.0)\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-09-11 07:47+0000\n" -"PO-Revision-Date: 2015-09-10 13:20+0000\n" +"POT-Creation-Date: 2015-10-16 06:23+0000\n" +"PO-Revision-Date: 2015-10-15 13:38+0000\n" "Last-Translator: OCA Transbot \n" "Language-Team: French (http://www.transifex.com/oca/OCA-connector-8-0/language/fr/)\n" "MIME-Version: 1.0\n" @@ -92,7 +92,7 @@ msgid "Cancel" msgstr "Annuler" #. module: connector -#: code:addons/connector/queue/job.py:571 +#: code:addons/connector/queue/job.py:574 #, python-format msgid "Canceled. Nothing to do." msgstr "Annulé. Rien à faire." @@ -463,11 +463,6 @@ msgstr "Historique des messages et des communications" msgid "Model" msgstr "Modèle" -#. module: connector -#: model:ir.model,name:connector.model_ir_module_module -msgid "Module" -msgstr "Module" - #. 
module: connector #: field:connector.backend,name:0 field:queue.job.channel,name:0 #: field:queue.job.function,name:0 diff --git a/connector/i18n/pt_BR.po b/connector/i18n/pt_BR.po index 9858551d0..429f3f193 100644 --- a/connector/i18n/pt_BR.po +++ b/connector/i18n/pt_BR.po @@ -9,8 +9,8 @@ msgid "" msgstr "" "Project-Id-Version: connector (8.0)\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-09-11 07:47+0000\n" -"PO-Revision-Date: 2015-09-10 13:20+0000\n" +"POT-Creation-Date: 2015-10-16 06:23+0000\n" +"PO-Revision-Date: 2015-10-15 13:38+0000\n" "Last-Translator: OCA Transbot \n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/oca/OCA-connector-8-0/language/pt_BR/)\n" "MIME-Version: 1.0\n" @@ -92,7 +92,7 @@ msgid "Cancel" msgstr "Cancelar" #. module: connector -#: code:addons/connector/queue/job.py:571 +#: code:addons/connector/queue/job.py:574 #, python-format msgid "Canceled. Nothing to do." msgstr "Cancelado. Nada a se fazer" @@ -463,11 +463,6 @@ msgstr "Mensagens e histórico de comunicação" msgid "Model" msgstr "Modelo" -#. module: connector -#: model:ir.model,name:connector.model_ir_module_module -msgid "Module" -msgstr "Módulo" - #. module: connector #: field:connector.backend,name:0 field:queue.job.channel,name:0 #: field:queue.job.function,name:0 diff --git a/connector/i18n/sl.po b/connector/i18n/sl.po index 4cda46329..17582f1a6 100644 --- a/connector/i18n/sl.po +++ b/connector/i18n/sl.po @@ -8,8 +8,8 @@ msgid "" msgstr "" "Project-Id-Version: connector (8.0)\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-09-11 07:47+0000\n" -"PO-Revision-Date: 2015-09-10 13:20+0000\n" +"POT-Creation-Date: 2015-10-16 06:23+0000\n" +"PO-Revision-Date: 2015-10-15 13:38+0000\n" "Last-Translator: OCA Transbot \n" "Language-Team: Slovenian (http://www.transifex.com/oca/OCA-connector-8-0/language/sl/)\n" "MIME-Version: 1.0\n" @@ -91,7 +91,7 @@ msgid "Cancel" msgstr "Preklic" #. module: connector -#: code:addons/connector/queue/job.py:571 +#: code:addons/connector/queue/job.py:574 #, python-format msgid "Canceled. Nothing to do." msgstr "Preklicano. Ni opravkov." @@ -462,11 +462,6 @@ msgstr "Sporočila in kronologija komunikacij" msgid "Model" msgstr "Model" -#. module: connector -#: model:ir.model,name:connector.model_ir_module_module -msgid "Module" -msgstr "Modul" - #. module: connector #: field:connector.backend,name:0 field:queue.job.channel,name:0 #: field:queue.job.function,name:0 From decde3d2517e31e05895f30608693a013bd0d311 Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Thu, 1 Oct 2015 20:34:30 +0200 Subject: [PATCH 06/43] Add 'mock_job_delay_to_direct' to ease tests on jobs This context manager allows to execute a job function synchronously when it should normally have been delayed in a asynchronous job. This is really useful for writing tests that check the flow of a synchronisation without having to deal with the jobs execution mechanisms. It is used in the OCA/connector-magento project and ought to be used by other projects. --- connector/CHANGES.rst | 2 ++ connector/tests/common.py | 64 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 connector/tests/common.py diff --git a/connector/CHANGES.rst b/connector/CHANGES.rst index 3a7947ef9..cacc7453c 100644 --- a/connector/CHANGES.rst +++ b/connector/CHANGES.rst @@ -6,6 +6,8 @@ Future (?) 
* Allow to define seconds when raising a RetryableJobError (https://github.com/OCA/connector/pull/124) * Allow to ignore the retry counter when raising a RetryableJobError (https://github.com/OCA/connector/pull/124) +* Add 'mock_job_delay_to_direct' to ease tests on jobs (https://github.com/OCA/connector/pull/123) + 3.2.0 (2015-09-10) ~~~~~~~~~~~~~~~~~~ diff --git a/connector/tests/common.py b/connector/tests/common.py new file mode 100644 index 000000000..30034dc9a --- /dev/null +++ b/connector/tests/common.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# +# Authors: Guewen Baconnier +# Copyright 2015 Camptocamp SA +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# + +import importlib +from contextlib import contextmanager + +import mock + + +@contextmanager +def mock_job_delay_to_direct(job_path): + """ Replace the .delay() of a job by a direct call + + job_path is the python path as string, such as:: + + 'openerp.addons.magentoerpconnect.stock_picking.export_picking_done' + + This is a context manager, all the calls made to the job function in + job_path inside the context manager will be executed synchronously. + + .. note:: It uses :meth:`mock.patch` so it has the same pitfall + regarding the python path. If the mock seems to have no + effect, read `Where to patch + `_ + in the mock documentation. 
+ + """ + job_module, job_name = job_path.rsplit('.', 1) + module = importlib.import_module(job_module) + job_func = getattr(module, job_name, None) + assert job_func, "The function %s must exist in %s" % (job_name, + job_module) + + def clean_args_for_func(*args, **kwargs): + # remove the special args reserved to '.delay()' + kwargs.pop('priority', None) + kwargs.pop('eta', None) + kwargs.pop('model_name', None) + kwargs.pop('max_retries', None) + kwargs.pop('description', None) + job_func(*args, **kwargs) + + with mock.patch(job_path) as patched_job: + # call the function directly instead of '.delay()' + patched_job.delay.side_effect = clean_args_for_func + yield patched_job From 73bab2926da93cb2e80deb23357e84ef5aa11373 Mon Sep 17 00:00:00 2001 From: Nicolas Piganeau Date: Mon, 26 Oct 2015 10:17:26 +0100 Subject: [PATCH 07/43] [FIX] Set job to failed after non-retryable OperationalError --- connector/controllers/main.py | 63 +++++++++++++++++++---------------- 1 file changed, 35 insertions(+), 28 deletions(-) diff --git a/connector/controllers/main.py b/connector/controllers/main.py index 16c46007d..4c5619989 100644 --- a/connector/controllers/main.py +++ b/connector/controllers/main.py @@ -42,6 +42,32 @@ def _load_job(self, session, job_uuid): raise return job + def _try_perform_job(self, session_hdl, job): + """Try to perform the job.""" + + # if the job has been manually set to DONE or PENDING, + # or if something tries to run a job that is not enqueued + # before its execution, stop + if job.state != ENQUEUED: + _logger.warning('job %s is in state %s ' + 'instead of enqueued in /runjob', + job.uuid, job.state) + return + + with session_hdl.session() as session: + # TODO: set_started should be done atomically with + # update queue_job set=state=started + # where state=enqueid and id= + job.set_started() + self.job_storage_class(session).store(job) + + _logger.debug('%s started', job) + with session_hdl.session() as session: + job.perform(session) + job.set_done() + self.job_storage_class(session).store(job) + _logger.debug('%s done', job) + @http.route('/connector/runjob', type='http', auth='none') def runjob(self, db, job_uuid, **kw): @@ -60,28 +86,16 @@ def retry_postpone(job, message, seconds=None): return "" try: - # if the job has been manually set to DONE or PENDING, - # or if something tries to run a job that is not enqueued - # before its execution, stop - if job.state != ENQUEUED: - _logger.warning('job %s is in state %s ' - 'instead of enqueued in /runjob', - job_uuid, job.state) - return + try: + self._try_perform_job(session_hdl, job) + except OperationalError as err: + # Automatically retry the typical transaction serialization + # errors + if err.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY: + raise - with session_hdl.session() as session: - # TODO: set_started should be done atomically with - # update queue_job set=state=started - # where state=enqueid and id= - job.set_started() - self.job_storage_class(session).store(job) - - _logger.debug('%s started', job) - with session_hdl.session() as session: - job.perform(session) - job.set_done() - self.job_storage_class(session).store(job) - _logger.debug('%s done', job) + retry_postpone(job, unicode(err), seconds=PG_RETRY) + _logger.debug('%s OperationalError, postponed', job) except NothingToDoJob as err: if unicode(err): @@ -97,13 +111,6 @@ def retry_postpone(job, message, seconds=None): retry_postpone(job, unicode(err), seconds=err.seconds) _logger.debug('%s postponed', job) - except OperationalError as err: - # 
Automatically retry the typical transaction serialization errors - if err.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY: - raise - retry_postpone(job, unicode(err), seconds=PG_RETRY) - _logger.debug('%s OperationalError, postponed', job) - except (FailedJobError, Exception): buff = StringIO() traceback.print_exc(file=buff) From cf2b6bc53f8a18f91c213a3697bb0c3df197086a Mon Sep 17 00:00:00 2001 From: Nicolas PIGANEAU Date: Wed, 21 Oct 2015 16:26:45 +0200 Subject: [PATCH 08/43] [FIX] Yield session in a clear_upon_failure() environment --- connector/session.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/connector/session.py b/connector/session.py index 93d6392ba..4bba37c7e 100644 --- a/connector/session.py +++ b/connector/session.py @@ -77,9 +77,10 @@ def session(self): context=self.context) try: - RegistryManager.check_registry_signaling(self.db_name) - yield session - RegistryManager.signal_caches_change(self.db_name) + with session.env.clear_upon_failure(): + RegistryManager.check_registry_signaling(self.db_name) + yield session + RegistryManager.signal_caches_change(self.db_name) except: session.rollback() raise From 66b66f88b7680b34b12380bf1e526be68428a742 Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Fri, 30 Oct 2015 11:35:10 +0100 Subject: [PATCH 09/43] Update mailing list address --- README.md | 4 ++-- connector/doc/index.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 42a3858c5..a870f903a 100644 --- a/README.md +++ b/README.md @@ -9,8 +9,8 @@ Odoo Connector Modules This Odoo add-on has a modular and generic core, with the ability to be extended with additional modules for new features or customizations. -The team for the community is here: https://launchpad.net/~openerp-connector-community -Please join and subscribe to the mailing list. +Feel free to subscribe on the mailing list (its name is 'Connectors'): +https://odoo-community.org/groups Documentation: http://www.odoo-connector.com diff --git a/connector/doc/index.rst b/connector/doc/index.rst index 3e5c7a7d9..6bf7dee44 100644 --- a/connector/doc/index.rst +++ b/connector/doc/index.rst @@ -18,7 +18,7 @@ customizations. The development of Odoo Connector has been started by `Camptocamp`_ and is now maintained by `Camptocamp`_, `Akretion`_, `Acsone`_ and several :ref:`contributors`. -*Subscribe to the* `project's mailing list`_ +*Subscribe to the* `project's mailing list (name: Connectors)`_ *Learn how to* :ref:`contribute` @@ -42,7 +42,7 @@ Core Features .. _Acsone: http://www.acsone.eu .. _`source code is available on GitHub`: https://github.com/OCA/connector .. _`AGPL version 3`: http://www.gnu.org/licenses/agpl-3.0.html -.. _`project's mailing list`: https://launchpad.net/~openerp-connector-community +.. 
_`project's mailing list (name: Connectors)`: https://odoo-community.org/groups ********************************* Connectors based on the framework From 2fc2eb5301337539ba27794fe1de5540443bec20 Mon Sep 17 00:00:00 2001 From: Olivier LAURENT Date: Fri, 30 Oct 2015 20:51:03 +0100 Subject: [PATCH 10/43] [8.0][FIX] when a job fails, inactive users are added to its followers --- connector/queue/job.py | 2 +- connector/tests/test_job.py | 20 +++++++++++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/connector/queue/job.py b/connector/queue/job.py index 4dcc00a6e..1c65b73e8 100644 --- a/connector/queue/job.py +++ b/connector/queue/job.py @@ -148,7 +148,7 @@ def db_record_from_uuid(self, job_uuid): model = self.job_model.sudo().with_context(active_test=False) record = model.search([('uuid', '=', job_uuid)], limit=1) if record: - return record + return record.with_env(self.job_model.env) def db_record(self, job_): return self.db_record_from_uuid(job_.uuid) diff --git a/connector/tests/test_job.py b/connector/tests/test_job.py index f9f3841de..15918578c 100644 --- a/connector/tests/test_job.py +++ b/connector/tests/test_job.py @@ -486,12 +486,13 @@ def setUp(self): super(TestJobModel, self).setUp() self.session = ConnectorSession(self.cr, self.uid) self.queue_job = self.env['queue.job'] + self.user = self.env['res.users'] def _create_job(self): test_job = Job(func=task_a) storage = OpenERPJobStorage(self.session) storage.store(test_job) - stored = self.queue_job.search([('uuid', '=', test_job.uuid)]) + stored = storage.db_record_from_uuid(test_job.uuid) self.assertEqual(len(stored), 1) return stored @@ -528,6 +529,23 @@ def test_message_when_write_fail(self): messages = stored.message_ids self.assertEqual(len(messages), 2) + def test_follower_when_write_fail(self): + group = self.env.ref('connector.group_connector_manager') + vals = {'name': 'xx', + 'login': 'xx', + 'groups_id': [(6, 0, [group.id])], + 'active': False, + } + inactiveusr = self.user.create(vals) + self.assertTrue(inactiveusr.partner_id.active) + self.assertFalse(inactiveusr in group.users) + stored = self._create_job() + stored.write({'state': 'failed'}) + followers = stored.message_follower_ids + self.assertFalse(inactiveusr.partner_id in followers) + self.assertFalse( + set([u.partner_id for u in group.users]) - set(followers)) + def test_autovacuum(self): stored = self._create_job() stored2 = self._create_job() From 6e23779fdc5b8c4967e603aa9d40d26470b59fea Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Mon, 2 Nov 2015 15:26:15 +0100 Subject: [PATCH 11/43] Context manager to acquire Postgres advisory locks This Postgres feature is invaluable when dealing with synchronisations, especially when importing concurrently records from a system. When we export a record, we are able to acquire a lock on the exported record to prevent 2 jobs to export it at the same time. This is different when we import a record for the first time and with several jobs running in parallel, chances are high that 2 jobs will import the same record at the same moment. The Postgres advisory lock comes handy there for they allow to acquire an application lock. Usually we'll acquire the lock at the beginning of an import (beginning of ``Importer.run()``) and we'll throw a ``RetryableJobError`` if the lock cannot be acquired so the job is retried later. The lock will remain in place until the end of the transaction. 
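In code, the guard at the beginning of an importer could look roughly like
this (a hypothetical sketch: `pg_try_advisory_lock` and the
`RetryableJobError` arguments are the pieces touched by this patch, while
the importer method and its attributes are made up for illustration):

    from openerp.addons.connector.connector import pg_try_advisory_lock
    from openerp.addons.connector.exception import RetryableJobError

    def run(self, external_id):
        # sketch of the start of an importer
        lock_name = 'import_record({}, {}, {}, {})'.format(
            self.backend_record._name, self.backend_record.id,
            self.model._name, external_id)
        if not pg_try_advisory_lock(self.env, lock_name):
            # another job is importing the same record right now,
            # let this job be retried in a couple of seconds
            raise RetryableJobError('Could not acquire advisory lock',
                                    seconds=2, ignore_retry=True)
        # ...continue with the actual import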
Example: - Job 1 imports Partner A - Job 2 imports Partner B - Partner A has a category X which happens not to exist yet - Partner B has a category X which happens not to exist yet - Job 1 import category X as a dependency - Job 2 import category X as a dependency Since both jobs are executed concurrently, they both create a record for category X. With this lock: - Job 1 imports Partner A, it puts a lock for this partner - Job 2 imports Partner B, it puts a lock for this partner - Partner A has a category X which happens not to exist yet - Partner B has a category X which happens not to exist yet - Job 1 import category X as a dependency, it puts a lock for this category - Job 2 import category X as a dependency, try to put a lock but can't, Job 2 is retried later, and when it is retried, it sees the category X created by Job 1 See http://topopps.com/implementing-postgres-advisory-locks/ for the article where I learned about the computation of the hash for this purpose. --- connector/connector.py | 102 ++++++++++++++++++++++++++++++ connector/tests/test_connector.py | 77 ++++++++++++++++++---- 2 files changed, 168 insertions(+), 11 deletions(-) diff --git a/connector/connector.py b/connector/connector.py index 8e74898c5..a7f18850f 100644 --- a/connector/connector.py +++ b/connector/connector.py @@ -19,11 +19,15 @@ # ############################################################################## +import hashlib import logging +import struct + from contextlib import contextmanager from openerp import models, fields from .deprecate import log_deprecate, DeprecatedClass +from .exception import RetryableJobError _logger = logging.getLogger(__name__) @@ -227,6 +231,33 @@ def get_binder_for_model(self, model=None): log_deprecate('renamed to binder_for()') return self.binder_for(model=model) + @contextmanager + def try_advisory_lock(self, lock, retry_seconds=1): + """ Context manager, tries to acquire a Postgres transactional + advisory lock. + + If the lock cannot be acquired, it raises a + ``RetryableJobError`` so the jobs is retried after n + ``retry_seconds``. + + See :func:``openerp.addons.connector.connector.pg_try_advisory_lock`` + for details. + + :param lock: The lock name. Can be anything convertible to a + string. It needs to represents what should not be synchronized + concurrently so usually the string will contain at least: the + action, the backend type, the backend id, the model name, the + external id + :param retry_seconds: number of seconds after which a job should + be retried when the lock cannot be acquired. + """ + if pg_try_advisory_lock(self.env, lock): + yield + else: + raise RetryableJobError('Could not acquire advisory lock', + seconds=retry_seconds, + ignore_retry=True) + class ConnectorEnvironment(object): """ Environment used by the different units for the synchronization. @@ -466,3 +497,74 @@ def unwrap_model(self): 'Cannot unwrap model %s, because it has no %s fields' % (self.model._name, self._openerp_field)) return column.comodel_name + + +def pg_try_advisory_lock(env, lock): + """ Try to acquire a Postgres transactional advisory lock. + + The function tries to acquire a lock, returns a boolean indicating + if it could be obtained or not. An acquired lock is released at the + end of the transaction. + + A typical use is to acquire a lock at the beginning of an importer + to prevent 2 jobs to do the same import at the same time. Since the + record doesn't exist yet, we can't put a lock on a record, so we put + an advisory lock. 
+ + Example: + - Job 1 imports Partner A + - Job 2 imports Partner B + - Partner A has a category X which happens not to exist yet + - Partner B has a category X which happens not to exist yet + - Job 1 import category X as a dependency + - Job 2 import category X as a dependency + + Since both jobs are executed concurrently, they both create a record + for category X so we have duplicated records. With this lock: + + - Job 1 imports Partner A, it acquires a lock for this partner + - Job 2 imports Partner B, it acquires a lock for this partner + - Partner A has a category X which happens not to exist yet + - Partner B has a category X which happens not to exist yet + - Job 1 import category X as a dependency, it acquires a lock for + this category + - Job 2 import category X as a dependency, try to acquire a lock + but can't, Job 2 is retried later, and when it is retried, it + sees the category X created by Job 1. + + The lock is acquired until the end of the transaction. + + Usage example: + + :: + + lock_name = 'import_record({}, {}, {}, {})'.format( + self.backend_record._name, + self.backend_record.id, + self.model._name, + self.lefac_id, + ) + if pg_try_advisory_lock(lock_name): + # do sync + else: + raise RetryableJobError('Could not acquire advisory lock', + seconds=2, + ignore_retry=True) + + :param env: the Odoo Environment + :param lock: The lock name. Can be anything convertible to a + string. It needs to represents what should not be synchronized + concurrently so usually the string will contain at least: the + action, the backend type, the backend id, the model name, the + external id + :return True/False whether lock was acquired. + """ + hasher = hashlib.sha1() + hasher.update('{}'.format(lock)) + # pg_lock accepts an int8 so we build an hash composed with + # contextual information and we throw away some bits + int_lock = struct.unpack('q', hasher.digest()[:8]) + + env.cr.execute('SELECT pg_try_advisory_xact_lock(%s);', (int_lock,)) + acquired = env.cr.fetchone()[0] + return acquired diff --git a/connector/tests/test_connector.py b/connector/tests/test_connector.py index 0d5393cf0..2d4fec1ec 100644 --- a/connector/tests/test_connector.py +++ b/connector/tests/test_connector.py @@ -3,13 +3,31 @@ import mock import unittest2 +from openerp import api +from openerp.modules.registry import RegistryManager from openerp.tests import common from openerp.addons.connector import connector -from openerp.addons.connector.connector import (ConnectorUnit, - ConnectorEnvironment) +from openerp.addons.connector.exception import RetryableJobError +from openerp.addons.connector.connector import ( + ConnectorEnvironment, + ConnectorUnit, + pg_try_advisory_lock, +) from openerp.addons.connector.session import ConnectorSession +def mock_connector_unit(env): + session = ConnectorSession(env.cr, env.uid, + context=env.context) + backend_record = mock.Mock(name='BackendRecord') + backend = mock.Mock(name='Backend') + backend_record.get_backend.return_value = backend + connector_env = connector.ConnectorEnvironment(backend_record, + session, + 'res.users') + return ConnectorUnit(connector_env) + + class ConnectorHelpers(unittest2.TestCase): def test_openerp_module_name(self): @@ -122,15 +140,7 @@ def test_instance(self): class ModelUnit(ConnectorUnit): _model_name = 'res.users' - session = ConnectorSession(self.env.cr, self.env.uid, - context=self.env.context) - backend_record = mock.Mock(name='BackendRecord') - backend = mock.Mock(name='Backend') - backend_record.get_backend.return_value = 
backend - connector_env = connector.ConnectorEnvironment(backend_record, - session, - 'res.users') - unit = ConnectorUnit(connector_env) + unit = mock_connector_unit(self.env) self.assertEqual(unit.model, self.env['res.users']) self.assertEqual(unit.env, self.env) self.assertEqual(unit.localcontext, self.env.context) @@ -177,3 +187,48 @@ def __init__(self, backend_record, session, model_name, api=None): self.assertEqual(type(new_env), MyConnectorEnvironment) self.assertEqual(new_env.api, api) + + +class TestAdvisoryLock(common.TransactionCase): + + def setUp(self): + super(TestAdvisoryLock, self).setUp() + self.registry2 = RegistryManager.get(common.get_db_name()) + self.cr2 = self.registry2.cursor() + self.env2 = api.Environment(self.cr2, self.env.uid, {}) + + @self.addCleanup + def reset_cr2(): + # rollback and close the cursor, and reset the environments + self.env2.reset() + self.cr2.rollback() + self.cr2.close() + + def test_concurrent_lock(self): + """ 2 concurrent transactions cannot acquire the same lock """ + lock = 'import_record({}, {}, {}, {})'.format( + 'backend.name', + 1, + 'res.partner', + '999999', + ) + acquired = pg_try_advisory_lock(self.env, lock) + self.assertTrue(acquired) + inner_acquired = pg_try_advisory_lock(self.env2, lock) + self.assertFalse(inner_acquired) + + def test_concurrent_import_lock(self): + """ A 2nd concurrent transaction must retry """ + lock = 'import_record({}, {}, {}, {})'.format( + 'backend.name', + 1, + 'res.partner', + '999999', + ) + connector_unit = mock_connector_unit(self.env) + with connector_unit.try_advisory_lock(lock): + connector_unit2 = mock_connector_unit(self.env2) + with self.assertRaises(RetryableJobError) as cm: + with connector_unit2.try_advisory_lock(lock, retry_seconds=3): + pass + self.assertEquals(cm.exception.seconds, 3) From 15e0e45731971d68a4063e784f59b67025e95d70 Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Tue, 3 Nov 2015 08:34:11 +0100 Subject: [PATCH 12/43] Rename try_advisory_lock to advisory_lock_or_retry It is no longer a context manager, because we would expect the lock to be released at the end of the 'with' statement but it lasts until the end of the transaction. --- connector/connector.py | 32 +++++++++++++++++++------------ connector/tests/test_connector.py | 9 ++++----- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/connector/connector.py b/connector/connector.py index a7f18850f..46b81dfcf 100644 --- a/connector/connector.py +++ b/connector/connector.py @@ -231,29 +231,37 @@ def get_binder_for_model(self, model=None): log_deprecate('renamed to binder_for()') return self.binder_for(model=model) - @contextmanager - def try_advisory_lock(self, lock, retry_seconds=1): - """ Context manager, tries to acquire a Postgres transactional - advisory lock. + def advisory_lock_or_retry(self, lock, retry_seconds=1): + """ Acquire a Postgres transactional advisory lock or retry job - If the lock cannot be acquired, it raises a - ``RetryableJobError`` so the jobs is retried after n + When the lock cannot be acquired, it raises a + ``RetryableJobError`` so the job is retried after n ``retry_seconds``. + Usage example: + + :: + + lock_name = 'import_record({}, {}, {}, {})'.format( + self.backend_record._name, + self.backend_record.id, + self.model._name, + self.external_id, + ) + self.advisory_lock_or_retry(lock_name, retry_seconds=2) + See :func:``openerp.addons.connector.connector.pg_try_advisory_lock`` for details. :param lock: The lock name. Can be anything convertible to a - string. 
It needs to represents what should not be synchronized - concurrently so usually the string will contain at least: the + string. It needs to represent what should not be synchronized + concurrently, usually the string will contain at least: the action, the backend type, the backend id, the model name, the external id :param retry_seconds: number of seconds after which a job should be retried when the lock cannot be acquired. """ - if pg_try_advisory_lock(self.env, lock): - yield - else: + if not pg_try_advisory_lock(self.env, lock): raise RetryableJobError('Could not acquire advisory lock', seconds=retry_seconds, ignore_retry=True) @@ -542,7 +550,7 @@ def pg_try_advisory_lock(env, lock): self.backend_record._name, self.backend_record.id, self.model._name, - self.lefac_id, + self.external_id, ) if pg_try_advisory_lock(lock_name): # do sync diff --git a/connector/tests/test_connector.py b/connector/tests/test_connector.py index 2d4fec1ec..33ab6f380 100644 --- a/connector/tests/test_connector.py +++ b/connector/tests/test_connector.py @@ -226,9 +226,8 @@ def test_concurrent_import_lock(self): '999999', ) connector_unit = mock_connector_unit(self.env) - with connector_unit.try_advisory_lock(lock): - connector_unit2 = mock_connector_unit(self.env2) - with self.assertRaises(RetryableJobError) as cm: - with connector_unit2.try_advisory_lock(lock, retry_seconds=3): - pass + connector_unit.advisory_lock_or_retry(lock) + connector_unit2 = mock_connector_unit(self.env2) + with self.assertRaises(RetryableJobError) as cm: + connector_unit2.advisory_lock_or_retry(lock, retry_seconds=3) self.assertEquals(cm.exception.seconds, 3) From 297dbdc65ab019fd54b0a6e96337ba9e1c54bba3 Mon Sep 17 00:00:00 2001 From: OCA Transbot Date: Sat, 7 Nov 2015 22:18:24 -0500 Subject: [PATCH 13/43] OCA Transbot updated translations from Transifex --- connector/i18n/ro.po | 745 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 745 insertions(+) create mode 100644 connector/i18n/ro.po diff --git a/connector/i18n/ro.po b/connector/i18n/ro.po new file mode 100644 index 000000000..3b9bcbd9c --- /dev/null +++ b/connector/i18n/ro.po @@ -0,0 +1,745 @@ +# Translation of Odoo Server. +# This file contains the translation of the following modules: +# * connector +# +# Translators: +# Dorin Hongu , 2015 +msgid "" +msgstr "" +"Project-Id-Version: connector (8.0)\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-11-06 19:37+0000\n" +"PO-Revision-Date: 2015-11-03 17:25+0000\n" +"Last-Translator: Dorin Hongu \n" +"Language-Team: Romanian (http://www.transifex.com/oca/OCA-connector-8-0/language/ro/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: \n" +"Language: ro\n" +"Plural-Forms: nplurals=3; plural=(n==1?0:(((n%100>19)||((n%100==0)&&(n!=0)))?2:1));\n" + +#. module: connector +#: model:ir.actions.act_window,help:connector.action_connector_checkpoint +msgid "" +"
<p class=\"oe_view_nocontent_create\">\n"
+"                No record to check.\n"
+"            </p><p>\n"
+"                When a connector imports new records which have\n"
+"                configuration or reviews to do manually, they\n"
+"                will appear in this list. Once a record has been\n"
+"                checked, click on the 'Reviewed' button.\n"
+"            </p><p>\n"
+"                The connectors list the new records to verify\n"
+"                based on their type. Only some need a manual\n"
+"                review.\n"
+"            </p>
\n" +" " +msgstr "" + +#. module: connector +#: code:addons/connector/checkpoint/checkpoint.py:136 +#, python-format +msgid "A %s needs a review." +msgstr "" + +#. module: connector +#: field:connector.config.settings,module_portal:0 +msgid "Activate the customer portal" +msgstr "" + +#. module: connector +#: field:queue.job,active:0 +msgid "Active" +msgstr "Activ" + +#. module: connector +#: field:connector.config.settings,alias_domain:0 +msgid "Alias Domain" +msgstr "" + +#. module: connector +#: field:connector.config.settings,module_share:0 +msgid "Allow documents sharing" +msgstr "" + +#. module: connector +#: field:connector.config.settings,module_google_calendar:0 +msgid "Allow the users to synchronize their calendar with Google Calendar" +msgstr "" + +#. module: connector +#: field:connector.config.settings,module_base_import:0 +msgid "Allow users to import data from CSV files" +msgstr "" + +#. module: connector +#: view:connector.config.settings:connector.view_connector_config_settings +msgid "Apply" +msgstr "Aplică" + +#. module: connector +#: field:connector.config.settings,module_google_drive:0 +msgid "Attach Google documents to any record" +msgstr "" + +#. module: connector +#: view:connector.config.settings:connector.view_connector_config_settings +msgid "Backends" +msgstr "" + +#. module: connector +#: view:connector.checkpoint.review:connector.view_connector_checkpoint_review +#: view:connector.config.settings:connector.view_connector_config_settings +#: view:queue.requeue.job:connector.view_requeue_job +msgid "Cancel" +msgstr "" + +#. module: connector +#: code:addons/connector/queue/job.py:574 +#, python-format +msgid "Canceled. Nothing to do." +msgstr "" + +#. module: connector +#: code:addons/connector/queue/model.py:457 +#, python-format +msgid "Cannot change the root channel" +msgstr "" + +#. module: connector +#: code:addons/connector/queue/model.py:464 +#, python-format +msgid "Cannot remove the root channel" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search field:queue.job,channel:0 +#: view:queue.job.function:connector.view_queue_job_function_search +#: field:queue.job.function,channel_id:0 +msgid "Channel" +msgstr "" + +#. module: connector +#: sql_constraint:queue.job.channel:0 +msgid "Channel complete name must be unique" +msgstr "" + +#. module: connector +#: model:ir.actions.act_window,name:connector.action_queue_job_channel +#: model:ir.ui.menu,name:connector.menu_queue_job_channel +#: view:queue.job.channel:connector.view_queue_job_channel_form +#: view:queue.job.channel:connector.view_queue_job_channel_search +#: view:queue.job.channel:connector.view_queue_job_channel_tree +msgid "Channels" +msgstr "" + +#. module: connector +#: model:ir.ui.menu,name:connector.menu_checkpoint +msgid "Checkpoint" +msgstr "" + +#. module: connector +#: field:connector.checkpoint.review,checkpoint_ids:0 +msgid "Checkpoints" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_connector_checkpoint_review +msgid "Checkpoints Review" +msgstr "" + +#. module: connector +#: view:connector.checkpoint:connector.view_connector_checkpoint_form +msgid "Click on the" +msgstr "" + +#. module: connector +#: field:queue.job,company_id:0 +msgid "Company" +msgstr "" + +#. module: connector +#: field:queue.job.channel,complete_name:0 field:queue.job.function,channel:0 +msgid "Complete Name" +msgstr "" + +#. 
module: connector +#: view:connector.config.settings:connector.view_connector_config_settings +#: model:ir.actions.act_window,name:connector.action_connector_config_settings +msgid "Configure Connector" +msgstr "" + +#. module: connector +#: model:ir.module.category,name:connector.module_category_connector +#: model:ir.ui.menu,name:connector.menu_connector_config_settings +msgid "Connector" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_connector_backend +msgid "Connector Backend" +msgstr "" + +#. module: connector +#: view:connector.checkpoint:connector.view_connector_checkpoint_form +#: view:connector.checkpoint:connector.view_connector_checkpoint_search +#: view:connector.checkpoint:connector.view_connector_checkpoint_tree +#: model:ir.actions.act_window,name:connector.action_connector_checkpoint +#: model:ir.model,name:connector.model_connector_checkpoint +msgid "Connector Checkpoint" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_connector_config_settings +msgid "Connector Configuration" +msgstr "" + +#. module: connector +#: model:res.groups,name:connector.group_connector_manager +msgid "Connector Manager" +msgstr "" + +#. module: connector +#: model:ir.ui.menu,name:connector.menu_connector +#: model:ir.ui.menu,name:connector.menu_connector_root +#: view:res.partner:connector.view_partner_connector_form +msgid "Connectors" +msgstr "" + +#. module: connector +#: field:queue.job,date_created:0 +msgid "Created Date" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,create_uid:0 +#: field:connector.checkpoint.review,create_uid:0 +#: field:connector.config.settings,create_uid:0 +#: field:queue.job.channel,create_uid:0 field:queue.requeue.job,create_uid:0 +msgid "Created by" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,create_date:0 +#: field:connector.checkpoint.review,create_date:0 +#: field:connector.config.settings,create_date:0 +#: field:queue.job.channel,create_date:0 field:queue.requeue.job,create_date:0 +msgid "Created on" +msgstr "" + +#. module: connector +#: field:queue.job,retry:0 +msgid "Current try" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_form +msgid "Current try / max. retries" +msgstr "" + +#. module: connector +#: field:queue.job,date_done:0 +msgid "Date Done" +msgstr "" + +#. module: connector +#: help:connector.checkpoint,message_last_post:0 +#: help:queue.job,message_last_post:0 +msgid "Date of the last message posted on the record." +msgstr "" + +#. module: connector +#: field:queue.job,name:0 +msgid "Description" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search selection:queue.job,state:0 +msgid "Done" +msgstr "" + +#. module: connector +#: field:queue.job,date_enqueued:0 +msgid "Enqueue Time" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search selection:queue.job,state:0 +msgid "Enqueued" +msgstr "" + +#. module: connector +#: field:queue.job,exc_info:0 +msgid "Exception Info" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_form +msgid "Exception Information" +msgstr "" + +#. module: connector +#: field:queue.job,eta:0 +msgid "Execute only after" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_external_binding +msgid "External Binding (abstract)" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search selection:queue.job,state:0 +msgid "Failed" +msgstr "" + +#. 
module: connector +#: field:connector.checkpoint,message_follower_ids:0 +#: field:queue.job,message_follower_ids:0 +msgid "Followers" +msgstr "" + +#. module: connector +#: field:queue.job,func_name:0 +msgid "Func name" +msgstr "" + +#. module: connector +#: help:connector.config.settings,module_portal:0 +msgid "Give your customers access to their documents." +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search +#: view:queue.job.function:connector.view_queue_job_function_search +msgid "Group By" +msgstr "" + +#. module: connector +#: view:connector.checkpoint:connector.view_connector_checkpoint_search +msgid "Group By..." +msgstr "" + +#. module: connector +#: help:connector.checkpoint,message_summary:0 +#: help:queue.job,message_summary:0 +msgid "" +"Holds the Chatter summary (number of messages, ...). This summary is " +"directly in html format in order to be inserted in kanban views." +msgstr "" + +#. module: connector +#: field:connector.backend,id:0 field:connector.checkpoint,id:0 +#: field:connector.checkpoint.review,id:0 field:connector.config.settings,id:0 +#: field:external.binding,id:0 field:queue.job,id:0 +#: field:queue.job.channel,id:0 field:queue.job.function,id:0 +#: field:queue.requeue.job,id:0 field:queue.worker,id:0 +msgid "ID" +msgstr "" + +#. module: connector +#: help:connector.checkpoint,message_unread:0 help:queue.job,message_unread:0 +msgid "If checked new messages require your attention." +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_form +msgid "If the max. retries is 0, the number of retries is infinite." +msgstr "" + +#. module: connector +#: help:connector.config.settings,alias_domain:0 +msgid "" +"If you have setup a catch-all email domain redirected to the Odoo server, " +"enter the domain name here." +msgstr "" + +#. module: connector +#: field:connector.checkpoint,backend_id:0 +msgid "Imported from" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,message_is_follower:0 +#: field:queue.job,message_is_follower:0 +msgid "Is a Follower" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_queue_job_channel +msgid "Job Channels" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search +#: field:queue.job,job_function_id:0 +msgid "Job Function" +msgstr "" + +#. module: connector +#: model:ir.actions.act_window,name:connector.action_queue_job_function +#: model:ir.model,name:connector.model_queue_job_function +#: model:ir.ui.menu,name:connector.menu_queue_job_function +#: field:queue.job.channel,job_function_ids:0 +#: view:queue.job.function:connector.view_queue_job_function_form +#: view:queue.job.function:connector.view_queue_job_function_search +#: view:queue.job.function:connector.view_queue_job_function_tree +msgid "Job Functions" +msgstr "" + +#. module: connector +#: model:mail.message.subtype,name:connector.mt_job_failed +msgid "Job failed" +msgstr "" + +#. module: connector +#: model:ir.actions.act_window,name:connector.action_queue_job +#: model:ir.ui.menu,name:connector.menu_queue_job +#: view:queue.job:connector.view_queue_job_form +#: view:queue.job:connector.view_queue_job_search +#: view:queue.job:connector.view_queue_job_tree +#: field:queue.requeue.job,job_ids:0 field:queue.worker,job_ids:0 +msgid "Jobs" +msgstr "" + +#. module: connector +#: field:queue.worker,date_alive:0 +msgid "Last Alive Check" +msgstr "" + +#. 
module: connector +#: field:connector.checkpoint,message_last_post:0 +#: field:queue.job,message_last_post:0 +msgid "Last Message Date" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,write_uid:0 +#: field:connector.checkpoint.review,write_uid:0 +#: field:connector.config.settings,write_uid:0 +#: field:queue.job.channel,write_uid:0 field:queue.requeue.job,write_uid:0 +msgid "Last Updated by" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,write_date:0 +#: field:connector.checkpoint.review,write_date:0 +#: field:connector.config.settings,write_date:0 +#: field:queue.job.channel,write_date:0 field:queue.requeue.job,write_date:0 +msgid "Last Updated on" +msgstr "" + +#. module: connector +#: field:external.binding,sync_date:0 +msgid "Last synchronization date" +msgstr "" + +#. module: connector +#: field:connector.config.settings,module_multi_company:0 +msgid "Manage multiple companies" +msgstr "" + +#. module: connector +#: code:addons/connector/queue/model.py:141 +#, python-format +msgid "Manually set to done by %s" +msgstr "" + +#. module: connector +#: field:queue.job,max_retries:0 +msgid "Max. retries" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,message_ids:0 field:queue.job,message_ids:0 +msgid "Messages" +msgstr "" + +#. module: connector +#: help:connector.checkpoint,message_ids:0 help:queue.job,message_ids:0 +msgid "Messages and communication history" +msgstr "" + +#. module: connector +#: view:connector.checkpoint:connector.view_connector_checkpoint_search +#: field:connector.checkpoint,model_id:0 field:queue.job,model_name:0 +msgid "Model" +msgstr "" + +#. module: connector +#: field:connector.backend,name:0 field:queue.job.channel,name:0 +#: field:queue.job.function,name:0 +msgid "Name" +msgstr "" + +#. module: connector +#: view:connector.checkpoint:connector.view_connector_checkpoint_search +#: selection:connector.checkpoint,state:0 +msgid "Need Review" +msgstr "" + +#. module: connector +#: code:addons/connector/queue/model.py:117 +#, python-format +msgid "No action available for this job" +msgstr "" + +#. module: connector +#: model:ir.actions.client,name:connector.action_client_connector_menu +msgid "Open Connector Menu" +msgstr "" + +#. module: connector +#: field:queue.worker,pid:0 +msgid "PID" +msgstr "" + +#. module: connector +#: field:queue.job.channel,parent_id:0 +msgid "Parent Channel" +msgstr "" + +#. module: connector +#: code:addons/connector/queue/model.py:449 +#, python-format +msgid "Parent channel required." +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search selection:queue.job,state:0 +msgid "Pending" +msgstr "" + +#. module: connector +#: field:queue.job,func:0 +msgid "Pickled Function" +msgstr "" + +#. module: connector +#: field:queue.job,priority:0 +msgid "Priority" +msgstr "" + +#. module: connector +#: model:ir.ui.menu,name:connector.menu_queue +msgid "Queue" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_queue_job +msgid "Queue Job" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_queue_worker +msgid "Queue Worker" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,record_id:0 +msgid "Record ID" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_form +msgid "Related" +msgstr "" + +#. module: connector +#: code:addons/connector/related_action.py:48 +#, python-format +msgid "Related Record" +msgstr "" + +#. 
module: connector +#: field:connector.config.settings,font:0 +msgid "Report Font" +msgstr "" + +#. module: connector +#: view:queue.requeue.job:connector.view_requeue_job +msgid "Requeue" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_form +msgid "Requeue Job" +msgstr "" + +#. module: connector +#: model:ir.actions.act_window,name:connector.action_requeue_job +#: view:queue.requeue.job:connector.view_requeue_job +msgid "Requeue Jobs" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_form field:queue.job,result:0 +msgid "Result" +msgstr "" + +#. module: connector +#: view:connector.checkpoint.review:connector.view_connector_checkpoint_review +#: model:ir.actions.act_window,name:connector.action_connector_checkpoint_review +msgid "Review Checkpoints" +msgstr "" + +#. module: connector +#: view:connector.checkpoint:connector.view_connector_checkpoint_form +#: view:connector.checkpoint:connector.view_connector_checkpoint_search +#: view:connector.checkpoint:connector.view_connector_checkpoint_tree +#: selection:connector.checkpoint,state:0 +msgid "Reviewed" +msgstr "" + +#. module: connector +#: view:connector.checkpoint.review:connector.view_connector_checkpoint_review +msgid "Set as reviewed" +msgstr "" + +#. module: connector +#: help:connector.config.settings,font:0 +msgid "" +"Set the font into the report header, it will be used as default font in the " +"RML reports of the user company" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_form +msgid "Set to 'Done'" +msgstr "" + +#. module: connector +#: help:connector.config.settings,module_share:0 +msgid "Share or embbed any screen of Odoo." +msgstr "" + +#. module: connector +#: code:addons/connector/queue/model.py:187 +#, python-format +msgid "" +"Something bad happened during the execution of the job. More details in the " +"'Exception Information' section." +msgstr "" + +#. module: connector +#: field:queue.job,date_started:0 field:queue.worker,date_start:0 +msgid "Start Date" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search selection:queue.job,state:0 +msgid "Started" +msgstr "" + +#. module: connector +#: view:queue.job:connector.view_queue_job_search field:queue.job,state:0 +msgid "State" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,state:0 +msgid "Status" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,message_summary:0 +#: field:queue.job,message_summary:0 +msgid "Summary" +msgstr "" + +#. module: connector +#: field:queue.job,func_string:0 +msgid "Task" +msgstr "" + +#. module: connector +#: help:queue.job,max_retries:0 +msgid "" +"The job will fail if the number of tries reach the max. retries.\n" +"Retries are infinite when empty." +msgstr "" + +#. module: connector +#: help:connector.checkpoint,backend_id:0 +msgid "The record has been imported from this backend" +msgstr "" + +#. module: connector +#: view:connector.checkpoint.review:connector.view_connector_checkpoint_review +msgid "The selected checkpoints will be set as reviewed." +msgstr "" + +#. module: connector +#: view:queue.requeue.job:connector.view_requeue_job +msgid "The selected jobs will be requeued." +msgstr "" + +#. module: connector +#: help:connector.config.settings,module_google_calendar:0 +msgid "This installs the module google_calendar." +msgstr "" + +#. module: connector +#: help:connector.config.settings,module_google_drive:0 +msgid "This installs the module google_docs." +msgstr "" + +#. 
module: connector +#: field:queue.job,uuid:0 field:queue.worker,uuid:0 +msgid "UUID" +msgstr "" + +#. module: connector +#: field:connector.checkpoint,message_unread:0 +#: field:queue.job,message_unread:0 +msgid "Unread Messages" +msgstr "" + +#. module: connector +#: field:connector.config.settings,module_auth_oauth:0 +msgid "" +"Use external authentication providers, sign in with google, facebook, ..." +msgstr "" + +#. module: connector +#: field:queue.job,user_id:0 +msgid "User ID" +msgstr "" + +#. module: connector +#: field:connector.backend,version:0 +msgid "Version" +msgstr "" + +#. module: connector +#: model:ir.model,name:connector.model_queue_requeue_job +msgid "Wizard to requeue a selection of jobs" +msgstr "" + +#. module: connector +#: help:connector.config.settings,module_multi_company:0 +msgid "" +"Work in multi-company environments, with appropriate security access between companies.\n" +"-This installs the module multi_company." +msgstr "" + +#. module: connector +#: field:queue.job,worker_id:0 +#: view:queue.worker:connector.view_queue_worker_form +#: view:queue.worker:connector.view_queue_worker_tree +msgid "Worker" +msgstr "" + +#. module: connector +#: model:ir.actions.act_window,name:connector.action_queue_worker +#: model:ir.ui.menu,name:connector.menu_queue_worker +msgid "Workers" +msgstr "" + +#. module: connector +#: view:connector.checkpoint.review:connector.view_connector_checkpoint_review +#: view:connector.config.settings:connector.view_connector_config_settings +#: view:queue.requeue.job:connector.view_requeue_job +msgid "or" +msgstr "" + +#. module: connector +#: view:connector.checkpoint:connector.view_connector_checkpoint_form +msgid "to verify it:" +msgstr "" From 7b8d33ec6be9ce43d34c7c338949285451698cc5 Mon Sep 17 00:00:00 2001 From: maxime-c2c Date: Mon, 16 Nov 2015 17:19:52 +0100 Subject: [PATCH 14/43] Update index.rst --- connector/doc/index.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/connector/doc/index.rst b/connector/doc/index.rst index 6bf7dee44..6e6349ae9 100644 --- a/connector/doc/index.rst +++ b/connector/doc/index.rst @@ -44,6 +44,14 @@ Core Features .. _`AGPL version 3`: http://www.gnu.org/licenses/agpl-3.0.html .. _`project's mailing list (name: Connectors)`: https://odoo-community.org/groups +**************************************** +Last minute information regarding Odoo 9 +**************************************** + +* A crowdfunding campaign is currently running to finance the migration of this connector to make it compatible with Odoo 9. **Be part of this project now!** `More info here`_ + +.. 
_`More info here`: https://www.indiegogo.com/projects/odoo-connector-odoo-magento-connector-for-odoo-9#/ + ********************************* Connectors based on the framework ********************************* From c97ebbd4d2694fc8058dab4a515c43c55224e93a Mon Sep 17 00:00:00 2001 From: Matthieu Dietrich Date: Wed, 18 Nov 2015 16:11:07 +0100 Subject: [PATCH 15/43] Set job back to 'pending' in case of exception --- connector/jobrunner/runner.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index 80688d632..d458a73a2 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -123,7 +123,7 @@ import openerp -from .channels import ChannelManager, ENQUEUED, NOT_DONE +from .channels import ChannelManager, PENDING, ENQUEUED, NOT_DONE SELECT_TIMEOUT = 60 ERROR_RECOVERY_DELAY = 5 @@ -131,19 +131,34 @@ _logger = logging.getLogger(__name__) -def _async_http_get(url): +def _async_http_get(port, db_name, job_uuid): + # Method to set failed job (due to timeout, etc) as pending, + # to avoid keeping it as enqueued. + def set_job_pending(): + conn = psycopg2.connect(openerp.sql_db.dsn(db_name)) + conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) + with closing(conn.cursor()) as cr: + cr.execute( + "UPDATE queue_job SET state=%s, " + "date_enqueued=NULL, date_started=NULL " + "WHERE uuid=%s and state=%s", (PENDING, job_uuid, ENQUEUED) + ) + # TODO: better way to HTTP GET asynchronously (grequest, ...)? # if this was python3 I would be doing this with # asyncio, aiohttp and aiopg def urlopen(): + url = ('http://localhost:%s/connector/runjob?db=%s&job_uuid=%s' % + (port, db_name, job_uuid)) try: # we are not interested in the result, so we set a short timeout # but not too short so we trap and log hard configuration errors requests.get(url, timeout=1) except requests.Timeout: - pass + set_job_pending() except: _logger.exception("exception in GET %s", url) + set_job_pending() thread = threading.Thread(target=urlopen) thread.daemon = True thread.start() @@ -285,9 +300,7 @@ def run_jobs(self): _logger.info("asking Odoo to run job %s on db %s", job.uuid, job.db_name) self.db_by_name[job.db_name].set_job_enqueued(job.uuid) - _async_http_get('http://localhost:%s' - '/connector/runjob?db=%s&job_uuid=%s' % - (self.port, job.db_name, job.uuid)) + _async_http_get(self.port, job.db_name, job.uuid) def process_notifications(self): for db in self.db_by_name.values(): From 9bcf6862f4abfb6842d26f4dbf49166c3f509345 Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Fri, 27 Nov 2015 19:22:09 +0100 Subject: [PATCH 16/43] Fix bug introduced in c97ebbd in jobrunner Commit c97ebbd was a frontport from 7.0. However, openerp.sql_db.dsn was changed in 8.0 in that it now returns a tuple. Extract the db_name from the returned tuple. --- connector/jobrunner/runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index d458a73a2..de95ec22d 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -135,7 +135,7 @@ def _async_http_get(port, db_name, job_uuid): # Method to set failed job (due to timeout, etc) as pending, # to avoid keeping it as enqueued. 
def set_job_pending(): - conn = psycopg2.connect(openerp.sql_db.dsn(db_name)) + conn = psycopg2.connect(openerp.sql_db.dsn(db_name)[0]) conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) with closing(conn.cursor()) as cr: cr.execute( From 45ca642a3e102433d9c741da38174319b52a119e Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Thu, 3 Dec 2015 18:16:56 +0100 Subject: [PATCH 17/43] Fix the fix c3fefd2 :-( The first item contains the name of the DB such as 'odoo_db' and the second item contains 'user=gbaconnier dbname=odoo_db' which is what expects psycopg2.connect. --- connector/jobrunner/runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index de95ec22d..341dbb2b7 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -135,7 +135,7 @@ def _async_http_get(port, db_name, job_uuid): # Method to set failed job (due to timeout, etc) as pending, # to avoid keeping it as enqueued. def set_job_pending(): - conn = psycopg2.connect(openerp.sql_db.dsn(db_name)[0]) + conn = psycopg2.connect(openerp.sql_db.dsn(db_name)[1]) conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) with closing(conn.cursor()) as cr: cr.execute( From 029f9e0dc36dc5f0a4b17a18579026d5d20305b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 27 Dec 2015 13:21:11 +0100 Subject: [PATCH 18/43] [FIX] correctly obtain the list of database with odoo is started with --no-database-list --- connector/jobrunner/runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index 341dbb2b7..2c8fce325 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -264,7 +264,7 @@ def get_db_names(self): if openerp.tools.config['db_name']: db_names = openerp.tools.config['db_name'].split(',') else: - db_names = openerp.service.db.exp_list() + db_names = openerp.service.db.exp_list(True) dbfilter = openerp.tools.config['dbfilter'] if dbfilter: db_names = [d for d in db_names if re.match(dbfilter, d)] From 2290696377641fdc6f621ae70377b9333ae7e27b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 27 Dec 2015 13:32:18 +0100 Subject: [PATCH 19/43] [FIX] ignore dbfilter containing %d or %h, fixes #58 This is a temporary fix. In version 4.0, dbfilter will be completely ignored by connector. 
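As an illustration of why the filter has to be skipped (the filter value and
database names below are hypothetical, not taken from this patch): the job
runner and the worker run outside of any HTTP request, so the %h and %d
placeholders, which Odoo normally fills in from the request's host name, can
never be substituted, and matching such a filter literally excludes every
database::

    import re

    dbfilter = '^%d_.*$'   # hypothetical per-domain filter from the Odoo config
    db_names = ['customer_prod', 'customer_test']

    # Before this fix the filter was applied literally, so the runner ended
    # up monitoring no database at all:
    monitored = [d for d in db_names if re.match(dbfilter, d)]
    assert monitored == []
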
--- connector/jobrunner/runner.py | 2 +- connector/queue/worker.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index 341dbb2b7..7a7128935 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -266,7 +266,7 @@ def get_db_names(self): else: db_names = openerp.service.db.exp_list() dbfilter = openerp.tools.config['dbfilter'] - if dbfilter: + if dbfilter and '%d' not in dbfilter and '%h' not in dbfilter: db_names = [d for d in db_names if re.match(dbfilter, d)] return db_names diff --git a/connector/queue/worker.py b/connector/queue/worker.py index 3de7786ae..65350d0ff 100644 --- a/connector/queue/worker.py +++ b/connector/queue/worker.py @@ -262,7 +262,7 @@ def available_db_names(): else: db_names = db.exp_list(True) dbfilter = config['dbfilter'] - if dbfilter and db_names: + if dbfilter and '%d' not in dbfilter and '%h' not in dbfilter: db_names = [d for d in db_names if re.match(dbfilter, d)] available_db_names = [] for db_name in db_names: From ebb81b83cb4db2b2f0d18dc244c12a24ce359226 Mon Sep 17 00:00:00 2001 From: Nicolas PIGANEAU Date: Mon, 28 Dec 2015 13:18:34 +0100 Subject: [PATCH 20/43] Manage non-ascii PG errors When PG is localized, error messages are not ascii and jobs are not postponed. Instead they are failed with a 'unicode decode error'. Note: even if PG ```lc_messages``` is set to en_us.utf8 on a localized system, the error message prefixes (such as ```DETAILS:```) are still localized. --- connector/controllers/main.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/connector/controllers/main.py b/connector/controllers/main.py index 4c5619989..e6c74e372 100644 --- a/connector/controllers/main.py +++ b/connector/controllers/main.py @@ -94,7 +94,8 @@ def retry_postpone(job, message, seconds=None): if err.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY: raise - retry_postpone(job, unicode(err), seconds=PG_RETRY) + retry_postpone(job, unicode(err.pgerror, errors='replace'), + seconds=PG_RETRY) _logger.debug('%s OperationalError, postponed', job) except NothingToDoJob as err: From e66b0f21b4d9e626e375d87e5a19e44a1c6ce744 Mon Sep 17 00:00:00 2001 From: Nicolas PIGANEAU Date: Mon, 28 Dec 2015 13:38:16 +0100 Subject: [PATCH 21/43] Removed trailing whitespace --- connector/controllers/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector/controllers/main.py b/connector/controllers/main.py index e6c74e372..0d4b6dac3 100644 --- a/connector/controllers/main.py +++ b/connector/controllers/main.py @@ -94,7 +94,7 @@ def retry_postpone(job, message, seconds=None): if err.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY: raise - retry_postpone(job, unicode(err.pgerror, errors='replace'), + retry_postpone(job, unicode(err.pgerror, errors='replace'), seconds=PG_RETRY) _logger.debug('%s OperationalError, postponed', job) From 7354724a18878fa86104b85dc1fb50fad48df117 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 4 Jan 2016 10:04:00 +0100 Subject: [PATCH 22/43] [FIX] ignore dbfilter, fixes #58 --- connector/jobrunner/runner.py | 4 ---- connector/queue/worker.py | 4 ---- 2 files changed, 8 deletions(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index cbaae3358..b11bfebb0 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -112,7 +112,6 @@ from contextlib import closing import logging import os -import re import select import threading import time @@ 
-265,9 +264,6 @@ def get_db_names(self): db_names = openerp.tools.config['db_name'].split(',') else: db_names = openerp.service.db.exp_list(True) - dbfilter = openerp.tools.config['dbfilter'] - if dbfilter and '%d' not in dbfilter and '%h' not in dbfilter: - db_names = [d for d in db_names if re.match(dbfilter, d)] return db_names def close_databases(self, remove_jobs=True): diff --git a/connector/queue/worker.py b/connector/queue/worker.py index 65350d0ff..e9a5873f2 100644 --- a/connector/queue/worker.py +++ b/connector/queue/worker.py @@ -19,7 +19,6 @@ # ############################################################################## -import re import logging import os import threading @@ -261,9 +260,6 @@ def available_db_names(): db_names = config['db_name'].split(',') else: db_names = db.exp_list(True) - dbfilter = config['dbfilter'] - if dbfilter and '%d' not in dbfilter and '%h' not in dbfilter: - db_names = [d for d in db_names if re.match(dbfilter, d)] available_db_names = [] for db_name in db_names: session_hdl = ConnectorSessionHandler(db_name, From 6c32f93cc92459ea6568bd40847f0d0e5c8d8619 Mon Sep 17 00:00:00 2001 From: Nicolas Piganeau Date: Mon, 4 Jan 2016 15:52:28 +0100 Subject: [PATCH 23/43] Use tools.ustr() instead of unicode() --- connector/controllers/main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/connector/controllers/main.py b/connector/controllers/main.py index 0d4b6dac3..193f0cdcf 100644 --- a/connector/controllers/main.py +++ b/connector/controllers/main.py @@ -5,7 +5,7 @@ from psycopg2 import OperationalError import openerp -from openerp import http +from openerp import http, tools from openerp.service.model import PG_CONCURRENCY_ERRORS_TO_RETRY from ..session import ConnectorSessionHandler @@ -94,7 +94,7 @@ def retry_postpone(job, message, seconds=None): if err.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY: raise - retry_postpone(job, unicode(err.pgerror, errors='replace'), + retry_postpone(job, tools.ustr(err.pgerror, errors='replace'), seconds=PG_RETRY) _logger.debug('%s OperationalError, postponed', job) From 4909ff76a69ed6735d15a12e97bb94afcf3dfa32 Mon Sep 17 00:00:00 2001 From: OCA Transbot Date: Sat, 16 Jan 2016 00:48:42 -0500 Subject: [PATCH 24/43] OCA Transbot updated translations from Transifex --- connector/i18n/de.po | 116 ++++++++++++++++++++----------------------- 1 file changed, 54 insertions(+), 62 deletions(-) diff --git a/connector/i18n/de.po b/connector/i18n/de.po index 5f5e64d1b..c426b2079 100644 --- a/connector/i18n/de.po +++ b/connector/i18n/de.po @@ -4,13 +4,14 @@ # # Translators: # FIRST AUTHOR , 2014 +# Rudolf Schnapka , 2016 msgid "" msgstr "" "Project-Id-Version: connector (8.0)\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-08-21 14:26+0000\n" -"PO-Revision-Date: 2015-06-17 07:37+0000\n" -"Last-Translator: OCA Transbot \n" +"POT-Creation-Date: 2016-01-04 08:58+0000\n" +"PO-Revision-Date: 2016-01-14 12:58+0000\n" +"Last-Translator: Rudolf Schnapka \n" "Language-Team: German (http://www.transifex.com/oca/OCA-connector-8-0/language/de/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" @@ -36,7 +37,7 @@ msgstr "

Kein Datensatz zum Überprüfen.

\n

Wenn ein Con #: code:addons/connector/checkpoint/checkpoint.py:136 #, python-format msgid "A %s needs a review." -msgstr "" +msgstr "Ein %s erfordert Sichtung" #. module: connector #: field:connector.config.settings,module_portal:0 @@ -61,7 +62,7 @@ msgstr "Erlaube Dokumentenfreigabe" #. module: connector #: field:connector.config.settings,module_google_calendar:0 msgid "Allow the users to synchronize their calendar with Google Calendar" -msgstr "" +msgstr "Ermöglichen Sie Benutzern ihren Kalender mit Google-Kalender zu synchronisieren" #. module: connector #: field:connector.config.settings,module_base_import:0 @@ -76,7 +77,7 @@ msgstr "anwenden" #. module: connector #: field:connector.config.settings,module_google_drive:0 msgid "Attach Google documents to any record" -msgstr "" +msgstr "Google-Dokumente an beliebige Datensätze anhängen" #. module: connector #: view:connector.config.settings:connector.view_connector_config_settings @@ -91,34 +92,34 @@ msgid "Cancel" msgstr "Abbrechen" #. module: connector -#: code:addons/connector/queue/job.py:571 +#: code:addons/connector/queue/job.py:574 #, python-format msgid "Canceled. Nothing to do." -msgstr "" +msgstr "Storniert. Nicht zu tun." #. module: connector -#: code:addons/connector/queue/model.py:456 +#: code:addons/connector/queue/model.py:457 #, python-format msgid "Cannot change the root channel" -msgstr "" +msgstr "Kann den Hauptkanal nicht ändern" #. module: connector -#: code:addons/connector/queue/model.py:463 +#: code:addons/connector/queue/model.py:464 #, python-format msgid "Cannot remove the root channel" -msgstr "" +msgstr "Kann den Hauptkanal nicht entfernen" #. module: connector #: view:queue.job:connector.view_queue_job_search field:queue.job,channel:0 #: view:queue.job.function:connector.view_queue_job_function_search #: field:queue.job.function,channel_id:0 msgid "Channel" -msgstr "" +msgstr "Kanal" #. module: connector #: sql_constraint:queue.job.channel:0 msgid "Channel complete name must be unique" -msgstr "" +msgstr "Vollständige Bezeichnung muss eindeutig sein" #. module: connector #: model:ir.actions.act_window,name:connector.action_queue_job_channel @@ -127,7 +128,7 @@ msgstr "" #: view:queue.job.channel:connector.view_queue_job_channel_search #: view:queue.job.channel:connector.view_queue_job_channel_tree msgid "Channels" -msgstr "" +msgstr "Kanäle" #. module: connector #: model:ir.ui.menu,name:connector.menu_checkpoint @@ -152,12 +153,12 @@ msgstr "Klicken Sie auf den" #. module: connector #: field:queue.job,company_id:0 msgid "Company" -msgstr "" +msgstr "Unternehmen" #. module: connector #: field:queue.job.channel,complete_name:0 field:queue.job.function,channel:0 msgid "Complete Name" -msgstr "" +msgstr "Vollständige Bezeichnung" #. module: connector #: view:connector.config.settings:connector.view_connector_config_settings @@ -174,7 +175,7 @@ msgstr "Connector" #. module: connector #: model:ir.model,name:connector.model_connector_backend msgid "Connector Backend" -msgstr "" +msgstr "Connector Backend" #. module: connector #: view:connector.checkpoint:connector.view_connector_checkpoint_form @@ -188,7 +189,7 @@ msgstr "Connector Checkpunkt" #. module: connector #: model:ir.model,name:connector.model_connector_config_settings msgid "Connector Configuration" -msgstr "" +msgstr "Connector-Einstellungen" #. 
module: connector #: model:res.groups,name:connector.group_connector_manager @@ -213,7 +214,7 @@ msgstr "Anlagedatum" #: field:connector.config.settings,create_uid:0 #: field:queue.job.channel,create_uid:0 field:queue.requeue.job,create_uid:0 msgid "Created by" -msgstr "" +msgstr "Angelegt durch" #. module: connector #: field:connector.checkpoint,create_date:0 @@ -221,7 +222,7 @@ msgstr "" #: field:connector.config.settings,create_date:0 #: field:queue.job.channel,create_date:0 field:queue.requeue.job,create_date:0 msgid "Created on" -msgstr "" +msgstr "Angelegt am" #. module: connector #: field:queue.job,retry:0 @@ -242,7 +243,7 @@ msgstr "Erledigt am" #: help:connector.checkpoint,message_last_post:0 #: help:queue.job,message_last_post:0 msgid "Date of the last message posted on the record." -msgstr "" +msgstr "Datum der letzten Nachricht zu diesem Datensatz" #. module: connector #: field:queue.job,name:0 @@ -298,7 +299,7 @@ msgstr "Follower" #. module: connector #: field:queue.job,func_name:0 msgid "Func name" -msgstr "" +msgstr "Funk.bezeichnung" #. module: connector #: help:connector.config.settings,module_portal:0 @@ -309,7 +310,7 @@ msgstr "Erteilen Sie den Kunden Zugriff auf Ihre eigenen Dokumente." #: view:queue.job:connector.view_queue_job_search #: view:queue.job.function:connector.view_queue_job_function_search msgid "Group By" -msgstr "" +msgstr "Gruppieren" #. module: connector #: view:connector.checkpoint:connector.view_connector_checkpoint_search @@ -331,7 +332,7 @@ msgstr "Beinhaltet die Chatter Zusammenfassung (Anzahl der Nachrichten, ...). Di #: field:queue.job.channel,id:0 field:queue.job.function,id:0 #: field:queue.requeue.job,id:0 field:queue.worker,id:0 msgid "ID" -msgstr "" +msgstr "ID" #. module: connector #: help:connector.checkpoint,message_unread:0 help:queue.job,message_unread:0 @@ -348,7 +349,7 @@ msgstr "Wenn die Anzahl der maximalen Versuche 0 ist, ist die maximal Anzahl der msgid "" "If you have setup a catch-all email domain redirected to the Odoo server, " "enter the domain name here." -msgstr "" +msgstr "Wenn Sie eine Catch-All-EMail-Domäne eingerichtet haben, die auf den Oddo-Server weiterleitet, geben Sie hier diesen Domänennamen ein." #. module: connector #: field:connector.checkpoint,backend_id:0 @@ -364,12 +365,13 @@ msgstr "Ist ein Follower" #. module: connector #: model:ir.model,name:connector.model_queue_job_channel msgid "Job Channels" -msgstr "" +msgstr "Job-Kanäle" #. module: connector #: view:queue.job:connector.view_queue_job_search +#: field:queue.job,job_function_id:0 msgid "Job Function" -msgstr "" +msgstr "Job-Funktion" #. module: connector #: model:ir.actions.act_window,name:connector.action_queue_job_function @@ -380,18 +382,13 @@ msgstr "" #: view:queue.job.function:connector.view_queue_job_function_search #: view:queue.job.function:connector.view_queue_job_function_tree msgid "Job Functions" -msgstr "" +msgstr "Job-Funktionen" #. module: connector #: model:mail.message.subtype,name:connector.mt_job_failed msgid "Job failed" msgstr "Auftrag fehlgeschlagen" -#. module: connector -#: field:queue.job,job_function_id:0 -msgid "Job function id" -msgstr "" - #. module: connector #: model:ir.actions.act_window,name:connector.action_queue_job #: model:ir.ui.menu,name:connector.menu_queue_job @@ -411,7 +408,7 @@ msgstr "Letzte Statusüberprüfung" #: field:connector.checkpoint,message_last_post:0 #: field:queue.job,message_last_post:0 msgid "Last Message Date" -msgstr "" +msgstr "Letztes Meldungsdatum" #. 
module: connector #: field:connector.checkpoint,write_uid:0 @@ -419,7 +416,7 @@ msgstr "" #: field:connector.config.settings,write_uid:0 #: field:queue.job.channel,write_uid:0 field:queue.requeue.job,write_uid:0 msgid "Last Updated by" -msgstr "" +msgstr "Zuletzt aktualisiert durch" #. module: connector #: field:connector.checkpoint,write_date:0 @@ -427,7 +424,7 @@ msgstr "" #: field:connector.config.settings,write_date:0 #: field:queue.job.channel,write_date:0 field:queue.requeue.job,write_date:0 msgid "Last Updated on" -msgstr "" +msgstr "Zuletzt aktualisiert am" #. module: connector #: field:external.binding,sync_date:0 @@ -440,10 +437,10 @@ msgid "Manage multiple companies" msgstr "Multi-Company Verwaltung" #. module: connector -#: code:addons/connector/queue/model.py:140 +#: code:addons/connector/queue/model.py:141 #, python-format msgid "Manually set to done by %s" -msgstr "" +msgstr "Manuell auf erledigt gesetzt durch %s" #. module: connector #: field:queue.job,max_retries:0 @@ -466,11 +463,6 @@ msgstr "Nachrichten und Kommunikations-Historie" msgid "Model" msgstr "Modell" -#. module: connector -#: model:ir.model,name:connector.model_ir_module_module -msgid "Module" -msgstr "" - #. module: connector #: field:connector.backend,name:0 field:queue.job.channel,name:0 #: field:queue.job.function,name:0 @@ -484,10 +476,10 @@ msgid "Need Review" msgstr "Benötigt Prüfung" #. module: connector -#: code:addons/connector/queue/model.py:116 +#: code:addons/connector/queue/model.py:117 #, python-format msgid "No action available for this job" -msgstr "" +msgstr "Kein Vorgang für diesen Job verfügbar" #. module: connector #: model:ir.actions.client,name:connector.action_client_connector_menu @@ -502,18 +494,18 @@ msgstr "Prozess-ID" #. module: connector #: field:queue.job.channel,parent_id:0 msgid "Parent Channel" -msgstr "" +msgstr "Übergeordneter Kanal" #. module: connector -#: code:addons/connector/queue/model.py:448 +#: code:addons/connector/queue/model.py:449 #, python-format msgid "Parent channel required." -msgstr "" +msgstr "Übergeordneter Kanal erforderlich." #. module: connector #: view:queue.job:connector.view_queue_job_search selection:queue.job,state:0 msgid "Pending" -msgstr "Warteschlange" +msgstr "Wartend" #. module: connector #: field:queue.job,func:0 @@ -538,7 +530,7 @@ msgstr "Aufgabe in Warteschlange einreihen" #. module: connector #: model:ir.model,name:connector.model_queue_worker msgid "Queue Worker" -msgstr "" +msgstr "Warteschlangenarbeiter" #. module: connector #: field:connector.checkpoint,record_id:0 @@ -548,18 +540,18 @@ msgstr "Datensatz Nr." #. module: connector #: view:queue.job:connector.view_queue_job_form msgid "Related" -msgstr "" +msgstr "Zugehörig" #. module: connector #: code:addons/connector/related_action.py:48 #, python-format msgid "Related Record" -msgstr "" +msgstr "Zugehöriger Datensatz" #. module: connector #: field:connector.config.settings,font:0 msgid "Report Font" -msgstr "" +msgstr "Berichtsschriftart" #. module: connector #: view:queue.requeue.job:connector.view_requeue_job @@ -606,7 +598,7 @@ msgstr "Als Überprüft markieren" msgid "" "Set the font into the report header, it will be used as default font in the " "RML reports of the user company" -msgstr "" +msgstr "Bestimmen Sie die Schriftart im Berichtskopf, diese wird als Vorgabeschriftart für RML-Berichte zum Unternehmen des Benutzers verwendet" #. module: connector #: view:queue.job:connector.view_queue_job_form @@ -616,15 +608,15 @@ msgstr "Setzte auf 'Fertig'" #. 
module: connector #: help:connector.config.settings,module_share:0 msgid "Share or embbed any screen of Odoo." -msgstr "" +msgstr "Teilen oder Einbetten eines jeden Bildschirms von Odoo." #. module: connector -#: code:addons/connector/queue/model.py:186 +#: code:addons/connector/queue/model.py:187 #, python-format msgid "" "Something bad happened during the execution of the job. More details in the " "'Exception Information' section." -msgstr "" +msgstr "Etwas schlechtes ist bei der Ausführung des Jobs passiert. Mehr Details finden Sie im Abschnitt 'Ausnahmeninformation'." #. module: connector #: field:queue.job,date_started:0 field:queue.worker,date_start:0 @@ -682,12 +674,12 @@ msgstr "Die ausgewählten Arbeitsschritte werden nochmal eingereiht." #. module: connector #: help:connector.config.settings,module_google_calendar:0 msgid "This installs the module google_calendar." -msgstr "" +msgstr "Das Modul google_calendar wird installiert." #. module: connector #: help:connector.config.settings,module_google_drive:0 msgid "This installs the module google_docs." -msgstr "" +msgstr "Das Modul google_docs wird installiert." #. module: connector #: field:queue.job,uuid:0 field:queue.worker,uuid:0 @@ -726,7 +718,7 @@ msgstr "Wizard um eine Auswahl an Aufgaben neu in die Warteschlange einzureihen" msgid "" "Work in multi-company environments, with appropriate security access between companies.\n" "-This installs the module multi_company." -msgstr "" +msgstr "Arbeiten in Umgebungen mit mehreren Unternehmen, mit entsprechenden Zugriffsbeschränkungen zwischen Unternehmen.\n- Das Modul multi_company wird installiert." #. module: connector #: field:queue.job,worker_id:0 @@ -751,4 +743,4 @@ msgstr "oder" #. module: connector #: view:connector.checkpoint:connector.view_connector_checkpoint_form msgid "to verify it:" -msgstr "Überprüfe:" +msgstr "es zu überprüfen:" From 48fa359ef9f8c7714cab41f4db74f7445eec7e1f Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Mon, 8 Feb 2016 17:00:27 +0100 Subject: [PATCH 25/43] Prevent to unpickle globals which are not jobs This is a safeguard to prevent someone to write arbitrary code in jobs. Builtin types and datetime/timedelta are allowed in job arguments, and a new function 'whitelist_unpickle_global' allows to register new objects if needed. 
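A minimal usage sketch (the job function and the extra argument type are
hypothetical; only ``whitelist_unpickle_global`` and the ``job`` decorator
come from this module)::

    from datetime import date

    from openerp.addons.connector.queue.job import (
        job,
        whitelist_unpickle_global,
    )

    # datetime.date is not pre-registered (only datetime and timedelta are),
    # so it must be whitelisted before being used as a job argument:
    whitelist_unpickle_global(date)

    @job
    def export_partners_since(session, model_name, since_date):
        # since_date is a datetime.date; without the whitelist entry above,
        # reading this job back from the queue would raise
        # NotReadableJobError when its arguments are unpickled.
        pass
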
--- connector/queue/job.py | 47 +++++++++++++++++++++++++++++++++++-- connector/tests/test_job.py | 27 +++++++++++++++++++++ 2 files changed, 72 insertions(+), 2 deletions(-) diff --git a/connector/queue/job.py b/connector/queue/job.py index 1c65b73e8..28947043e 100644 --- a/connector/queue/job.py +++ b/connector/queue/job.py @@ -25,7 +25,8 @@ import uuid import sys from datetime import datetime, timedelta, MINYEAR -from pickle import loads, dumps, UnpicklingError +from cPickle import dumps, UnpicklingError, Unpickler +from cStringIO import StringIO import openerp from openerp.tools.translate import _ @@ -54,6 +55,31 @@ _logger = logging.getLogger(__name__) +_UNPICKLE_WHITELIST = set() + + +def whitelist_unpickle_global(fn_or_class): + """ Allow a function or class to be used in jobs + + By default, the only types allowed to be used in job arguments are: + + * the builtins: str/unicode, int/long, float, bool, tuple, list, dict, None + * the pre-registered: datetime.datetime datetime.timedelta + + If you need to use an argument in a job which is not in this whitelist, + you can add it by using:: + + whitelist_unpickle_global(fn_or_class_to_register) + + """ + _UNPICKLE_WHITELIST.add(fn_or_class) + + +# register common types that might be used in job arguments +whitelist_unpickle_global(datetime) +whitelist_unpickle_global(timedelta) + + def _unpickle(pickled): """ Unpickles a string and catch all types of errors it can throw, to raise only NotReadableJobError in case of error. @@ -63,9 +89,26 @@ def _unpickle(pickled): `loads()` may raises many types of exceptions (AttributeError, IndexError, TypeError, KeyError, ...). They are all catched and raised as `NotReadableJobError`). + + Pickle could be exploited by an attacker who would write a value in a job + that would run arbitrary code when unpickled. This is why we set a custom + ``find_global`` method on the ``Unpickler``, only jobs and a whitelist of + classes/functions are allowed to be unpickled (plus the builtins types). 
""" + def restricted_find_global(mod_name, fn_name): + __import__(mod_name) + mod = sys.modules[mod_name] + fn = getattr(mod, fn_name) + if not (fn in JOB_REGISTRY or fn in _UNPICKLE_WHITELIST): + raise UnpicklingError( + '{}.{} is not allowed in jobs'.format(mod_name, fn_name) + ) + return fn + + unpickler = Unpickler(StringIO(pickled)) + unpickler.find_global = restricted_find_global try: - unpickled = loads(pickled) + unpickled = unpickler.load() except (StandardError, UnpicklingError): raise NotReadableJobError('Could not unpickle.', pickled) return unpickled diff --git a/connector/tests/test_job.py b/connector/tests/test_job.py index 15918578c..cd4be9f76 100644 --- a/connector/tests/test_job.py +++ b/connector/tests/test_job.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- +import cPickle import mock import unittest2 from datetime import datetime, timedelta @@ -51,6 +52,15 @@ def retryable_error_task(session): raise RetryableJobError('Must be retried later') +def pickle_forbidden_function(session): + pass + + +@job +def pickle_allowed_function(session): + pass + + class TestJobs(unittest2.TestCase): """ Test Job """ @@ -310,6 +320,23 @@ def test_unpickle(self): 'a small cucumber preserved in vinegar, ' 'brine, or a similar solution.') + def test_unpickle_unsafe(self): + """ unpickling function not decorated by @job is forbidden """ + pickled = cPickle.dumps(pickle_forbidden_function) + with self.assertRaises(NotReadableJobError): + _unpickle(pickled) + + def test_unpickle_safe(self): + """ unpickling function decorated by @job is allowed """ + pickled = cPickle.dumps(pickle_allowed_function) + self.assertEqual(_unpickle(pickled), pickle_allowed_function) + + def test_unpickle_whitelist(self): + """ unpickling function/class that is in the whitelist is allowed """ + arg = datetime(2016, 2, 10) + pickled = cPickle.dumps(arg) + self.assertEqual(_unpickle(pickled), arg) + def test_unpickle_not_readable(self): with self.assertRaises(NotReadableJobError): self.assertEqual(_unpickle('cucumber')) From b4302feb07ac3c7b242b5ab604b96747923b3109 Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Mon, 22 Feb 2016 09:21:00 +0100 Subject: [PATCH 26/43] Fix too long line --- connector/jobrunner/runner.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index cbaae3358..87e4770c5 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -233,10 +233,11 @@ def _initialize(self): cr.execute("LISTEN connector") def select_jobs(self, where, args): - query = "SELECT %s, uuid, id as seq, date_created, priority, eta, state " \ - "FROM queue_job WHERE %s" % \ - ('channel' if self.has_channel else 'NULL', - where) + query = ("SELECT %s, uuid, id as seq, date_created, " + "priority, eta, state " + "FROM queue_job WHERE %s" % + ('channel' if self.has_channel else 'NULL', + where)) with closing(self.conn.cursor()) as cr: cr.execute(query, args) return list(cr.fetchall()) From 646ea58f13ff29c9a5b6846e2fb6f4d8de032c09 Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Mon, 29 Feb 2016 09:24:45 +0100 Subject: [PATCH 27/43] Release 8.0.3.3.0 --- connector/AUTHORS | 4 ++++ connector/CHANGES.rst | 22 ++++++++++++++++++++-- connector/__openerp__.py | 2 +- 3 files changed, 25 insertions(+), 3 deletions(-) diff --git a/connector/AUTHORS b/connector/AUTHORS index 5f90a1623..b311dd439 100644 --- a/connector/AUTHORS +++ b/connector/AUTHORS @@ -18,3 +18,7 @@ * Leonardo Donelli at MONK Software * Mathias Colpaert * 
Yannick Vaucher at Camptocamp +* Nicolas Piganeau at NDP Systèmes +* Florent Thomas at Mind And Go +* Matthieu Dietrich at Camptocamp +* Olivier Laurent at Acsone diff --git a/connector/CHANGES.rst b/connector/CHANGES.rst index cacc7453c..0f3b3a4a9 100644 --- a/connector/CHANGES.rst +++ b/connector/CHANGES.rst @@ -1,12 +1,30 @@ Changelog --------- -Future (?) -~~~~~~~~~~ +.. Future (?) +.. ~~~~~~~~~~ +.. +.. * + + +8.0.3.3.0 (2016-02-29) +~~~~~~~~~~~~~~~~~~~~~~ * Allow to define seconds when raising a RetryableJobError (https://github.com/OCA/connector/pull/124) * Allow to ignore the retry counter when raising a RetryableJobError (https://github.com/OCA/connector/pull/124) * Add 'mock_job_delay_to_direct' to ease tests on jobs (https://github.com/OCA/connector/pull/123) +* Add helper function to acquire Posgres advisory locks (https://github.com/OCA/connector/pull/138, https://github.com/OCA/connector/pull/139) +* Improvement of 'is_module_installed' which now uses the registry instead of db + cache (https://github.com/OCA/connector/pull/130) +* Security: Prevent to unpickle globals which are not jobs or whitelisted types (https://github.com/OCA/connector/pull/170) +* Fix: Manage non-ascii Postgres errors (https://github.com/OCA/connector/pull/167) +* Fix: ignore dbfilter containing %d or %h (https://github.com/OCA/connector/pull/166) +* Fix: correctly obtain the list of database with odoo is started with --no-database-list (https://github.com/OCA/connector/pull/164) +* Fix: Set job back to 'pending' in case of exception (https://github.com/OCA/connector/pull/150, https://github.com/OCA/connector/pull/151, https://github.com/OCA/connector/pull/152, https://github.com/OCA/connector/pull/155) +* Fix: Clear environment caches and recomputations upon failures (https://github.com/OCA/connector/pull/131) +* Fix: when a job fails, inactive users are no longer added to its followers (https://github.com/OCA/connector/pull/137) +* Fix: Set job to failed after non-retryable OperationalError (https://github.com/OCA/connector/pull/132) +* Fix: wrong model in connector_base_product's views (https://github.com/OCA/connector/pull/119) +* Various documentation improvements 3.2.0 (2015-09-10) diff --git a/connector/__openerp__.py b/connector/__openerp__.py index 3668d4b4d..1efa783b3 100644 --- a/connector/__openerp__.py +++ b/connector/__openerp__.py @@ -20,7 +20,7 @@ ############################################################################## {'name': 'Connector', - 'version': '8.0.3.2.0', + 'version': '8.0.3.3.0', 'author': 'Camptocamp,Openerp Connector Core Editors,' 'Odoo Community Association (OCA)', 'website': 'http://odoo-connector.com', From d86b27b6a901276430273181d3e39da9d7b4f764 Mon Sep 17 00:00:00 2001 From: "Pedro M. 
Baeza" Date: Wed, 14 Oct 2015 02:20:19 +0200 Subject: [PATCH 28/43] [MIG] Make modules uninstallable --- connector/__openerp__.py | 2 +- connector_base_product/__openerp__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/connector/__openerp__.py b/connector/__openerp__.py index 1efa783b3..5025ca4f4 100644 --- a/connector/__openerp__.py +++ b/connector/__openerp__.py @@ -40,6 +40,6 @@ 'setting_view.xml', 'res_partner_view.xml', ], - 'installable': True, + 'installable': False, 'application': True, } diff --git a/connector_base_product/__openerp__.py b/connector_base_product/__openerp__.py index 4a11d1f8b..dba31dae7 100644 --- a/connector_base_product/__openerp__.py +++ b/connector_base_product/__openerp__.py @@ -37,5 +37,5 @@ 'data': [ 'product_view.xml' ], - 'installable': True, + 'installable': False, } From ece8a241d981b2a2b92d2de16ca8af6d70bb7098 Mon Sep 17 00:00:00 2001 From: "Pedro M. Baeza" Date: Wed, 14 Oct 2015 02:20:21 +0200 Subject: [PATCH 29/43] [MIG] Update metafiles --- .travis.yml | 2 +- README.md | 13 +++---------- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/.travis.yml b/.travis.yml index f39dff1bc..71ac866ff 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ env: - TRANSIFEX="1" TRANSIFEX_USER='transbot@odoo-community.org' - DOCS="1" global: - - VERSION="8.0" TESTS="0" LINT_CHECK="0" TRANSIFEX="0" DOCS="0" + - VERSION="9.0" TESTS="0" LINT_CHECK="0" TRANSIFEX="0" DOCS="0" # travis for docs build - secure: "lazdZGpkqFTQiu44grYAqKcaGh0cVTBlrHcBxxs43K/OH4Uc2AjMBty83zKc0ZgzizrNOsfK3Z3UHLmTNI92Vi3PtgGzA9dUmiReGO6QcE9P31Geg4pN/1Fwosv6aSpG3hejJJ8ZYpMbEPJkZyjTVpdM2r1VEcqEvFQ2xR5bEUg=" # transifex diff --git a/README.md b/README.md index a870f903a..fef818766 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -[![Build Status](https://travis-ci.org/OCA/connector.svg?branch=8.0)](https://travis-ci.org/OCA/connector) -[![Coverage Status](https://coveralls.io/repos/OCA/connector/badge.png?branch=8.0)](https://coveralls.io/r/OCA/connector?branch=8.0) +[![Build Status](https://travis-ci.org/OCA/connector.svg?branch=9.0)](https://travis-ci.org/OCA/connector) +[![Coverage Status](https://coveralls.io/repos/OCA/connector/badge.png?branch=9.0)](https://coveralls.io/r/OCA/connector?branch=9.0) Odoo Connector Modules @@ -16,15 +16,8 @@ Documentation: http://www.odoo-connector.com [//]: # (addons) -Available addons ----------------- -addon | version | summary ---- | --- | --- -[connector](connector/) | 8.0.3.2.0 | Connector -[connector_base_product](connector_base_product/) | 8.0.1.0.0 | Connector Base Product - [//]: # (end addons) Translation Status ------------------ -[![Transifex Status](https://www.transifex.com/projects/p/OCA-connector-8-0/chart/image_png)](https://www.transifex.com/projects/p/OCA-connector-8-0) +[![Transifex Status](https://www.transifex.com/projects/p/OCA-connector-9-0/chart/image_png)](https://www.transifex.com/projects/p/OCA-connector-9-0) From a75affd45c00322eccc64264918d69a427be6476 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 14 Oct 2015 09:55:58 +0200 Subject: [PATCH 30/43] [UPD] addons table in README.md --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index fef818766..daf3c55ef 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,13 @@ Documentation: http://www.odoo-connector.com [//]: # (addons) +Unported addons +--------------- +addon | version | summary +--- | --- | --- +[connector](connector/) | 8.0.3.2.0 (unported) | Connector 
+[connector_base_product](connector_base_product/) | 8.0.1.0.0 (unported) | Connector Base Product + [//]: # (end addons) Translation Status From cbba13d7cf904bf12fd0105e6baccfbcbbb1338b Mon Sep 17 00:00:00 2001 From: Guewen Baconnier Date: Fri, 30 Oct 2015 11:35:10 +0100 Subject: [PATCH 31/43] Update mailing list address --- connector/doc/index.rst | 8 -------- 1 file changed, 8 deletions(-) diff --git a/connector/doc/index.rst b/connector/doc/index.rst index 6e6349ae9..6bf7dee44 100644 --- a/connector/doc/index.rst +++ b/connector/doc/index.rst @@ -44,14 +44,6 @@ Core Features .. _`AGPL version 3`: http://www.gnu.org/licenses/agpl-3.0.html .. _`project's mailing list (name: Connectors)`: https://odoo-community.org/groups -**************************************** -Last minute information regarding Odoo 9 -**************************************** - -* A crowdfunding campaign is currently running to finance the migration of this connector to make it compatible with Odoo 9. **Be part of this project now!** `More info here`_ - -.. _`More info here`: https://www.indiegogo.com/projects/odoo-connector-odoo-magento-connector-for-odoo-9#/ - ********************************* Connectors based on the framework ********************************* From 92ade8063c723beeb838e243fd87e77967744484 Mon Sep 17 00:00:00 2001 From: maxime-c2c Date: Mon, 16 Nov 2015 17:19:52 +0100 Subject: [PATCH 32/43] Update index.rst --- connector/doc/index.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/connector/doc/index.rst b/connector/doc/index.rst index 6bf7dee44..6e6349ae9 100644 --- a/connector/doc/index.rst +++ b/connector/doc/index.rst @@ -44,6 +44,14 @@ Core Features .. _`AGPL version 3`: http://www.gnu.org/licenses/agpl-3.0.html .. _`project's mailing list (name: Connectors)`: https://odoo-community.org/groups +**************************************** +Last minute information regarding Odoo 9 +**************************************** + +* A crowdfunding campaign is currently running to finance the migration of this connector to make it compatible with Odoo 9. **Be part of this project now!** `More info here`_ + +.. 
_`More info here`: https://www.indiegogo.com/projects/odoo-connector-odoo-magento-connector-for-odoo-9#/ + ********************************* Connectors based on the framework ********************************* From 3c83de414638d955f4e302cb1f00a749b2bd5029 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 4 Jan 2016 10:04:00 +0100 Subject: [PATCH 33/43] [FIX] ignore dbfilter, fixes #58 --- connector/jobrunner/runner.py | 4 ---- connector/queue/worker.py | 4 ---- 2 files changed, 8 deletions(-) diff --git a/connector/jobrunner/runner.py b/connector/jobrunner/runner.py index 87e4770c5..5639b9959 100644 --- a/connector/jobrunner/runner.py +++ b/connector/jobrunner/runner.py @@ -112,7 +112,6 @@ from contextlib import closing import logging import os -import re import select import threading import time @@ -266,9 +265,6 @@ def get_db_names(self): db_names = openerp.tools.config['db_name'].split(',') else: db_names = openerp.service.db.exp_list(True) - dbfilter = openerp.tools.config['dbfilter'] - if dbfilter and '%d' not in dbfilter and '%h' not in dbfilter: - db_names = [d for d in db_names if re.match(dbfilter, d)] return db_names def close_databases(self, remove_jobs=True): diff --git a/connector/queue/worker.py b/connector/queue/worker.py index 65350d0ff..e9a5873f2 100644 --- a/connector/queue/worker.py +++ b/connector/queue/worker.py @@ -19,7 +19,6 @@ # ############################################################################## -import re import logging import os import threading @@ -261,9 +260,6 @@ def available_db_names(): db_names = config['db_name'].split(',') else: db_names = db.exp_list(True) - dbfilter = config['dbfilter'] - if dbfilter and '%d' not in dbfilter and '%h' not in dbfilter: - db_names = [d for d in db_names if re.match(dbfilter, d)] available_db_names = [] for db_name in db_names: session_hdl = ConnectorSessionHandler(db_name, From 29079562e2cf36cb33c12c4d068ca893d977977f Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 14:50:31 +0100 Subject: [PATCH 34/43] [MIG] Make module installable --- connector/__openerp__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/connector/__openerp__.py b/connector/__openerp__.py index 5025ca4f4..19918194a 100644 --- a/connector/__openerp__.py +++ b/connector/__openerp__.py @@ -20,7 +20,7 @@ ############################################################################## {'name': 'Connector', - 'version': '8.0.3.3.0', + 'version': '9.0.0.1.0', 'author': 'Camptocamp,Openerp Connector Core Editors,' 'Odoo Community Association (OCA)', 'website': 'http://odoo-connector.com', @@ -40,6 +40,6 @@ 'setting_view.xml', 'res_partner_view.xml', ], - 'installable': False, + 'installable': True, 'application': True, } From 4a8536b6ef5126fd49e16ac3f8652426811fa231 Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 14:56:01 +0100 Subject: [PATCH 35/43] [MIG] Use python standard lib unittest in place of unittest2 --- connector/tests/test_backend.py | 4 ++-- connector/tests/test_connector.py | 8 ++++---- connector/tests/test_job.py | 4 ++-- connector/tests/test_mapper.py | 4 ++-- connector/tests/test_queue.py | 4 ++-- connector/tests/test_related_action.py | 4 ++-- connector/tests/test_worker.py | 4 ++-- 7 files changed, 16 insertions(+), 16 deletions(-) diff --git a/connector/tests/test_backend.py b/connector/tests/test_backend.py index 7a025bbeb..1655f8b4c 100644 --- a/connector/tests/test_backend.py +++ b/connector/tests/test_backend.py @@ -1,6 
+1,6 @@ # -*- coding: utf-8 -*- -import unittest2 +import unittest import openerp.tests.common as common from openerp.addons.connector.backend import (Backend, @@ -14,7 +14,7 @@ from openerp.addons.connector.session import ConnectorSession -class test_backend(unittest2.TestCase): +class test_backend(unittest.TestCase): """ Test Backend """ def setUp(self): diff --git a/connector/tests/test_connector.py b/connector/tests/test_connector.py index 33ab6f380..e5093cd04 100644 --- a/connector/tests/test_connector.py +++ b/connector/tests/test_connector.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import mock -import unittest2 +import unittest from openerp import api from openerp.modules.registry import RegistryManager @@ -28,7 +28,7 @@ def mock_connector_unit(env): return ConnectorUnit(connector_env) -class ConnectorHelpers(unittest2.TestCase): +class ConnectorHelpers(unittest.TestCase): def test_openerp_module_name(self): name = connector._get_openerp_module_name('openerp.addons.sale') @@ -37,7 +37,7 @@ def test_openerp_module_name(self): self.assertEqual(name, 'sale') -class TestConnectorUnit(unittest2.TestCase): +class TestConnectorUnit(unittest.TestCase): """ Test Connector Unit """ def test_connector_unit_for_model_names(self): @@ -146,7 +146,7 @@ class ModelUnit(ConnectorUnit): self.assertEqual(unit.localcontext, self.env.context) -class TestConnectorEnvironment(unittest2.TestCase): +class TestConnectorEnvironment(unittest.TestCase): def test_create_environment_no_connector_env(self): session = mock.MagicMock(name='Session') diff --git a/connector/tests/test_job.py b/connector/tests/test_job.py index cd4be9f76..568a3d15c 100644 --- a/connector/tests/test_job.py +++ b/connector/tests/test_job.py @@ -2,7 +2,7 @@ import cPickle import mock -import unittest2 +import unittest from datetime import datetime, timedelta from openerp import SUPERUSER_ID, exceptions @@ -61,7 +61,7 @@ def pickle_allowed_function(session): pass -class TestJobs(unittest2.TestCase): +class TestJobs(unittest.TestCase): """ Test Job """ def setUp(self): diff --git a/connector/tests/test_mapper.py b/connector/tests/test_mapper.py index e3b0bd605..6777c8ee9 100644 --- a/connector/tests/test_mapper.py +++ b/connector/tests/test_mapper.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -import unittest2 +import unittest import mock import openerp.tests.common as common @@ -25,7 +25,7 @@ from openerp.addons.connector.session import ConnectorSession -class test_mapper(unittest2.TestCase): +class test_mapper(unittest.TestCase): """ Test Mapper """ def test_mapping_decorator(self): diff --git a/connector/tests/test_queue.py b/connector/tests/test_queue.py index 8a56b854f..c533bc9ed 100644 --- a/connector/tests/test_queue.py +++ b/connector/tests/test_queue.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -import unittest2 +import unittest from datetime import timedelta from openerp.addons.connector.queue.queue import JobsQueue @@ -11,7 +11,7 @@ def dummy_task(session): pass -class test_queue(unittest2.TestCase): +class test_queue(unittest.TestCase): """ Test Queue """ def setUp(self): diff --git a/connector/tests/test_related_action.py b/connector/tests/test_related_action.py index 687b4986a..240299ef2 100644 --- a/connector/tests/test_related_action.py +++ b/connector/tests/test_related_action.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import mock -import unittest2 +import unittest import openerp.tests.common as common from ..connector import Binder @@ -47,7 +47,7 @@ def try_unwrap_binding(session, model_name, binding_id): pass -class 
test_related_action(unittest2.TestCase): +class test_related_action(unittest.TestCase): """ Test Related Actions """ def setUp(self): diff --git a/connector/tests/test_worker.py b/connector/tests/test_worker.py index d55c9eb5a..499ee33ee 100644 --- a/connector/tests/test_worker.py +++ b/connector/tests/test_worker.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- -import unittest2 +import unittest from openerp.addons.connector.queue.queue import JobsQueue -class test_worker(unittest2.TestCase): +class test_worker(unittest.TestCase): """ Test Worker """ def setUp(self): From aa04ccba0688e3c9f60ef6485835bafdbb50e596 Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 15:01:55 +0100 Subject: [PATCH 36/43] [MIG] email followers are instance of model 'mail.followers' --- connector/tests/test_job.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/connector/tests/test_job.py b/connector/tests/test_job.py index 568a3d15c..243db6637 100644 --- a/connector/tests/test_job.py +++ b/connector/tests/test_job.py @@ -568,7 +568,7 @@ def test_follower_when_write_fail(self): self.assertFalse(inactiveusr in group.users) stored = self._create_job() stored.write({'state': 'failed'}) - followers = stored.message_follower_ids + followers = stored.message_follower_ids.mapped('partner_id') self.assertFalse(inactiveusr.partner_id in followers) self.assertFalse( set([u.partner_id for u in group.users]) - set(followers)) @@ -692,9 +692,10 @@ def test_job_subscription(self): ) self.assertEqual(len(stored.message_follower_ids), len(users)) expected_partners = [u.partner_id for u in users] - self.assertSetEqual(set(stored.message_follower_ids), - set(expected_partners)) - followers_id = [f.id for f in stored.message_follower_ids] + self.assertSetEqual( + set(stored.message_follower_ids.mapped('partner_id')), + set(expected_partners)) + followers_id = stored.message_follower_ids.mapped('partner_id.id') self.assertIn(self.other_partner_a.id, followers_id) self.assertIn(self.other_partner_b.id, followers_id) # jobs created for a specific company_id are followed only by @@ -707,9 +708,10 @@ def test_job_subscription(self): self.assertEqual(len(stored.message_follower_ids), 2) users = User.browse([SUPERUSER_ID, self.other_user_a.id]) expected_partners = [u.partner_id for u in users] - self.assertSetEqual(set(stored.message_follower_ids), - set(expected_partners)) - followers_id = [f.id for f in stored.message_follower_ids] + self.assertSetEqual( + set(stored.message_follower_ids.mapped('partner_id')), + set(expected_partners)) + followers_id = stored.message_follower_ids.mapped('partner_id.id') self.assertIn(self.other_partner_a.id, followers_id) self.assertNotIn(self.other_partner_b.id, followers_id) From 43dff939ee897e8c90d646d8c2c800af28e867be Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 15:07:52 +0100 Subject: [PATCH 37/43] [MIG] value returned by _company_default_get is a BrowseRecord --- connector/queue/job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector/queue/job.py b/connector/queue/job.py index 28947043e..3476cc5d0 100644 --- a/connector/queue/job.py +++ b/connector/queue/job.py @@ -163,7 +163,7 @@ def enqueue(self, func, model_name=None, args=None, kwargs=None, company_model = company_model.sudo(new_job.user_id) company_id = company_model._company_default_get( object='queue.job', - field='company_id') + field='company_id').id new_job.company_id = company_id self.store(new_job) return 
new_job.uuid From d81811f1d36c75814055cb6294e79e24542ea9fe Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 15:13:42 +0100 Subject: [PATCH 38/43] [DEL] Remove the old connector worker --- connector/connector_menu.xml | 5 - connector/doc/guides/concepts.rst | 29 ---- connector/jobrunner/runner.py | 16 +-- connector/openerp-connector-worker | 118 ---------------- connector/queue/__init__.py | 1 - connector/queue/job.py | 27 +--- connector/queue/model.py | 182 ------------------------- connector/queue/model_view.xml | 45 ------ connector/queue/queue_data.xml | 18 --- connector/security/ir.model.access.csv | 1 - connector/tests/test_job.py | 26 +--- 11 files changed, 4 insertions(+), 464 deletions(-) delete mode 100755 connector/openerp-connector-worker diff --git a/connector/connector_menu.xml b/connector/connector_menu.xml index edf6ad44d..b36d38c6d 100644 --- a/connector/connector_menu.xml +++ b/connector/connector_menu.xml @@ -23,11 +23,6 @@ sequence="14" parent="menu_queue"/> - - ...INFO...connector.jobrunner.runner: database connections ready -* Disable the "Enqueue Jobs" cron. - -* Do NOT start openerp-connector-worker. - * Create jobs (eg using base_import_async) and observe they start immediately and in parallel. diff --git a/connector/openerp-connector-worker b/connector/openerp-connector-worker deleted file mode 100755 index 5bdde75ac..000000000 --- a/connector/openerp-connector-worker +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python -import re -import sys -import logging -import os -import signal -import time -import threading -from contextlib import closing -from psycopg2 import ProgrammingError - -import openerp -from openerp.cli import server as servercli -import openerp.service.server as workers -from openerp.modules.registry import RegistryManager -from openerp.tools import config - -_logger = logging.getLogger('openerp-connector-worker') - -MAX_JOBS = 50 - - -class Multicornnector(workers.PreforkServer): - - def __init__(self, app): - super(Multicornnector, self).__init__(app) - self.address = ('0.0.0.0', 0) - self.population = config['workers'] or 1 - self.workers_connector = {} - - def process_spawn(self): - while len(self.workers_connector) < self.population: - self.worker_spawn(WorkerConnector, self.workers_connector) - - def worker_pop(self, pid): - if pid in self.workers: - _logger.debug("Worker (%s) unregistered", pid) - try: - self.workers_connector.pop(pid, None) - u = self.workers.pop(pid) - u.close() - except OSError: - return - - -class WorkerConnector(workers.Worker): - """ HTTP Request workers """ - - def __init__(self, multi): - super(WorkerConnector, self).__init__(multi) - self.db_index = 0 - - def _work_database(self, cr): - db_name = cr.dbname - try: - cr.execute("SELECT 1 FROM ir_module_module " - "WHERE name = %s " - "AND state = %s", ('connector', 'installed'), - log_exceptions=False) - except ProgrammingError as err: - if unicode(err).startswith('relation "ir_module_module" does not exist'): - _logger.debug('Database %s is not an OpenERP database,' - ' connector worker not started', db_name) - else: - raise - else: - if cr.fetchone(): - RegistryManager.check_registry_signaling(db_name) - registry = openerp.registry(db_name) - if registry: - queue_worker = registry['queue.worker'] - queue_worker.assign_then_enqueue(cr, - openerp.SUPERUSER_ID, - max_jobs=MAX_JOBS) - RegistryManager.signal_caches_change(db_name) - - def process_work(self): - with openerp.api.Environment.manage(): - if config['db_name']: - db_names = 
config['db_name'].split(',') - else: - db_names = openerp.service.db.exp_list(True) - dbfilter = config['dbfilter'] - if dbfilter and db_names: - db_names = [d for d in db_names if re.match(dbfilter, d)] - if len(db_names): - self.db_index = (self.db_index + 1) % len(db_names) - db_name = db_names[self.db_index] - self.setproctitle(db_name) - db = openerp.sql_db.db_connect(db_name) - threading.current_thread().dbname = db_name - with closing(db.cursor()) as cr: - self._work_database(cr) - else: - self.db_index = 0 - - def sleep(self): - # Really sleep once all the databases have been processed. - if self.db_index == 0: - interval = 15 + self.pid % self.multi.population # chorus effect - time.sleep(interval) - - def start(self): - workers.Worker.start(self) - - -if __name__ == "__main__": - args = sys.argv[1:] - servercli.check_root_user() - config.parse_config(args) - - servercli.check_postgres_user() - openerp.netsvc.init_logger() - servercli.report_configuration() - - openerp.multi_process = True - openerp.worker_connector = True - Multicornnector(openerp.service.wsgi_server.application).run([], False) diff --git a/connector/queue/__init__.py b/connector/queue/__init__.py index 2bc97de03..643bee7ab 100644 --- a/connector/queue/__init__.py +++ b/connector/queue/__init__.py @@ -20,4 +20,3 @@ ############################################################################## from . import model -from . import worker diff --git a/connector/queue/job.py b/connector/queue/job.py index 3476cc5d0..7ea90d5ce 100644 --- a/connector/queue/job.py +++ b/connector/queue/job.py @@ -134,13 +134,11 @@ class OpenERPJobStorage(JobStorage): """ Store a job on OpenERP """ _job_model_name = 'queue.job' - _worker_model_name = 'queue.worker' def __init__(self, session): super(OpenERPJobStorage, self).__init__() self.session = session self.job_model = self.session.env[self._job_model_name] - self.worker_model = self.session.env[self._worker_model_name] assert self.job_model is not None, ( "Model %s not found" % self._job_model_name) @@ -196,13 +194,6 @@ def db_record_from_uuid(self, job_uuid): def db_record(self, job_): return self.db_record_from_uuid(job_.uuid) - def _worker_id(self, worker_uuid): - worker = self.worker_model.sudo().search( - [('uuid', '=', worker_uuid)], - limit=1) - if worker: - return worker.id - def store(self, job_): """ Store the Job """ vals = {'state': job_.state, @@ -233,11 +224,6 @@ def store(self, job_): if job_.canceled: vals['active'] = False - if job_.worker_uuid: - vals['worker_id'] = self._worker_id(job_.worker_uuid) - else: - vals['worker_id'] = False - db_record = self.db_record(job_) if db_record: db_record.write(vals) @@ -297,8 +283,6 @@ def load(self, job_uuid): job_.model_name = stored.model_name if stored.model_name else None job_.retry = stored.retry job_.max_retries = stored.max_retries - if stored.worker_id: - job_.worker_uuid = stored.worker_id.uuid if stored.company_id: job_.company_id = stored.company_id.id return job_ @@ -311,10 +295,6 @@ class Job(object): Id (UUID) of the job. - .. attribute:: worker_uuid - - When the job is enqueued, UUID of the worker. - .. attribute:: state State of the job, can pending, enqueued, started, done or failed. 
@@ -484,7 +464,6 @@ def __init__(self, func=None, model_name=None, self._eta = None self.eta = eta self.canceled = False - self.worker_uuid = None def __cmp__(self, other): if not isinstance(other, Job): @@ -579,17 +558,15 @@ def set_pending(self, result=None, reset_retry=True): self.state = PENDING self.date_enqueued = None self.date_started = None - self.worker_uuid = None if reset_retry: self.retry = 0 if result is not None: self.result = result - def set_enqueued(self, worker): + def set_enqueued(self): self.state = ENQUEUED self.date_enqueued = datetime.now() self.date_started = None - self.worker_uuid = worker.uuid def set_started(self): self.state = STARTED @@ -599,13 +576,11 @@ def set_done(self, result=None): self.state = DONE self.exc_info = None self.date_done = datetime.now() - self.worker_uuid = None if result is not None: self.result = result def set_failed(self, exc_info=None): self.state = FAILED - self.worker_uuid = None if exc_info is not None: self.exc_info = exc_info diff --git a/connector/queue/model.py b/connector/queue/model.py index e215693ed..7f4e65db3 100644 --- a/connector/queue/model.py +++ b/connector/queue/model.py @@ -26,9 +26,7 @@ from openerp import models, fields, api, exceptions, _ from .job import STATES, DONE, PENDING, OpenERPJobStorage, JOB_REGISTRY -from .worker import WORKER_TIMEOUT from ..session import ConnectorSession -from .worker import watcher from ..connector import get_openerp_module, is_module_installed _logger = logging.getLogger(__name__) @@ -45,11 +43,6 @@ class QueueJob(models.Model): _removal_interval = 30 # days - worker_id = fields.Many2one(comodel_name='queue.worker', - string='Worker', - ondelete='set null', - select=True, - readonly=True) uuid = fields.Char(string='UUID', readonly=True, select=True, @@ -209,181 +202,6 @@ def autovacuum(self): return True -class QueueWorker(models.Model): - """ Worker """ - _name = 'queue.worker' - _description = 'Queue Worker' - _log_access = False - _rec_name = 'uuid' - - worker_timeout = WORKER_TIMEOUT - - uuid = fields.Char(string='UUID', - readonly=True, - select=True, - required=True) - pid = fields.Char(string='PID', readonly=True) - date_start = fields.Datetime(string='Start Date', readonly=True) - date_alive = fields.Datetime(string='Last Alive Check', readonly=True) - job_ids = fields.One2many(comodel_name='queue.job', - inverse_name='worker_id', - string='Jobs', - readonly=True) - - @api.model - def _notify_alive(self, worker): - workers = self.search([('uuid', '=', worker.uuid)]) - - now = fields.Datetime.now() - if not workers: - self.create({'uuid': worker.uuid, - 'pid': os.getpid(), - 'date_start': now, - 'date_alive': now, - }) - else: - workers.write({'date_alive': now}) - - @api.model - def _purge_dead_workers(self): - deadline = datetime.now() - timedelta(seconds=self.worker_timeout) - deads = self.search( - [('date_alive', '<', fields.Datetime.to_string(deadline))], - ) - for worker in deads: - _logger.debug('Worker %s is dead', worker.uuid) - try: - deads.unlink() - except Exception: - _logger.debug("Failed attempt to unlink a dead worker, likely due " - "to another transaction in progress.") - - @api.model - def _worker(self): - worker = watcher.worker_for_db(self.env.cr.dbname) - assert worker - workers = self.search([('uuid', '=', worker.uuid)]) - assert len(workers) == 1, ("%s worker found in database instead " - "of 1" % len(workers)) - return workers - - @api.model - def assign_then_enqueue(self, max_jobs=None): - """ Assign all the jobs not already assigned to a worker. 
- Then enqueue all the jobs having a worker but not enqueued. - - Each operation is atomic. - - .. warning:: commit transaction - ``cr.commit()`` is called, so please always call - this method in your own transaction, not in the main - OpenERP's transaction - - :param max_jobs: maximal limit of jobs to assign on a worker - :type max_jobs: int - """ - self.assign_jobs(max_jobs=max_jobs) - self.env.cr.commit() - self.enqueue_jobs() - self.env.cr.commit() - return True - - @api.model - def assign_jobs(self, max_jobs=None): - """ Assign ``n`` jobs to the worker of the current process - - ``n`` is ``max_jobs`` or unlimited if ``max_jobs`` is None - - :param max_jobs: maximal limit of jobs to assign on a worker - :type max_jobs: int - """ - worker = watcher.worker_for_db(self.env.cr.dbname) - if worker: - self._assign_jobs(max_jobs=max_jobs) - else: - _logger.debug('No worker started for process %s', os.getpid()) - return True - - @api.model - def enqueue_jobs(self): - """ Enqueue all the jobs assigned to the worker of the current - process - """ - worker = watcher.worker_for_db(self.env.cr.dbname) - if worker: - self._enqueue_jobs() - else: - _logger.debug('No worker started for process %s', os.getpid()) - return True - - @api.model - def _assign_jobs(self, max_jobs=None): - sql = ("SELECT id FROM queue_job " - "WHERE worker_id IS NULL " - "AND state not in ('failed', 'done') " - "AND active = true " - "ORDER BY eta NULLS LAST, priority, date_created ") - if max_jobs is not None: - sql += ' LIMIT %d' % max_jobs - sql += ' FOR UPDATE NOWAIT' - # use a SAVEPOINT to be able to rollback this part of the - # transaction without failing the whole transaction if the LOCK - # cannot be acquired - worker = watcher.worker_for_db(self.env.cr.dbname) - self.env.cr.execute("SAVEPOINT queue_assign_jobs") - try: - self.env.cr.execute(sql, log_exceptions=False) - except Exception: - # Here it's likely that the FOR UPDATE NOWAIT failed to get - # the LOCK, so we ROLLBACK to the SAVEPOINT to restore the - # transaction to its earlier state. The assign will be done - # the next time. - self.env.cr.execute("ROLLBACK TO queue_assign_jobs") - _logger.debug("Failed attempt to assign jobs, likely due to " - "another transaction in progress. 
" - "Trace of the failed assignment of jobs on worker " - "%s attempt: ", worker.uuid, exc_info=True) - return - job_rows = self.env.cr.fetchall() - if not job_rows: - _logger.debug('No job to assign to worker %s', worker.uuid) - return - job_ids = [id for id, in job_rows] - - try: - worker_id = self._worker().id - except AssertionError as e: - _logger.exception(e) - return - _logger.debug('Assign %d jobs to worker %s', len(job_ids), - worker.uuid) - # ready to be enqueued in the worker - try: - self.env['queue.job'].browse(job_ids).write( - {'state': 'pending', - 'worker_id': worker_id, - } - ) - except Exception: - pass # will be assigned to another worker - - def _enqueue_jobs(self): - """ Add to the queue of the worker all the jobs not - yet queued but already assigned.""" - job_model = self.env['queue.job'] - try: - db_worker_id = self._worker().id - except AssertionError as e: - _logger.exception(e) - return - jobs = job_model.search([('worker_id', '=', db_worker_id), - ('state', '=', 'pending')], - ) - worker = watcher.worker_for_db(self.env.cr.dbname) - for job in jobs: - worker.enqueue_job_uuid(job.uuid) - - class RequeueJob(models.TransientModel): _name = 'queue.requeue.job' _description = 'Wizard to requeue a selection of jobs' diff --git a/connector/queue/model_view.xml b/connector/queue/model_view.xml index 22047999e..c6dee32f9 100644 --- a/connector/queue/model_view.xml +++ b/connector/queue/model_view.xml @@ -2,50 +2,6 @@ - - queue.worker.form - queue.worker - -

- - - - - - - - - - - -
- - - - - queue.worker.tree - queue.worker - - - - - - - - - - - - Workers - queue.worker - form - tree,form - {} - - - queue.job.form queue.job @@ -83,7 +39,6 @@ - diff --git a/connector/queue/queue_data.xml b/connector/queue/queue_data.xml index 93465583a..4e4d11707 100644 --- a/connector/queue/queue_data.xml +++ b/connector/queue/queue_data.xml @@ -9,24 +9,6 @@ - - - Enqueue Jobs - - - 1 - minutes - -1 - - - - - 1 - - AutoVacuum Queue Jobs diff --git a/connector/security/ir.model.access.csv b/connector/security/ir.model.access.csv index 35407b03a..377c11fb1 100644 --- a/connector/security/ir.model.access.csv +++ b/connector/security/ir.model.access.csv @@ -1,5 +1,4 @@ id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink -access_connector_queue_worker_manager,connector worker manager,connector.model_queue_worker,connector.group_connector_manager,1,1,1,1 access_connector_queue_job_manager,connector job manager,connector.model_queue_job,connector.group_connector_manager,1,1,1,1 access_connector_checkpoint_manager,connector checkpoint manager,connector.model_connector_checkpoint,connector.group_connector_manager,1,1,1,1 access_connector_queue_job_function_manager,connector job functions manager,connector.model_queue_job_function,connector.group_connector_manager,1,1,1,1 diff --git a/connector/tests/test_job.py b/connector/tests/test_job.py index 243db6637..480e1429a 100644 --- a/connector/tests/test_job.py +++ b/connector/tests/test_job.py @@ -243,24 +243,19 @@ def test_set_pending(self): self.assertEquals(job_a.state, PENDING) self.assertFalse(job_a.date_enqueued) self.assertFalse(job_a.date_started) - self.assertFalse(job_a.worker_uuid) self.assertEquals(job_a.retry, 0) self.assertEquals(job_a.result, 'test') def test_set_enqueued(self): job_a = Job(func=task_a) - worker = mock.Mock(name='Worker') - uuid = 'ae7d1161-dc34-40b1-af06-8057c049133e' - worker.uuid = 'ae7d1161-dc34-40b1-af06-8057c049133e' datetime_path = 'openerp.addons.connector.queue.job.datetime' with mock.patch(datetime_path, autospec=True) as mock_datetime: mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0) - job_a.set_enqueued(worker) + job_a.set_enqueued() self.assertEquals(job_a.state, ENQUEUED) self.assertEquals(job_a.date_enqueued, datetime(2015, 3, 15, 16, 41, 0)) - self.assertEquals(job_a.worker_uuid, uuid) self.assertFalse(job_a.date_started) def test_set_started(self): @@ -285,7 +280,6 @@ def test_set_done(self): self.assertEquals(job_a.result, 'test') self.assertEquals(job_a.date_done, datetime(2015, 3, 15, 16, 41, 0)) - self.assertFalse(job_a.worker_uuid) self.assertFalse(job_a.exc_info) def test_set_failed(self): @@ -293,7 +287,6 @@ def test_set_failed(self): job_a.set_failed(exc_info='failed test') self.assertEquals(job_a.state, FAILED) self.assertEquals(job_a.exc_info, 'failed test') - self.assertFalse(job_a.worker_uuid) def test_cancel(self): job_a = Job(func=task_a) @@ -421,21 +414,6 @@ def test_read(self): delta=delta) self.assertEqual(job_read.canceled, True) - def test_job_worker(self): - worker = self.env['queue.worker'].create( - {'uuid': '57569b99-c2c1-47b6-aad1-72f953c92c87'} - ) - test_job = Job(func=dummy_task_args, - model_name='res.users', - args=('o', 'k'), - kwargs={'c': '!'}) - test_job.worker_uuid = worker.uuid - storage = OpenERPJobStorage(self.session) - self.assertEqual(storage._worker_id(worker.uuid), worker.id) - storage.store(test_job) - job_read = storage.load(test_job.uuid) - self.assertEqual(job_read.worker_uuid, worker.uuid) - def 
test_job_unlinked(self): test_job = Job(func=dummy_task_args, model_name='res.users', @@ -547,7 +525,6 @@ def test_requeue(self): stored.write({'state': 'failed'}) stored.requeue() self.assertEqual(stored.state, PENDING) - self.assertFalse(stored.worker_id) def test_message_when_write_fail(self): stored = self._create_job() @@ -589,7 +566,6 @@ def test_wizard_requeue(self): active_ids=stored.ids) model.create({}).requeue() self.assertEqual(stored.state, PENDING) - self.assertFalse(stored.worker_id) class TestJobStorageMultiCompany(common.TransactionCase): From 07184c3b72899df6ed2abef469c0d9bde1f147dd Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 15:35:37 +0100 Subject: [PATCH 39/43] [PEP8] --- connector/queue/model.py | 1 - 1 file changed, 1 deletion(-) diff --git a/connector/queue/model.py b/connector/queue/model.py index 7f4e65db3..136a189b0 100644 --- a/connector/queue/model.py +++ b/connector/queue/model.py @@ -19,7 +19,6 @@ # ############################################################################## -import os import logging from datetime import datetime, timedelta From 9898bde0796ae2a772b55a86a17221826aa724a2 Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 16:37:02 +0100 Subject: [PATCH 40/43] [FIX] In the last release of Odoo, the related partner created from an inactive user is also inactive Fix the test accordingly (https://github.com/odoo/odoo/commit/328d34a22168bd9106dde810981bfd838b675a5c) --- connector/tests/test_job.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/connector/tests/test_job.py b/connector/tests/test_job.py index 480e1429a..b456f2409 100644 --- a/connector/tests/test_job.py +++ b/connector/tests/test_job.py @@ -534,6 +534,8 @@ def test_message_when_write_fail(self): self.assertEqual(len(messages), 2) def test_follower_when_write_fail(self): + """Check that inactive users are not followers even if + they are linked to an active partner""" group = self.env.ref('connector.group_connector_manager') vals = {'name': 'xx', 'login': 'xx', 'active': False, } inactiveusr = self.user.create(vals) - self.assertTrue(inactiveusr.partner_id.active) + inactiveusr.partner_id.active = True self.assertFalse(inactiveusr in group.users) stored = self._create_job() stored.write({'state': 'failed'}) From a6b8d1d55ffe2cdfaadc528a457cc1719e75e43b Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 15 Feb 2016 18:30:10 +0100 Subject: [PATCH 41/43] [FIX] The first version is 9.0.1.0.0 --- connector/__openerp__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector/__openerp__.py b/connector/__openerp__.py index 19918194a..fff173a81 100644 --- a/connector/__openerp__.py +++ b/connector/__openerp__.py @@ -20,7 +20,7 @@ ############################################################################## {'name': 'Connector', - 'version': '9.0.0.1.0', + 'version': '9.0.1.0.0', 'author': 'Camptocamp,Openerp Connector Core Editors,' 'Odoo Community Association (OCA)', 'website': 'http://odoo-connector.com', From a559f441705841bb9efb893fed08735da419f05e Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Thu, 18 Feb 2016 17:16:18 +0100 Subject: [PATCH 42/43] [FIX] fix path of odoo for doc build --- connector/doc/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector/doc/conf.py b/connector/doc/conf.py index 3a5027fcd..c31987c71 100644 --- a/connector/doc/conf.py +++
b/connector/doc/conf.py @@ -25,7 +25,7 @@ if os.environ.get('TRAVIS_BUILD_DIR') and os.environ.get('VERSION'): # build from travis - odoo_folder = 'odoo-8.0' + odoo_folder = 'odoo-' + os.environ.get('VERSION') odoo_root = os.path.join(os.environ['HOME'], odoo_folder) sphinxodoo_root_path = os.path.abspath(odoo_root) sphinxodoo_addons_path = [ From 34cda0ebde4a539fe8e6645d81f943f46105231a Mon Sep 17 00:00:00 2001 From: "Laurent Mignon (ACSONE)" Date: Mon, 29 Feb 2016 10:03:48 +0100 Subject: [PATCH 43/43] [DEL] Remove deprecated APIs * In openerp.addons.connector.session.ConnectorSession: * Remove method 'search' * Remove method 'write' * Remove method 'browse' * Remove method 'read' * Remove method 'create' * Remove method 'unlink' * In openerp.addons.connector.connector.ConnectorUnit: * Remove method 'get_connector_unit_for_model' * Remove method 'get_binder_for_model' * Remove property 'environment' * In openerp.addons.connector.connector.ConnectorEnvironment: * Remove method 'set_lang' * In openerp.addons.connector.connector.MetaConnectorUnit: * Remove property 'model_name' * Remove class openerp.addons.connector.connector.Environment * Remove method openerp.addons.connector.connector.install_in_connector * In openerp.addons.connector.unit.synchronizer: * Remove class 'ExportSynchronizer' * Remove class 'ImportSynchronizer' * Remove class 'DeleteSynchronizer' * Remove module openerp.addons.connector.deprecate. * --- connector/connector.py | 45 --------------------------- connector/deprecate.py | 56 ---------------------------------- connector/session.py | 45 --------------------------- connector/unit/synchronizer.py | 9 ------ 4 files changed, 155 deletions(-) delete mode 100644 connector/deprecate.py diff --git a/connector/connector.py b/connector/connector.py index 46b81dfcf..3537a28b8 100644 --- a/connector/connector.py +++ b/connector/connector.py @@ -26,7 +26,6 @@ from contextlib import contextmanager from openerp import models, fields -from .deprecate import log_deprecate, DeprecatedClass from .exception import RetryableJobError _logger = logging.getLogger(__name__) @@ -51,11 +50,6 @@ def _get_openerp_module_name(module_path): return module_name -def install_in_connector(): - log_deprecate("This call to 'install_in_connector()' has no effect and is " - "not required.") - - def is_module_installed(env, module_name): """ Check if an Odoo addon is installed. @@ -84,11 +78,6 @@ class MetaConnectorUnit(type): the state of the module (installed or not).
""" - @property - def model_name(cls): - log_deprecate('renamed to for_model_names') - return cls.for_model_names - @property def for_model_names(cls): """ Returns the list of models on which a @@ -139,11 +128,6 @@ def __init__(self, connector_env): self.backend_record = self.connector_env.backend_record self.session = self.connector_env.session - @property - def environment(self): - log_deprecate('renamed to connector_env') - return self.connector_env - @classmethod def match(cls, session, model): """ Returns True if the current class correspond to the @@ -212,25 +196,11 @@ def unit_for(self, connector_unit_class, model=None): return env.get_connector_unit(connector_unit_class) - def get_connector_unit_for_model(self, connector_unit_class, model=None): - """ Deprecated in favor of :meth:`~unit_for` """ - log_deprecate('renamed to unit_for()') - return self.unit_for(connector_unit_class, model=model) - def binder_for(self, model=None): """ Returns an new instance of the correct ``Binder`` for a model """ return self.unit_for(Binder, model) - def get_binder_for_model(self, model=None): - """ Returns an new instance of the correct ``Binder`` for - a model - - Deprecated, use ``binder_for`` now. - """ - log_deprecate('renamed to binder_for()') - return self.binder_for(model=model) - def advisory_lock_or_retry(self, lock, retry_seconds=1): """ Acquire a Postgres transactional advisory lock or retry job @@ -329,18 +299,6 @@ def pool(self): def env(self): return self.session.env - @contextmanager - def set_lang(self, code): - """ Change the working language in the environment. - - It changes the ``lang`` key in the session's context. - - - """ - raise DeprecationWarning('ConnectorEnvironment.set_lang has been ' - 'deprecated. session.change_context should ' - 'be used instead.') - def get_connector_unit(self, base_class): """ Searches and returns an instance of the :py:class:`~connector.connector.ConnectorUnit` for the current @@ -379,9 +337,6 @@ def create_environment(cls, backend_record, session, model, else: return cls(backend_record, session, model) -Environment = DeprecatedClass('Environment', - ConnectorEnvironment) - class Binder(ConnectorUnit): """ For one record of a model, capable to find an external or diff --git a/connector/deprecate.py b/connector/deprecate.py deleted file mode 100644 index 97c9ceeab..000000000 --- a/connector/deprecate.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -############################################################################## -# -# Author: Guewen Baconnier -# Copyright 2015 Camptocamp SA -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
-# -############################################################################## - -import inspect -import logging - - -def log_deprecate(message): - # get the caller of the deprecated method - frame, __, lineno, funcname, __, __ = inspect.stack()[2] - module = inspect.getmodule(frame) - logger = logging.getLogger(module.__name__) - logger.warning('Deprecated: %s at line %r: %s', funcname, lineno, message) - - -class DeprecatedClass(object): - - def __init__(self, oldname, replacement): - self.oldname = oldname - self.replacement = replacement - - def _warning(self): - frame, __, lineno, funcname, __, __ = inspect.stack()[2] - module = inspect.getmodule(frame) - logger = logging.getLogger(module.__name__) - lineno = lineno - logger.warning('Deprecated: class %s must be replaced by %s ' - 'at line %r', - self.oldname, - self.replacement.__name__, - lineno) - - def __call__(self, *args, **kwargs): - self._warning() - return self.replacement(*args, **kwargs) - - def __getattr__(self, *args, **kwargs): - return getattr(self.replacement, *args, **kwargs) diff --git a/connector/session.py b/connector/session.py index 4bba37c7e..eecdf9f0a 100644 --- a/connector/session.py +++ b/connector/session.py @@ -27,7 +27,6 @@ from openerp.modules.registry import RegistryManager from .connector import is_module_installed -from .deprecate import log_deprecate _logger = logging.getLogger(__name__) @@ -205,50 +204,6 @@ def close(self): """ Close the cursor """ self.cr.close() - def search(self, model, domain, limit=None, offset=0, order=None): - """ Shortcut to :py:class:`openerp.models.BaseModel.search` """ - log_deprecate("'Session(...).search(...)' has been deprecated in " - "favor of 'Session(...).env['model'].search(...)'") - return self.pool[model].search(self.cr, self.uid, domain, - limit=limit, offset=offset, - order=order, context=self.context) - - def browse(self, model, ids): - """ Shortcut to :py:class:`openerp.models.BaseModel.browse` """ - model_obj = self.pool[model] - log_deprecate("'Session(...).browse(...)' has been deprecated in " - "favor of 'Session(...).env['model'].browse(...)'") - return model_obj.browse(self.cr, self.uid, ids, context=self.context) - - def read(self, model, ids, fields): - """ Shortcut to :py:class:`openerp.models.BaseModel.read` """ - log_deprecate("'Session(...).read(...)' has been deprecated in " - "favor of 'Session(...).env['model'].read(...)'") - return self.pool[model].read(self.cr, self.uid, ids, fields, - context=self.context) - - def create(self, model, values): - """ Shortcut to :py:class:`openerp.models.BaseModel.create` """ - log_deprecate("'Session(...).create(...)' has been deprecated in " - "favor of 'Session(...).env['model'].create(...)'") - return self.pool[model].create(self.cr, self.uid, values, - context=self.context) - - def write(self, model, ids, values): - """ Shortcut to :py:class:`openerp.models.BaseModel.write` """ - log_deprecate("'Session(...).write(...)' has been deprecated in " - "favor of 'Session(...).env['model'].write(...)'") - return self.pool[model].write(self.cr, self.uid, ids, values, - context=self.context) - - def unlink(self, model, ids): - model_obj = self.pool[model] - _logger.warning("'Session.unlink()' has been deprecated, prefer " - "'self.env['model'].unlink()' or " - "self.recordset().unlink() if you are in a " - "ConnectorUnit.") - return model_obj.unlink(self.cr, self.uid, ids, context=self.context) - def __repr__(self): return '' % (self.cr.dbname, self.uid, diff --git a/connector/unit/synchronizer.py 
b/connector/unit/synchronizer.py index 86b3515f3..1302e0086 100644 --- a/connector/unit/synchronizer.py +++ b/connector/unit/synchronizer.py @@ -90,20 +90,11 @@ class Exporter(Synchronizer): _base_mapper = ExportMapper -ExportSynchronizer = Exporter # deprecated - - class Importer(Synchronizer): """ Synchronizer for importing data from a backend to OpenERP """ _base_mapper = ImportMapper -ImportSynchronizer = Importer # deprecated - - class Deleter(Synchronizer): """ Synchronizer for deleting a record on the backend """ - - -DeleteSynchronizer = Deleter # deprecated
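
For add-ons that still call the helpers removed by this last patch, a minimal migration sketch follows; the replacement calls are the ones named by the deleted deprecation messages, while MyExporter, my.binding.model and the res.partner domain are placeholders, not part of the connector module:

    # Sketch only: assumes an existing connector backend and binding model.
    from openerp.addons.connector.connector import Binder
    from openerp.addons.connector.unit.synchronizer import Exporter  # was: ExportSynchronizer


    class MyExporter(Exporter):  # placeholder ConnectorUnit subclass
        _model_name = 'my.binding.model'  # placeholder binding model

        def run(self, binding_id):
            # was: self.get_binder_for_model('my.binding.model')
            binder = self.binder_for('my.binding.model')
            # was: self.get_connector_unit_for_model(Binder, 'my.binding.model')
            unit = self.unit_for(Binder, model='my.binding.model')
            # was: self.environment
            connector_env = self.connector_env
            # was: self.session.search('res.partner', domain) / .browse / .read / ...
            partners = self.session.env['res.partner'].search([('customer', '=', True)])
            return binder, unit, connector_env, partners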