diff --git a/integrator/__init__.py b/integrator/__init__.py
new file mode 100644
index 00000000..0650744f
--- /dev/null
+++ b/integrator/__init__.py
@@ -0,0 +1 @@
+from . import models
diff --git a/integrator/__manifest__.py b/integrator/__manifest__.py
new file mode 100644
index 00000000..2dc50220
--- /dev/null
+++ b/integrator/__manifest__.py
@@ -0,0 +1,44 @@
+##############################################################################
+#
+# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
+# All Rights Reserved.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+{
+ 'name': 'Odoo Integrator',
+ 'version': "16.0.1.0.0",
+ 'category': 'SaaS',
+ 'author': 'ADHOC SA',
+ 'website': 'www.adhoc.com.ar',
+ 'license': 'AGPL-3',
+ 'depends': [
+ 'base', 'mail',
+ ],
+ 'data': [
+ 'data/ir_server_action.xml',
+ 'security/integrator_security.xml',
+ 'security/ir.model.access.csv',
+ 'views/ir_ui_menuitem.xml',
+ 'views/integrator_account_views.xml',
+ 'views/integrator_integration_views.xml',
+ 'views/integrator_integration_script_views.xml',
+ ],
+ 'installable': True,
+ 'auto_install': False,
+ 'application': True,
+ 'demo': [
+ ],
+}
diff --git a/integrator/data/ir_server_action.xml b/integrator/data/ir_server_action.xml
new file mode 100644
index 00000000..2ad89150
--- /dev/null
+++ b/integrator/data/ir_server_action.xml
@@ -0,0 +1,14 @@
+
+
+
+
+ Adelantar ejecución de cron
+
+ ir.actions.server
+ code
+
+record.cron_id.sudo().write({'nextcall': datetime.datetime.now()})
+
+
+
+
diff --git a/integrator/models/__init__.py b/integrator/models/__init__.py
new file mode 100644
index 00000000..a00768b4
--- /dev/null
+++ b/integrator/models/__init__.py
@@ -0,0 +1,4 @@
+from . import integrator_account
+from . import integrator_integration_script
+from . import integrator_integration
+from . import integrator_integration_script_line
diff --git a/integrator/models/integrator_account.py b/integrator/models/integrator_account.py
new file mode 100644
index 00000000..4bebe4b4
--- /dev/null
+++ b/integrator/models/integrator_account.py
@@ -0,0 +1,84 @@
+from odoo import api, models, fields, _
+from odoo.exceptions import UserError
+from odooly import Client
+import logging
+_logger = logging.getLogger(__name__)
+
+
+class IntegratorAccount(models.Model):
+
+ _name = 'integrator.account'
+ _inherit = ['mail.composer.mixin', 'mail.thread', 'mail.activity.mixin']
+ _description = 'Integration Account'
+ _mailing_enabled = True
+
+ name = fields.Char(required=True, tracking=True, store=True, compute='_compute_name', default=lambda self: _('New'))
+ odoo_hostname = fields.Char("Hostname", required=True, tracking=True)
+ odoo_db_name = fields.Char("Database Name", required=True, tracking=True)
+ odoo_user = fields.Char("Username or E-Mail", required=True, tracking=True)
+ odoo_password = fields.Char("Password", required=True,)
+ state = fields.Selection([('draft', 'Draft'), ('confirmed', 'Confirmed')], copy=False, default='draft',
+ required=True, tracking=True)
+ channel_alias = fields.Char('Alias', default=False)
+
+ @api.depends('odoo_db_name')
+ def _compute_name(self):
+ for rec in self:
+ rec.name = rec.odoo_db_name
+
+ def back_to_draft(self):
+ self.write({'state': 'draft'})
+
+ def test_and_confirm(self):
+ self.test_connection()
+ self.write({'state': 'confirmed'})
+
+ def test_connection(self):
+ """ Odoo Connection Test.
+ Returns True if successful.
+ Raises a UserError otherwise.
+ """
+ self.ensure_one()
+ try:
+ # Attempt to get client
+ client = self._odoo_get_client()
+ except Exception as e:
+ raise UserError("Unable to connect to Odoo. "
+ "The server responded: {}".format(str(e)))
+ # Make sure version is correct
+ self._odoo_ensure_version(client)
+ # Notify Success
+ result = "Connection with Odoo was successful!"
+ return {
+ 'type': 'ir.actions.client',
+ 'tag': 'display_notification',
+ 'params': {
+ 'title': _('Success'),
+ 'type': 'success',
+ 'message': result,
+ 'sticky': False,
+ }
+ }
+
+ def _odoo_get_client(self):
+ self.ensure_one()
+ try:
+ return Client(
+ # Use JSONRPC to prevent error when server responds with None
+ self.odoo_hostname.strip("/") + "/jsonrpc",
+ db=self.odoo_db_name,
+ user=self.odoo_user,
+ password=self.odoo_password,
+ )
+ except Exception as e:
+ raise UserError("Unable to Connect to Database. Error: %s" % e)
+
+ def _odoo_ensure_version(self, client):
+ """ Makes sure Odoo version is supported
+ """
+ odoo_version = int(client.server_version.split(".")[0])
+ if odoo_version < 13:
+ raise UserError(
+ "The Odoo version on the remote system is not supported. "
+ "Please upgrade to v13.0 or higher")
+ return True
diff --git a/integrator/models/integrator_integration.py b/integrator/models/integrator_integration.py
new file mode 100644
index 00000000..da4d05aa
--- /dev/null
+++ b/integrator/models/integrator_integration.py
@@ -0,0 +1,522 @@
+from odoo import models, fields, api, _, SUPERUSER_ID
+from odoo.exceptions import UserError, ValidationError
+from odoo.tools import safe_eval
+import dateutil
+import datetime
+import time
+import pytz
+import logging
+import html
+from lxml.html import fromstring
+import html2text
+from dateutil.relativedelta import relativedelta
+import random
+from odoo.netsvc import ColoredFormatter, RESET_SEQ, COLOR_SEQ, COLOR_PATTERN
+import psycopg2
+
+
+_logger = logging.getLogger(__name__)
+
+MAX_RETRIES = 5
+
+
+class IntegratorIntegration(models.Model):
+
+ _name = 'integrator.integration'
+ _inherit = ['mail.composer.mixin', 'mail.thread', 'mail.activity.mixin']
+ _description = 'Integrator Integration'
+ _mailing_enabled = True
+
+ name = fields.Char(required=True, tracking=True, store=True, compute='_compute_name', default=lambda self: _('New'))
+ state = fields.Selection([('draft', 'Draft'),
+ ('confirmed', 'Confirmed')],
+ copy=False, default='draft', required=True,
+ tracking=True)
+ cron_id = fields.Many2one('ir.cron', 'Cron Task',
+ ondelete='restrict', copy=False)
+ last_sync_start = fields.Datetime(
+ help='Campo auxiliar utilizado para almacenar el momento el en cual inicia la sincro para que, si todo termina'
+ ' bien, sea esta fecha la que se setea como ultima fecha de sincron (para no perder cosas que se hayan '
+ 'actualizado entre el momento de incio de ejecución de y fin, y tmb por el procesamiento en baches)')
+ last_cron_execution = fields.Datetime(string="Last Execution")
+ cron_nextcall = fields.Datetime(related='cron_id.nextcall', string="Next Call Execution")
+
+ # Odoo2odoo Specific Fields
+ odoo_db2 = fields.Many2one(
+ "integrator.account", string="Remote Db",
+ required=True,
+ states={'confirmed': [('readonly', True)]},
+ tracking=True,
+ domain="[('state', '!=', 'draft')]",)
+ script_line_ids = fields.One2many(
+ "integrator.integration.script_line", "integration_id",
+ string="Scripts",
+ copy=True, context={'active_test': False})
+ error_count = fields.Integer()
+ active = fields.Boolean("Active", default=True)
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ res = super().create(vals_list)
+ # res._create_sequence()
+ res._create_cron()
+ return res
+
+ @api.depends('odoo_db2', 'odoo_db2.name')
+ def _compute_name(self):
+ for rec in self:
+ rec.name = _("%s ~ %s") % (self.env.cr.dbname, rec.odoo_db2.name)
+
+ def write(self, vals):
+ for rec in self:
+ if 'active' in vals and not vals['active'] and rec.state != 'draft':
+ raise UserError(_("You cannot archive integrations that are currently active. First stop the active integration so you can archive it."))
+ # if 'active' in vals and rec.state == 'draft':
+ # logs = rec.env["integrator.integration.logging"].with_context({'active_test': False}).search([('integration_id', '=', rec.id)])
+ # if vals['active']:
+ # for log in logs:
+ # log.action_unarchive()
+ # else:
+ # for log in logs:
+ # log.action_archive()
+ res = super().write(vals)
+ return res
+
+ def unlink(self):
+ """ Deletes the integration, then the associated cron task and sequence.
+ """
+ if any(rec.state != 'draft' for rec in self):
+ raise UserError(_('You cannot delete integrations that are currently active. First stop the active integration so you can delete it.'))
+
+ # # Unlink logs
+ # self.env["integrator.integration.logging"].search([('integration_id', 'in', self.ids)]).unlink()
+
+ # Unlink sequence
+ # self.env['ir.sequence'].sudo().search([('code', 'in', ["integration.sync.%s" % id for id in self.ids])]).unlink()
+
+ cron_ids = self.cron_id.ids
+
+ result = super().unlink()
+
+ # Unlink asociated crons
+ self.env["ir.cron"].sudo().browse(cron_ids).unlink()
+
+ return result
+
+ def _create_cron(self, interval=30):
+ """ Create a cron task associated to the given records without one
+ """
+ for rec in self:
+ if not rec.cron_id:
+ # Create a specific cron task for this integration
+ _logger.info(
+ "Creating Cron Task for Integration #{}".format(self.id))
+ code = "model.browse({})._cron_sync()".format(rec.id)
+ dict_data = {
+ "name": "Integrator Sync {}".format(rec.id),
+ "active": True,
+ "code": code,
+ "user_id": self.env.ref("base.user_root").id,
+ "model_id": self.env.ref("integrator.model_integrator_integration").id,
+ "interval_number": interval,
+ "interval_type": "minutes",
+ "numbercall": -1,
+ "doall": False,
+ "nextcall": (datetime.datetime.now() + datetime.timedelta(
+ minutes=interval)).strftime('%Y-%m-%d %H:%M:%S'),
+ "state": "code",
+ "priority": 1000,
+ }
+ cron = self.env["ir.cron"].sudo().with_user(SUPERUSER_ID).create(dict_data)
+
+ # Link them together
+ rec.write({"cron_id": cron.id})
+ return True
+
+ def _cron_sync(self):
+ return self.with_context(is_cron=True).sync()
+
+ def _is_cron_running(self, cron_id):
+ """ With this query we check if a cron is running.
+ The code is taken from the method _try_lock from ir_cron model.
+ We don't call the method directly to manage the exception.
+ """
+ try:
+ self._cr.execute(f"""
+ SELECT id
+ FROM "{cron_id._table}"
+ WHERE id = {cron_id.id}
+ FOR NO KEY UPDATE NOWAIT
+ """, log_exceptions=False)
+ except psycopg2.OperationalError:
+ self._cr.rollback() # early rollback to allow translations to work for the user feedback
+ raise UserError(_("This synchronization is currently being executed. "
+ "Please try again in a few minutes"))
+
+ def back_to_draft(self):
+ for rec in self:
+ rec.write({'state': 'draft'})
+
+ def test_and_confirm(self):
+ res = self.test_connection()
+ if res:
+ return res
+ self.write({'state': 'confirmed'})
+ self.script_line_ids.write({'state': 'pending', 'next_offset': False})
+ self.last_cron_execution = datetime.datetime.now()
+
+ def sync(self):
+ """ Sync for Odoo Integrations.
+ Corremos un script en cada corrida de cron. Cuando todos se corrieron seteamos fecha de ultima corrida
+ """
+ self.ensure_one()
+ if 'is_cron' not in self.env.context:
+ self._is_cron_running(self.cron_id)
+
+ if not self.state == 'confirmed':
+ return
+
+ _logger.info("Syncing Odoo Integration: Integration '%s'", self.name)
+
+ jobs = self.script_line_ids.filtered('active').sorted('sequence')
+ # si todo estan en pending estamos empezando una nueva corrida y los ponemos en cola
+ if all(j.state == 'pending' for j in jobs):
+ _logger.info("New sync execution, enqueuing tasks, integration '%s'", self.name)
+ jobs.write({'state': 'enqueued'})
+ self.last_sync_start = datetime.datetime.now()
+ next_job = jobs[0]
+ else:
+ next_job = jobs.filtered(lambda j: j.state == 'started') or jobs.filtered(
+ lambda j: j.state == 'enqueued')
+ if not next_job:
+ raise ValidationError(_('Error de programación, no hay proximo script para correr'))
+ next_job = next_job[0]
+
+ # corremos el proximo script
+ self.odoo_run_job(next_job)
+
+ # si todos se terminaron reseteamos estado 'pending' para nueva corrida y actualizamos fecha de ejecucion
+ if all(j.state == 'done' for j in jobs):
+ jobs.write({'state': 'pending'})
+ _logger.info('Finish sync execution, last cron execution to %s', self.last_sync_start)
+ self.last_cron_execution = self.last_sync_start
+ elif self._context.get('cron_id'):
+ self.env['ir.cron'].browse(self._context.get('cron_id'))._trigger()
+
+
+
+
+ # TODO: post error Odumbo channel
+ # Post errors
+ # errors = result.get('error')
+
+ # if errors:
+ # template = self.env.ref("integrator.integrator_logging_error_email_template")
+ # if template:
+ # template.send_mail(self.id, force_send=True)
+
+ def action_reset_last_sync(self):
+ """ Resets last sync date to allow sync everything again
+ """
+ self.last_cron_execution = False
+ self.script_line_ids.write({'state': 'pending', 'next_offset': False})
+
+ def test_connection(self):
+ """ Test for Odoo Integrations.
+ """
+ self.ensure_one()
+ user_call = True
+ if 'is_cron' in self.env.context:
+ user_call = False
+
+ if not self.script_line_ids.filtered('active'):
+ _logger.error("Integration %s. No active scripts found", self.id)
+ if user_call:
+ raise UserError(
+ "No hay scripts activos para la integración %s" % self.id)
+
+ try:
+ assert self.odoo_db2
+ assert self.script_line_ids
+ except Exception:
+ _logger.error(
+ "Integration {}: A required field is missing".format(self.id))
+ if user_call:
+ raise UserError("A required field is missing. Please verify.")
+
+ def odoo_run_job(self, job):
+ """ Run a single job """
+ def sync_model(
+ target_db, model_name, common_fields=[], datetime_fields=[],
+ boolean_fields=[], m2o_fields=[], m2m_fields=[], domain=[],
+ sort="id", offset=0, limit=100, target_model_name=None):
+ """ Helper method for allowing users to synchronize a model
+ across two different Odoo clients.
+ """
+
+ def get_external_id(model_name, rec_id):
+ """ Builds an external ID out of a model name and an ID.
+ """
+ return "__odumbo__.{}_{}".format(
+ model_name.replace(".", "_"), rec_id)
+
+ start_time = datetime.datetime.now()
+ _logger.debug("[sync_model] Synchronizing model '{}' with limit %s and offest %s".format(model_name, limit, offset))
+
+ # Remove dots from model name
+ norm_name = model_name.replace(".", "_")
+
+ # Performs a SEARCH operation
+ records = self.env[model_name].search(domain, order=sort, offset=offset, limit=limit)
+ if len(records) == 0:
+ _logger.info("[sync_model] No records were found in model %s meeting the given criteria", model_name)
+ return
+ _logger.info(
+ "[sync_model] Syncing %s records for model %s (limit %s, offset %s)",
+ len(records), model_name, limit, offset)
+
+ #####
+ # Create XML ids for odumbo for any record that has sane XML id on target db and source db (for eg. uoms)
+ # but only for XML ids created by modules (not __import__ or __export__)
+ #####
+
+ # Obtain all automatic XML ids (not imported, exported or nulls)
+ # Keep in mind many XML ids may point to the same record
+ search_fields = ["complete_name", "module", "res_id", "name"]
+ # TODO: se deber agregar sudo() para ir.model.data ??
+ moddata_read = self.env["ir.model.data"].search_read([
+ ("res_id", "in", records.ids),
+ ("module", "not in", ["__import__", "__export__"]),
+ ("model", "=", model_name)], search_fields)
+
+ # We can't search by complete_name since it's a computed field.
+ # If it wasn't, we would just do a recs.mapped("complente_name")
+ # and search for these recs in the target DB
+ if moddata_read:
+ _logger.debug("Found {} records with existing XML ID".format(len(moddata_read)))
+ # Fields we need to bring
+ # Read necessary data from source DB
+ _logger.debug("moddata_read: {}".format(moddata_read))
+
+ # Now build a dict keyed by the complete_name
+ moddata_dict = {}
+ for item in moddata_read:
+ moddata_dict[item["complete_name"]] = item
+
+ _logger.debug("moddata_dict: {}".format(moddata_dict))
+
+ # Now we search for all these records that match our model,
+ # all names, and all modules
+ # TODO: se deber agregar sudo() para ir.model.data ??
+ target_read = target_db.env["ir.model.data"].search_read([
+ ("module", "in", list(set([item["module"] for item in moddata_read]))),
+ ("name", "in", [item["name"] for item in moddata_read]),
+ ("model", "=", target_model_name or model_name)], search_fields)
+
+ _logger.debug("taget_read: {}".format(target_read))
+
+ # Prepare export data
+ data = []
+ for tr in target_read:
+ # If remote key exists in source database
+ if tr["complete_name"] in moddata_dict:
+ # Create a odumbo xmlid
+ name = "{}_{}".format(
+ norm_name,
+ moddata_dict[tr["complete_name"]]["res_id"])
+ module = "__odumbo__"
+ res_id = tr["res_id"]
+ data.append([name, target_model_name or model_name, module, res_id])
+
+ _logger.debug("data: {}".format(data))
+
+ # TODO ver de mejorar, actualmente esto nos está dando error en futuras iteraciones pero
+ # no lo devolvemos, baicamente porque esto lo importamos sin external id y queremos importar external ids
+ # de nuevo y ya existen. Tal vez habria que aprovechar a revisar toda esta lociga, tal vez usar
+ # export_data? y mapear por name o por ese modulo de oca que permite definir otros criterios?
+ if data:
+ # TODO: se deber agregar sudo() para ir.model.data ??
+ outcome = target_db.env["ir.model.data"].load(
+ ["name", "model", "module", "res_id"], data)
+
+ _logger.debug(outcome)
+ #####
+ # Finish creating XML ids for modules data
+ #####
+ # Perform READ operation a single time
+ _logger.debug("[sync_model] Reading remote data for model %s", model_name)
+ all_fields = common_fields + boolean_fields + datetime_fields + m2o_fields + m2m_fields
+ items = records.read(all_fields)
+
+ _logger.debug("[sync_model] Adapting data previous to import for model %s", model_name)
+ # Create a mapping between provided fields and converted fields
+ fields_map = dict()
+ for field in all_fields:
+ if field in m2o_fields or field in m2m_fields:
+ fields_map[field] = field + "/id"
+ else:
+ fields_map[field] = field
+
+ # Collect all values for loading in a single operation
+ values = list()
+
+ for item in items:
+ # Convert to string boolean fields (So that load don't fail because we search_read and load expects
+ # different kind of data)
+ for boolean_field in (boolean_fields + datetime_fields) or []:
+ if item[boolean_field]:
+ item[boolean_field] = str(item[boolean_field])
+
+ # Generate XML ids for M2O Fields
+ for m2o_field in m2o_fields or []:
+ if item[m2o_field]:
+ rec_id = item[m2o_field][0]
+ # esta operacion no tiene ejecuta ninguna ninguna llamada rpc, lo resuelve localmente odooly
+ # rel_model_name = item[m2o_field]._model._name
+ rel_model_name = self.env['ir.model.fields'].search([('name', '=', m2o_field), ('model', '=', model_name)], limit=1).relation
+ item[m2o_field] = get_external_id(rel_model_name, rec_id)
+ else:
+ item[m2o_field] = False
+
+ # Generate XML ids for M2M Fields
+ for m2m_field in m2m_fields or []:
+ if item[m2m_field]:
+ rec_ids = item[m2m_field]
+ # rel_model_name = item[m2m_field]._model._name
+ rel_model_name = self.env['ir.model.fields'].search([('name', '=', m2m_field), ('model', '=', model_name)], limit=1).relation
+ item[m2m_field] = ','.join([get_external_id(rel_model_name, rec_id) for rec_id in rec_ids])
+ else:
+ item[m2m_field] = False
+
+ item["id"] = get_external_id(model_name, item["id"])
+
+ # List comprehension will create a list with "sorted" values
+ result = [item[field] for field in all_fields]
+ values.append(result)
+
+ # Prevent templates from generating a product by themselves
+ with_context = dict()
+ if model_name == 'product.template':
+ # YES, True means False
+ with_context["create_product_product"] = True
+ with_context["tracking_disable"] = True
+ # key that can be used on custom modules to disable constrains or change any behaviour that
+ # could help to speed up process
+ with_context["odumbo_sync"] = True
+ # for compatibility with new v13 bypass
+ with_context["bypass_base_automation"] = True
+
+ _logger.debug("[sync_model] Loading records on target db for model %s", model_name)
+
+ outcome = target_db.env[target_model_name or model_name].with_context(
+ **with_context).load(
+ [fields_map[field] for field in all_fields], values)
+
+ if outcome.get('messages'):
+ for count, msg in enumerate(outcome['messages']):
+ record = msg['record']
+ try:
+ record_id = values[record][all_fields.index('id')]
+ errors.append('[Error %s] %s: %s.' % (count + 1, record_id, msg['message']))
+ except ValueError:
+ errors.append('[Error %s] line %s: "%s".\n' % (count + 1, record, msg['message']))
+
+ errors.append('Error completo: %s' % (outcome.get('messages')))
+ elif not outcome.get('ids'):
+ errors.append('Error no atrapado al sincronizar "%s". No recibimos ni messages ni ids. Recibimos %s' % (
+ model_name, outcome))
+ else:
+ ok.append('Sincronizados "%s" elementos en "%s"' % (model_name, len(outcome.get('ids'))))
+ _logger.info(
+ "[sync_model] Finish syncking records for model %s on %s hours", model_name,
+ str((datetime.datetime.now() - start_time)))
+ _logger.info(
+ "[sync_model] Finish WITH ERRORS syncking records for model %s on %s hours", model_name,
+ str((datetime.datetime.now() - start_time)))
+
+ self.ensure_one()
+ errors = []
+ ok = []
+
+ _logger.info("Running job: '%s'", job.script_id.name)
+ job_start = datetime.datetime.now()
+ if job.state != 'started':
+ job.state = 'started'
+
+ # The following libraries and variables
+ # will be available for any job.
+ locals_dict = {
+ "self": self,
+ "db2": self.odoo_db2._odoo_get_client(),
+ "last_cron_execution": self.last_cron_execution,
+ "last_sync_start": self.last_sync_start,
+ "offset": job.next_offset,
+ "Warning": UserError,
+ "context": dict(self._context),
+ "datetime": safe_eval.datetime,
+ "dateutil": safe_eval.dateutil,
+ "timezone": safe_eval.pytz,
+ "time": safe_eval.time,
+ "errors": errors,
+ "ok": ok,
+ "sync_model": sync_model,
+ }
+
+ try:
+ safe_eval.safe_eval(job.script_id.code, locals_dict, mode="exec", nocopy=True)
+ except Exception as e:
+ errors.append(repr(e))
+
+ if errors:
+ # si hay errores ponemos mensaje y pasamos a borrador
+ if 'is_cron' not in self.env.context:
+ raise ValidationError('Error al sincronizar, esto es lo que obtuvimos:\n%s' % '\n\n'.join(errors))
+ elif self.error_count < MAX_RETRIES:
+ self.error_count += 1
+ _logger.warning('Error (%s try) found while running job "%s" (integration %s)',
+ self.error_count, job.script_id.name, self.name)
+ else:
+ self.sudo().message_post(
+ subtype_id=self.env.ref('integrator.mt_error').id,
+ body='Error al sincroniazar, se vuelve a borrador la integracion. Esto es lo que obtuvimos: %s' % (
+ '
'.join(errors)))
+ job.state = 'failed'
+ _logger.warning(
+ 'Error (last try) found while running job "%s" (integration %s), check chatter for more info',
+ job.script_id.name, self.name)
+ self.back_to_draft()
+ return
+ elif ok:
+ # si hay mensajes de ok quiere decir que algo se sincronizo, si hay limite cambiamos limite y seguimos para
+ # que lo tome proxima corrida
+ message = """
+
Job run '{}' finish sucessfully on {}
+ {}
+ """.format(job.script_id.name, str(datetime.datetime.now() - job_start), html.escape(str(ok)))
+ self.sudo().message_post(body=message)
+ self.error_count = 0
+
+ limit = locals_dict.get('limit', False)
+ if limit:
+ next_offset = job.next_offset + limit
+ _logger.info('Setting new offset = %s for next iteration', next_offset)
+ job.next_offset = next_offset
+ return
+ # si no hay limite o no no hay resultados, parcamos realizada y reseteamos offset
+ _logger.info('Finish sync execution, cleaning offset and setting job done')
+ job.write({'state': 'done', 'next_offset': False})
+
+ def action_add_odoo_account(self):
+ wiz_vals = {
+ 'partner_id': self.partner_id.id,
+ 'integration_id': self.id,
+ }
+ wiz = self.env['integrator.account.wizard'].create(wiz_vals)
+ return {
+ 'name': 'New Odoo Account',
+ 'view_mode': 'form',
+ 'res_model': 'integrator.account.wizard',
+ 'res_id': wiz.id,
+ 'type': 'ir.actions.act_window',
+ 'target': 'new',
+ }
diff --git a/integrator/models/integrator_integration_script.py b/integrator/models/integrator_integration_script.py
new file mode 100644
index 00000000..1e8d19bc
--- /dev/null
+++ b/integrator/models/integrator_integration_script.py
@@ -0,0 +1,53 @@
+import logging
+from odoo import models, fields, api
+from odoo.exceptions import UserError
+from odoo.tools.safe_eval import test_expr, _SAFE_OPCODES
+
+
+_logger = logging.getLogger(__name__)
+
+DEFAULT_CODE = """# Available Context:
+# - db2: Odoo Remote Client Database
+# - Warning: Use "raise Warning('text')" to show a dialog box with the given
+# text as parameter. Useful for debugging the script before activating the
+# integration.
+# - last_cron_execution: A datetime object with the last successful execution
+# of the script.
+# - time, datetime, dateutil, timezone: Useful Python libraries
+# - context: A dictionary with information about current user, timezone, etc
+# - sync_model(target_db, model_name, common_fields=None, boolean_fields=None, m2o_fields=None, m2m_fields=None, domain=[], sort="id", offset=0, limit=None, target_model_name=None)
+#
+# If you want to log the script's result as a message, assign something
+# different than False to the "result" variable.
+result = False
+"""
+
+
+class IntegratorIntegrationScript(models.Model):
+
+ _name = "integrator.integration.script"
+ _description = "integrator.integration.script"
+
+ name = fields.Char(required=True)
+ code = fields.Text(default=DEFAULT_CODE, required=True)
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ for values in vals_list:
+ if "code" in values:
+ self._script_test(code=values["code"])
+ res = super().create(values)
+ return res
+
+ def write(self, values):
+ if "code" in values:
+ self._script_test(code=values["code"])
+ res = super().write(values)
+ return res
+
+ def _script_test(self, code):
+ try:
+ test_expr(code, _SAFE_OPCODES, mode="exec")
+ except Exception as e:
+ raise UserError(e)
+ return True
diff --git a/integrator/models/integrator_integration_script_line.py b/integrator/models/integrator_integration_script_line.py
new file mode 100644
index 00000000..6f511191
--- /dev/null
+++ b/integrator/models/integrator_integration_script_line.py
@@ -0,0 +1,31 @@
+from odoo import api, models, fields
+
+
+class IntegratorScriptLine(models.Model):
+ # TODO rename to job everywhere
+ _name = "integrator.integration.script_line"
+ _description = "Odoo Integration Job"
+
+ active = fields.Boolean(default=True)
+ sequence = fields.Integer()
+ state = fields.Selection([
+ ('pending', 'Pending'), ('enqueued', 'Enqueued'), ('started', 'Started'),
+ ('done', 'Done'), ('failed', 'Failed')], required=True, default='pending')
+ next_offset = fields.Integer()
+ integration_id = fields.Many2one(
+ "integrator.integration", string="Integration", ondelete="cascade",
+ required=True)
+ script_id = fields.Many2one(
+ "integrator.integration.script", string="Script", ondelete="restrict",
+ required=True, context={'active_test': False})
+
+ _sql_constraints = [
+ ('script_line_unique', 'unique(integration_id, script_id)',
+ "You can't add the same script twice")
+ ]
+
+ @api.depends('integration_id')
+ def odoo_run_script_now(self):
+ self.ensure_one()
+ self.integration_id.odoo_run_job(self)
+ return True
diff --git a/integrator/security/integrator_security.xml b/integrator/security/integrator_security.xml
new file mode 100644
index 00000000..1aa31793
--- /dev/null
+++ b/integrator/security/integrator_security.xml
@@ -0,0 +1,23 @@
+
+
+
+
+
+ Integrator
+ Integrator
+ 1
+
+
+
+ User
+
+
+
+
+ Manager
+
+
+
+
+
+
diff --git a/integrator/security/ir.model.access.csv b/integrator/security/ir.model.access.csv
new file mode 100644
index 00000000..20e5f6de
--- /dev/null
+++ b/integrator/security/ir.model.access.csv
@@ -0,0 +1,9 @@
+id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
+access_integrator_account_manager,integrator.account.manager,model_integrator_account,group_manager,1,1,1,1
+access_integrator_account_user,integrator.account.user,model_integrator_account,group_user,1,1,0,0
+access_integrator_integration_manager,integrator.integration.manager,model_integrator_integration,group_manager,1,1,1,1
+access_integrator_integration_user,integrator.integration.user,model_integrator_integration,group_user,1,1,0,0
+access_integrator_integration_script_manager,integrator.integration.script.manager,model_integrator_integration_script,group_manager,1,1,1,1
+access_integrator_integration_script_user,integrator.integration.script.user,model_integrator_integration_script,group_user,1,1,0,0
+access_integrator_integration_script_line_manager,integrator.integration.script.line.manager,model_integrator_integration_script_line,group_manager,1,1,1,1
+access_integrator_integration_script_line_user,integrator.integration.script.line.user,model_integrator_integration_script_line,group_user,1,1,0,0
diff --git a/integrator/static/description/icon.png b/integrator/static/description/icon.png
new file mode 100644
index 00000000..08289347
Binary files /dev/null and b/integrator/static/description/icon.png differ
diff --git a/integrator/views/integrator_account_views.xml b/integrator/views/integrator_account_views.xml
new file mode 100644
index 00000000..e0485f44
--- /dev/null
+++ b/integrator/views/integrator_account_views.xml
@@ -0,0 +1,76 @@
+
+
+
+
+ integrator.account.select
+ integrator.account
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ integrator.account.form
+ integrator.account
+
+
+
+
+
+
+
+ integrator.account.tree
+ integrator.account
+
+
+
+
+
+
+
+
+
+ Accounts
+ integrator.account
+ tree,form
+
+
+
+
+
+
diff --git a/integrator/views/integrator_integration_script_views.xml b/integrator/views/integrator_integration_script_views.xml
new file mode 100644
index 00000000..a1ab0e82
--- /dev/null
+++ b/integrator/views/integrator_integration_script_views.xml
@@ -0,0 +1,58 @@
+
+
+
+
+ integrator.integration.script.select
+ integrator.integration.script
+
+
+
+
+
+
+
+
+
+ integrator.integration.script.form
+ integrator.integration.script
+
+
+
+
+
+
+
+ integrator.integration.script.tree
+ integrator.integration.script
+
+
+
+
+
+
+
+
+ Scripts
+ integrator.integration.script
+ tree,form
+
+
+
+
+
+
diff --git a/integrator/views/integrator_integration_views.xml b/integrator/views/integrator_integration_views.xml
new file mode 100644
index 00000000..f9ba9f2c
--- /dev/null
+++ b/integrator/views/integrator_integration_views.xml
@@ -0,0 +1,112 @@
+
+
+
+
+ integrator.integration.select
+ integrator.integration
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ integrator.integration.form
+ integrator.integration
+
+
+
+
+
+
+
+ integrator.integration.tree
+ integrator.integration
+
+
+
+
+
+
+
+
+
+
+ Integrations
+ integrator.integration
+ tree,form
+
+
+
+
+
+
diff --git a/integrator/views/ir_ui_menuitem.xml b/integrator/views/ir_ui_menuitem.xml
new file mode 100644
index 00000000..ac24a765
--- /dev/null
+++ b/integrator/views/ir_ui_menuitem.xml
@@ -0,0 +1,18 @@
+
+
+
+
+
+
+
diff --git a/integrator/views/portal_my_account_views.xml b/integrator/views/portal_my_account_views.xml
new file mode 100644
index 00000000..1bfc36e1
--- /dev/null
+++ b/integrator/views/portal_my_account_views.xml
@@ -0,0 +1,16 @@
+
+
+
+ My Account
+ /my/home
+ self
+
+
+
+