From ce89058e6b018e8d26819bb2e037332e4bed625e Mon Sep 17 00:00:00 2001 From: augusto-weiss Date: Thu, 14 Nov 2024 16:09:32 -0300 Subject: [PATCH 1/4] [IMP] integrator: new module --- integrator/__init__.py | 4 + integrator/__manifest__.py | 64 ++ integrator/data/ir_server_action.xml | 14 + integrator/models/__init__.py | 4 + integrator/models/integrator_account.py | 114 +++ integrator/models/integrator_integration.py | 712 ++++++++++++++++++ .../models/integrator_integration_script.py | 60 ++ .../integrator_integration_script_line.py | 31 + integrator/security/integrator_security.xml | 23 + integrator/security/ir.model.access.csv | 9 + integrator/static/description/icon.png | Bin 0 -> 3765 bytes integrator/views/integrator_account_views.xml | 76 ++ .../integrator_integration_script_views.xml | 59 ++ .../views/integrator_integration_views.xml | 119 +++ integrator/views/ir_ui_menuitem.xml | 18 + integrator/views/portal_my_account_views.xml | 19 + 16 files changed, 1326 insertions(+) create mode 100644 integrator/__init__.py create mode 100644 integrator/__manifest__.py create mode 100644 integrator/data/ir_server_action.xml create mode 100644 integrator/models/__init__.py create mode 100644 integrator/models/integrator_account.py create mode 100644 integrator/models/integrator_integration.py create mode 100644 integrator/models/integrator_integration_script.py create mode 100644 integrator/models/integrator_integration_script_line.py create mode 100644 integrator/security/integrator_security.xml create mode 100644 integrator/security/ir.model.access.csv create mode 100644 integrator/static/description/icon.png create mode 100644 integrator/views/integrator_account_views.xml create mode 100644 integrator/views/integrator_integration_script_views.xml create mode 100644 integrator/views/integrator_integration_views.xml create mode 100644 integrator/views/ir_ui_menuitem.xml create mode 100644 integrator/views/portal_my_account_views.xml diff --git 
# integrator/__manifest__.py
# NOTE(review): reconstructed from a whitespace-mangled patch; the same patch
# hunk also carried integrator/__init__.py ("from . import models") and
# data/ir_server_action.xml — verify the full tree against version control.
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl-3.0.html).
{
    'name': 'Odoo Integrator',
    'version': "16.0.1.0.0",
    'category': 'SaaS',
    'author': 'ADHOC SA',
    'website': 'www.adhoc.com.ar',
    'license': 'AGPL-3',
    'depends': [
        'base', 'mail',
    ],
    # The 'odooly' python package is required at runtime but is expected to be
    # provided by the deployment image, so it is not declared here.
    'data': [
        'data/ir_server_action.xml',
        'security/integrator_security.xml',
        'security/ir.model.access.csv',
        'views/ir_ui_menuitem.xml',
        'views/integrator_account_views.xml',
        'views/integrator_integration_views.xml',
        'views/integrator_integration_script_views.xml',
    ],
    'installable': True,
    'auto_install': False,
    'application': True,
    'demo': [],
}
# integrator/models/integrator_account.py
# NOTE(review): reconstructed from a whitespace-mangled patch; verify against
# version control.
from odoo import api, models, fields, _
from odoo.exceptions import UserError
from odooly import Client
import logging
_logger = logging.getLogger(__name__)


class IntegratorAccount(models.Model):
    """Connection credentials for a remote Odoo database.

    An account stores host/db/user/password for a remote Odoo instance and
    can validate the connection (via odooly JSON-RPC) before being confirmed
    for use by an integration.
    """

    _name = 'integrator.account'
    _inherit = ['mail.composer.mixin', 'mail.thread', 'mail.activity.mixin']
    _description = 'Integration Account'
    _mailing_enabled = True

    name = fields.Char(required=True, tracking=True, store=True,
                       compute='_compute_name', default=lambda self: _('New'))
    odoo_hostname = fields.Char("Hostname", required=True, tracking=True)
    odoo_db_name = fields.Char("Database Name", required=True, tracking=True)
    odoo_user = fields.Char("Username or E-Mail", required=True, tracking=True)
    odoo_password = fields.Char("Password", required=True)
    state = fields.Selection(
        [('draft', 'Draft'), ('confirmed', 'Confirmed')],
        copy=False, default='draft', required=True, tracking=True)
    channel_alias = fields.Char('Alias', default=False)

    @api.depends('odoo_db_name')
    def _compute_name(self):
        # The account is simply named after the remote database.
        for rec in self:
            rec.name = rec.odoo_db_name

    def back_to_draft(self):
        self.write({'state': 'draft'})

    def test_and_confirm(self):
        # test_connection() raises a UserError on failure, so reaching the
        # write() below means the connection is usable.
        self.test_connection()
        self.write({'state': 'confirmed'})

    def test_connection(self):
        """Odoo connection test.

        :return: an ``ir.actions.client`` notification action on success.
        :raises UserError: if the connection fails or the remote version is
            not supported.
        """
        self.ensure_one()
        try:
            # Attempt to get client
            client = self._odoo_get_client()
        except Exception as e:
            raise UserError("Unable to connect to Odoo. "
                            "The server responded: {}".format(str(e)))
        # Make sure version is correct
        self._odoo_ensure_version(client)
        # Notify success to the user
        result = "Connection with Odoo was successful!"
        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'title': _('Success'),
                'type': 'success',
                'message': result,
                'sticky': False,
            }
        }

    def _odoo_get_client(self):
        """Return an odooly ``Client`` connected to the remote database.

        :raises UserError: if the client cannot be created.
        """
        self.ensure_one()
        try:
            return Client(
                # Use JSONRPC to prevent error when server responds with None
                self.odoo_hostname.strip("/") + "/jsonrpc",
                db=self.odoo_db_name,
                user=self.odoo_user,
                password=self.odoo_password,
            )
        except Exception as e:
            raise UserError("Unable to Connect to Database. Error: %s" % e)

    def _odoo_ensure_version(self, client):
        """Make sure the remote Odoo version is supported (v13.0 or higher).

        :raises UserError: if the remote major version is below 13.
        """
        odoo_version = int(client.server_version.split(".")[0])
        if odoo_version < 13:
            raise UserError(
                "The Odoo version on the remote system is not supported. "
                "Please upgrade to v13.0 or higher")
        return True
# integrator/models/integrator_integration.py
# NOTE(review): reconstructed from a whitespace-mangled patch; verify against
# version control. Methods from odoo_run_job() onward live in a later chunk.
from odoo import models, fields, api, _, SUPERUSER_ID
from odoo.exceptions import UserError, ValidationError
from odoo.tools import safe_eval
import datetime
import logging
import html
import psycopg2


_logger = logging.getLogger(__name__)

# Number of consecutive cron failures tolerated before the integration is
# sent back to draft (see odoo_run_job).
MAX_RETRIES = 5


class IntegratorIntegration(models.Model):
    """Synchronization between this database and a remote Odoo database.

    Each integration owns a dedicated ir.cron task that runs one script line
    per cron tick; when every line is done, the last execution date advances.
    """

    _name = 'integrator.integration'
    _inherit = ['mail.composer.mixin', 'mail.thread', 'mail.activity.mixin']
    _description = 'Integrator Integration'
    _mailing_enabled = True

    name = fields.Char(required=True, tracking=True, store=True,
                       compute='_compute_name', default=lambda self: _('New'))
    state = fields.Selection([('draft', 'Draft'),
                              ('confirmed', 'Confirmed')],
                             copy=False, default='draft', required=True,
                             tracking=True)
    cron_id = fields.Many2one('ir.cron', 'Cron Task',
                              ondelete='restrict', copy=False)
    last_sync_start = fields.Datetime(
        help='Campo auxiliar utilizado para almacenar el momento el en cual inicia la sincro para que, si todo termina'
             ' bien, sea esta fecha la que se setea como ultima fecha de sincron (para no perder cosas que se hayan '
             'actualizado entre el momento de incio de ejecución de y fin, y tmb por el procesamiento en baches)')
    last_cron_execution = fields.Datetime(string="Last Execution")
    cron_nextcall = fields.Datetime(related='cron_id.nextcall', string="Next Call Execution")
    odoo_db2 = fields.Many2one(
        "integrator.account", string="Remote Db",
        required=True,
        states={'confirmed': [('readonly', True)]},
        tracking=True,
        domain="[('state', '!=', 'draft')]",)
    script_line_ids = fields.One2many(
        "integrator.integration.script_line", "integration_id",
        string="Scripts",
        copy=True, context={'active_test': False})
    # Consecutive failure counter, reset on a successful run.
    error_count = fields.Integer()
    active = fields.Boolean("Active", default=True)

    @api.model_create_multi
    def create(self, vals_list):
        """Create the records and their dedicated cron tasks."""
        res = super().create(vals_list)
        res._create_cron()
        return res

    @api.depends('odoo_db2', 'odoo_db2.name')
    def _compute_name(self):
        # "<local db> ~ <remote db>"
        for rec in self:
            rec.name = _("%s ~ %s") % (self.env.cr.dbname, rec.odoo_db2.name)

    def write(self, vals):
        # Prevent archiving integrations that are still confirmed/running.
        for rec in self:
            if 'active' in vals and not vals['active'] and rec.state != 'draft':
                raise UserError(_("You cannot archive integrations that are currently active. First stop the active integration so you can archive it."))
        res = super().write(vals)
        return res

    def unlink(self):
        """ Deletes the integration, then the associated cron task and sequence.
        """
        if any(rec.state != 'draft' for rec in self):
            raise UserError(_('You cannot delete integrations that are currently active. First stop the active integration so you can delete it.'))

        # Keep the cron ids before the records disappear.
        cron_ids = self.cron_id.ids

        result = super().unlink()

        # Unlink asociated crons
        self.env["ir.cron"].sudo().browse(cron_ids).unlink()

        return result

    def _create_cron(self, interval=30):
        """ Create a cron task associated to the given records without one.

        :param interval: minutes between executions of the new cron.
        """
        for rec in self:
            if not rec.cron_id:
                # Create a specific cron task for this integration.
                # bugfix: was self.id, wrong when called on a multi-recordset
                _logger.info(
                    "Creating Cron Task for Integration #{}".format(rec.id))
                code = "model.browse({})._cron_sync()".format(rec.id)
                dict_data = {
                    "name": "Integrator Sync {}".format(rec.id),
                    "active": True,
                    "code": code,
                    "user_id": self.env.ref("base.user_root").id,
                    "model_id": self.env.ref("integrator.model_integrator_integration").id,
                    "interval_number": interval,
                    "interval_type": "minutes",
                    "numbercall": -1,
                    "doall": False,
                    "nextcall": (datetime.datetime.now() + datetime.timedelta(
                        minutes=interval)).strftime('%Y-%m-%d %H:%M:%S'),
                    "state": "code",
                    "priority": 1000,
                }
                cron = self.env["ir.cron"].sudo().with_user(SUPERUSER_ID).create(dict_data)

                # Link them together
                rec.write({"cron_id": cron.id})
        return True

    def _cron_sync(self):
        # Entry point used by the generated cron; flags the context so sync()
        # knows it is not a user call.
        return self.with_context(is_cron=True).sync()

    def _is_cron_running(self, cron_id):
        """ With this query we check if a cron is running.
        The code is taken from the method _try_lock from ir_cron model.
        We don't call the method directly to manage the exception.
        """
        try:
            # The table name cannot be parameterized; the id is passed as a
            # bound parameter instead of being interpolated into the SQL.
            self._cr.execute(f"""
                SELECT id
                FROM "{cron_id._table}"
                WHERE id = %s
                FOR NO KEY UPDATE NOWAIT
            """, (cron_id.id,), log_exceptions=False)
        except psycopg2.OperationalError:
            self._cr.rollback()  # early rollback to allow translations to work for the user feedback
            raise UserError(_("This synchronization is currently being executed. "
                              "Please try again in a few minutes"))

    def back_to_draft(self):
        for rec in self:
            rec.write({'state': 'draft'})

    def test_and_confirm(self):
        """Validate the configuration and confirm the integration."""
        res = self.test_connection()
        if res:
            return res
        self.write({'state': 'confirmed'})
        # Reset all script lines so the next cron run starts from scratch.
        self.script_line_ids.write({'state': 'pending', 'next_offset': False})
        self.last_cron_execution = datetime.datetime.now()

    def sync(self):
        """ Sync for Odoo Integrations.
        Corremos un script en cada corrida de cron. Cuando todos se corrieron
        seteamos fecha de ultima corrida.
        """
        self.ensure_one()
        if 'is_cron' not in self.env.context:
            self._is_cron_running(self.cron_id)

        if not self.state == 'confirmed':
            return

        _logger.info("Syncing Odoo Integration: Integration '%s'", self.name)

        jobs = self.script_line_ids.filtered('active').sorted('sequence')
        # robustness fix: without this guard, an integration confirmed with no
        # active script lines crashed below on jobs[0]
        if not jobs:
            _logger.error("Integration %s. No active scripts found", self.id)
            return

        # si todo estan en pending estamos empezando una nueva corrida y los ponemos en cola
        if all(j.state == 'pending' for j in jobs):
            _logger.info("New sync execution, enqueuing tasks, integration '%s'", self.name)
            jobs.write({'state': 'enqueued'})
            self.last_sync_start = datetime.datetime.now()
            next_job = jobs[0]
        else:
            next_job = jobs.filtered(lambda j: j.state == 'started') or jobs.filtered(
                lambda j: j.state == 'enqueued')
            if not next_job:
                raise ValidationError(_('Error de programación, no hay proximo script para correr'))
            next_job = next_job[0]

        # corremos el proximo script
        self.odoo_run_job(next_job)

        # si todos se terminaron reseteamos estado 'pending' para nueva corrida y actualizamos fecha de ejecucion
        if all(j.state == 'done' for j in jobs):
            jobs.write({'state': 'pending'})
            _logger.info('Finish sync execution, last cron execution to %s', self.last_sync_start)
            self.last_cron_execution = self.last_sync_start
        elif self._context.get('cron_id'):
            # More work pending: retrigger our own cron immediately.
            self.env['ir.cron'].browse(self._context.get('cron_id'))._trigger()

Synchronization {sync_number}

+ #

Summary of updated data:

+ # + #

Check all the log lines here View logs. + # """).format( + # sync_number=self.sync_number, + # info=len(result['info']), + # warnings=len(result['warning']), + # errors=len(result['error']), + # ) + # else: + # return _( + # """ + #

Synchronization {sync_number}

+ #

No changes were made on the synchronized accounts.

+ # """).format(sync_number=self.sync_number) + + # def _log_sync_result(self, result): + # ''' Create integration logs from results. + # Expected format: { + # 'info': [msg], + # 'warning': [msg], + # 'error': [msg], + # 'metrics': {} + # } + # ''' + # if 'metrics' in result: + # del(result['metrics']) + # # Create logs + # log_vals = [] + # for type, messages in result.items(): + # for msg in messages: + # log_vals.append({ + # 'integration_id': self.id, + # 'sync_number': self.sync_number, + # 'log_type': type, + # 'message': msg, + # }) + # self.env['integrator.integration.logging'].sudo().create(log_vals) + # # Create a summary of changes + # message = self._format_summary_sync_msg(result) + # self.sudo().message_post(subtype_id=self.env.ref('integrator.mt_log').id, body=message) + + # TODO: post error Odumbo channel + # Post errors + # errors = result.get('error') + + # if errors: + # template = self.env.ref("integrator.integrator_logging_error_email_template") + # if template: + # template.send_mail(self.id, force_send=True) + + def action_reset_last_sync(self): + """ Resets last sync date to allow sync everything again + """ + self.last_cron_execution = False + self.script_line_ids.write({'state': 'pending', 'next_offset': False}) + + def test_connection(self): + """ Test for Odoo Integrations. + """ + self.ensure_one() + user_call = True + if 'is_cron' in self.env.context: + user_call = False + + if not self.script_line_ids.filtered('active'): + _logger.error("Integration %s. No active scripts found", self.id) + if user_call: + raise UserError( + "No hay scripts activos para la integración %s" % self.id) + + try: + assert self.odoo_db2 + assert self.script_line_ids + except Exception: + _logger.error( + "Integration {}: A required field is missing".format(self.id)) + if user_call: + raise UserError("A required field is missing. 
Please verify.") + + def odoo_run_job(self, job): + """ Run a single job """ + def sync_model( + target_db, model_name, common_fields=[], + boolean_fields=[], m2o_fields=[], m2m_fields=[], domain=[], + sort="id", offset=0, limit=None, target_model_name=None): + """ Helper method for allowing users to synchronize a model + across two different Odoo clients. + """ + + def get_external_id(model_name, rec_id): + """ Builds an external ID out of a model name and an ID. + """ + return "__odumbo__.{}_{}".format( + model_name.replace(".", "_"), rec_id) + + start_time = datetime.datetime.now() + _logger.debug("[sync_model] Synchronizing model '{}' with limit %s and offest %s".format(model_name, limit, offset)) + + # Remove dots from model name + norm_name = model_name.replace(".", "_") + + # Performs a SEARCH operation + records = self.env[model_name].search(domain, order=sort, offset=offset, limit=limit) + if len(records) == 0: + _logger.info("[sync_model] No records were found in model %s meeting the given criteria", model_name) + return + _logger.info( + "[sync_model] Syncing %s records for model %s (limit %s, offset %s)", + len(records), model_name, limit, offset) + + ##### + # Create XML ids for odumbo for any record that has sane XML id on target db and source db (for eg. uoms) + # but only for XML ids created by modules (not __import__ or __export__) + ##### + + # Obtain all automatic XML ids (not imported, exported or nulls) + # Keep in mind many XML ids may point to the same record + search_fields = ["complete_name", "module", "res_id", "name"] + # TODO: se deber agregar sudo() para ir.model.data ?? + moddata_read = self.env["ir.model.data"].search_read([ + ("res_id", "in", records.ids), + ("module", "not in", ["__import__", "__export__"]), + ("model", "=", model_name)], search_fields) + + # We can't search by complete_name since it's a computed field. 
+ # If it wasn't, we would just do a recs.mapped("complente_name") + # and search for these recs in the target DB + if moddata_read: + _logger.debug("Found {} records with existing XML ID".format(len(moddata_read))) + # Fields we need to bring + # Read necessary data from source DB + _logger.debug("moddata_read: {}".format(moddata_read)) + + # Now build a dict keyed by the complete_name + moddata_dict = {} + for item in moddata_read: + moddata_dict[item["complete_name"]] = item + + _logger.debug("moddata_dict: {}".format(moddata_dict)) + + # Now we search for all these records that match our model, + # all names, and all modules + # TODO: se deber agregar sudo() para ir.model.data ?? + target_read = target_db.env["ir.model.data"].search_read([ + ("module", "in", list(set([item["module"] for item in moddata_read]))), + ("name", "in", [item["name"] for item in moddata_read]), + ("model", "=", target_model_name or model_name)], search_fields) + + _logger.debug("taget_read: {}".format(target_read)) + + # Prepare export data + data = [] + for tr in target_read: + # If remote key exists in source database + if tr["complete_name"] in moddata_dict: + # Create a odumbo xmlid + name = "{}_{}".format( + norm_name, + moddata_dict[tr["complete_name"]]["res_id"]) + module = "__odumbo__" + res_id = tr["res_id"] + data.append([name, target_model_name or model_name, module, res_id]) + + _logger.debug("data: {}".format(data)) + + # TODO ver de mejorar, actualmente esto nos está dando error en futuras iteraciones pero + # no lo devolvemos, baicamente porque esto lo importamos sin external id y queremos importar external ids + # de nuevo y ya existen. Tal vez habria que aprovechar a revisar toda esta lociga, tal vez usar + # export_data? y mapear por name o por ese modulo de oca que permite definir otros criterios? + if data: + # TODO: se deber agregar sudo() para ir.model.data ?? 
+ outcome = target_db.env["ir.model.data"].load( + ["name", "model", "module", "res_id"], data) + + _logger.debug(outcome) + ##### + # Finish creating XML ids for modules data + ##### + + # Perform READ operation a single time + _logger.debug("[sync_model] Reading remote data for model %s", model_name) + all_fields = common_fields + boolean_fields + m2o_fields + m2m_fields + items = records.read(all_fields) + + _logger.debug("[sync_model] Adapting data previous to import for model %s", model_name) + # Create a mapping between provided fields and converted fields + fields_map = dict() + for field in all_fields: + if field in m2o_fields or field in m2m_fields: + fields_map[field] = field + "/id" + else: + fields_map[field] = field + + # Collect all values for loading in a single operation + values = list() + + for item in items: + # Convert to string boolean fields (So that load don't fail because we search_read and load expects + # different kind of data) + for boolean_field in boolean_fields or []: + if item[boolean_field]: + item[boolean_field] = str(item[boolean_field]) + + # Generate XML ids for M2O Fields + for m2o_field in m2o_fields or []: + if item[m2o_field]: + rec_id = item[m2o_field].id + # esta operacion no tiene ejecuta ninguna ninguna llamada rpc, lo resuelve localmente odooly + rel_model_name = item[m2o_field]._model._name + item[m2o_field] = get_external_id(rel_model_name, rec_id) + else: + item[m2o_field] = False + + # Generate XML ids for M2M Fields + for m2m_field in m2m_fields or []: + if item[m2m_field]: + rec_ids = item[m2m_field] + rel_model_name = item[m2m_field]._model._name + item[m2m_field] = ','.join([get_external_id(rel_model_name, rec_id) for rec_id in rec_ids.ids]) + else: + item[m2m_field] = False + + item["id"] = get_external_id(model_name, item["id"]) + + # List comprehension will create a list with "sorted" values + result = [item[field] for field in all_fields] + values.append(result) + + # Prevent templates from generating a 
product by themselves + with_context = dict() + if model_name == 'product.template': + # YES, True means False + with_context["create_product_product"] = True + with_context["tracking_disable"] = True + # key that can be used on custom modules to disable constrains or change any behaviour that + # could help to speed up process + with_context["odumbo_sync"] = True + # for compatibility with new v13 bypass + with_context["bypass_base_automation"] = True + + _logger.debug("[sync_model] Loading records on target db for model %s", model_name) + outcome = target_db.env[target_model_name or model_name].with_context( + **with_context).load( + [fields_map[field] for field in all_fields], values) + + if outcome.get('messages'): + for count, msg in enumerate(outcome['messages']): + record = msg['record'] + try: + record_id = values[record][all_fields.index('id')] + errors.append('[Error %s] %s: %s.' % (count + 1, record_id, msg['message'])) + except ValueError: + errors.append('[Error %s] line %s: "%s".\n' % (count + 1, record, msg['message'])) + + errors.append('Error completo: %s' % (outcome.get('messages'))) + elif not outcome.get('ids'): + errors.append('Error no atrapado al sincronizar "%s". No recibimos ni messages ni ids. Recibimos %s' % ( + model_name, outcome)) + else: + ok.append('Sincronizados "%s" elementos en "%s"' % (model_name, len(outcome.get('ids')))) + _logger.info( + "[sync_model] Finish syncking records for model %s on %s hours", model_name, + str((datetime.datetime.now() - start_time))) + _logger.info( + "[sync_model] Finish WITH ERRORS syncking records for model %s on %s hours", model_name, + str((datetime.datetime.now() - start_time))) + + self.ensure_one() + errors = [] + ok = [] + + _logger.info("Running job: '%s'", job.script_id.name) + job_start = datetime.datetime.now() + if job.state != 'started': + job.state = 'started' + + # The following libraries and variables + # will be available for any job. 
+ locals_dict = { + "db2": self.odoo_db2._odoo_get_client(), + "last_cron_execution": self.last_cron_execution, + "last_sync_start": self.last_sync_start, + "offset": job.next_offset, + "Warning": UserError, + "context": dict(self._context), + "datetime": safe_eval.datetime, + "dateutil": safe_eval.dateutil, + "timezone": safe_eval.pytz, + "time": safe_eval.time, + "errors": errors, + "ok": ok, + "sync_model": sync_model, + } + + try: + safe_eval.safe_eval(job.script_id.code, locals_dict, mode="exec", nocopy=True) + except Exception as e: + errors.append(repr(e)) + + if errors: + # si hay errores ponemos mensaje y pasamos a borrador + if 'is_cron' not in self.env.context: + raise ValidationError('Error al sincronizar, esto es lo que obtuvimos:\n%s' % '\n\n'.join(errors)) + elif self.error_count < MAX_RETRIES: + self.error_count += 1 + _logger.warning('Error (%s try) found while running job "%s" (integration %s)', + self.error_count, job.script_id.name, self.name) + else: + self.sudo().message_post( + subtype_id=self.env.ref('integrator.mt_error').id, + body='Error al sincroniazar, se vuelve a borrador la integracion. Esto es lo que obtuvimos: %s' % ( + '
'.join(errors))) + job.state = 'failed' + _logger.warning( + 'Error (last try) found while running job "%s" (integration %s), check chatter for more info', + job.script_id.name, self.name) + self.back_to_draft() + return + elif ok: + # si hay mensajes de ok quiere decir que algo se sincronizo, si hay limite cambiamos limite y seguimos para + # que lo tome proxima corrida + message = """ +

Job run '{}' finish sucessfully on {}

+

{}

+ """.format(job.script_id.name, str(datetime.datetime.now() - job_start), html.escape(str(ok))) + self.sudo().message_post(body=message) + self.error_count = 0 + + limit = locals_dict.get('limit', False) + if limit: + next_offset = job.next_offset + limit + _logger.info('Setting new offset = %s for next iteration', next_offset) + job.next_offset = next_offset + return + # si no hay limite o no no hay resultados, parcamos realizada y reseteamos offset + _logger.info('Finish sync execution, cleaning offset and setting job done') + job.write({'state': 'done', 'next_offset': False}) + + def action_add_odoo_account(self): + wiz_vals = { + 'partner_id': self.partner_id.id, + 'integration_id': self.id, + } + wiz = self.env['integrator.account.wizard'].create(wiz_vals) + return { + 'name': 'New Odoo Account', + 'view_mode': 'form', + 'res_model': 'integrator.account.wizard', + 'res_id': wiz.id, + 'type': 'ir.actions.act_window', + 'target': 'new', + } + + # def _get_error_log_url(self): + # base_url = self.get_base_url() + # action = self.env.ref("integrator.action_integrator_integration_logging_errors") + # path = '/web#&action=%s&model=integrator.integration.logging&view_type=' % (action.id) + # return base_url + path + + # def test_synchronization(self): + # """ Mthod to be override by specifics integrations tests. + # Return an empty dict if no errors. 
Otherwise return a dict with errors + # """ + # return dict() + + +# class IntegratorColoredFormatter(ColoredFormatter): +# def format(self, record): +# record.prefix = COLOR_PATTERN % (30 + record.color, 40, record.prefix) +# return ColoredFormatter.format(self, record) + + +# class IntegrationType(models.Model): +# _name = 'integration.type' +# _description = 'Integration Type' + +# name = fields.Char() +# application = fields.Selection([('odoo', 'Odoo')], required=True,) + + +# class Sync(): +# ''' Class that represents a generic synchronizations between two accounts ''' + +# def __init__(self, odoo, odoo2=None, sandbox_mode=False, ctx={}): +# self.odoo = odoo +# if odoo2: +# self.odoo2 = odoo2 +# self.sandbox_mode = sandbox_mode +# self.ctx = ctx +# self.result = {'info': [], 'warning': [], 'error': []} +# # Logger +# sync_logger = logging.getLogger(self.__class__.__name__) +# myHandler = logging.StreamHandler() +# # Change Odoo log format +# format = '%(asctime)s %(pid)s %(levelname)s %(name)s:: %(prefix)s %(message)s' +# myFormatter = IntegratorColoredFormatter(format) +# myHandler.setFormatter(myFormatter) +# sync_logger.handlers.clear() +# sync_logger.addHandler(myHandler) +# sync_logger.propagate = False +# self.logger = logging.LoggerAdapter(sync_logger, extra={ +# 'prefix': "[SANDBOX][Integration %s]" % ctx.get('id') if self.sandbox_mode else "[Integration %s]" % ctx.get('id'), +# 'color': random.randint(1, 7), +# }) + +# def _ensure_field(self, field_model, field_name): +# if self.odoo.env[field_model]._fields.get(field_name): +# return True +# else: +# if hasattr(self, '_create_field_%s' % field_name): +# return getattr(self, '_create_field_%s' % field_name)() +# else: +# self.result['error'].append("Field %s no found in the model %s." 
% (field_name, field_model)) + + +# class SyncDev(Sync): +# ''' Class that represents a generic Dev synchronizations between two accounts ''' + +# pass diff --git a/integrator/models/integrator_integration_script.py b/integrator/models/integrator_integration_script.py new file mode 100644 index 00000000..a88780ac --- /dev/null +++ b/integrator/models/integrator_integration_script.py @@ -0,0 +1,60 @@ +import logging +from odoo import models, fields, api +from odoo.exceptions import UserError +from odoo.tools.safe_eval import test_expr, _SAFE_OPCODES + + +_logger = logging.getLogger(__name__) + +DEFAULT_CODE = """# Available Context: +# - db2: Odoo Remote Client Database +# - Warning: Use "raise Warning('text')" to show a dialog box with the given +# text as parameter. Useful for debugging the script before activating the +# integration. +# - last_cron_execution: A datetime object with the last successful execution +# of the script. +# - time, datetime, dateutil, timezone: Useful Python libraries +# - context: A dictionary with information about current user, timezone, etc +# - sync_model(target_db, model_name, common_fields=None, boolean_fields=None, m2o_fields=None, m2m_fields=None, domain=[], sort="id", offset=0, limit=None, target_model_name=None) +# +# If you want to log the script's result as a message, assign something +# different than False to the "result" variable. 
+result = False +""" + + +class IntegratorIntegrationScript(models.Model): + + _name = "integrator.integration.script" + # _inherit = ["portal.mixin"] + _description = "integrator.integration.script" + + name = fields.Char(required=True) + # partner_id = fields.Many2one( + # "res.partner", string='Partner', + # ondelete='restrict', default=lambda self: self.env.user.partner_id) + # commercial_partner_id = fields.Many2one( + # 'res.partner', string='Commercial Entity', compute_sudo=True, + # related='partner_id.commercial_partner_id', store=True, readonly=True,) + code = fields.Text(default=DEFAULT_CODE, required=True) + + @api.model_create_multi + def create(self, vals_list): + for values in vals_list: + if "code" in values: + self._script_test(code=values["code"]) + res = super().create(values) + return res + + def write(self, values): + if "code" in values: + self._script_test(code=values["code"]) + res = super().write(values) + return res + + def _script_test(self, code): + try: + test_expr(code, _SAFE_OPCODES, mode="exec") + except Exception as e: + raise UserError(e) + return True diff --git a/integrator/models/integrator_integration_script_line.py b/integrator/models/integrator_integration_script_line.py new file mode 100644 index 00000000..6f511191 --- /dev/null +++ b/integrator/models/integrator_integration_script_line.py @@ -0,0 +1,31 @@ +from odoo import api, models, fields + + +class IntegratorScriptLine(models.Model): + # TODO rename to job everywhere + _name = "integrator.integration.script_line" + _description = "Odoo Integration Job" + + active = fields.Boolean(default=True) + sequence = fields.Integer() + state = fields.Selection([ + ('pending', 'Pending'), ('enqueued', 'Enqueued'), ('started', 'Started'), + ('done', 'Done'), ('failed', 'Failed')], required=True, default='pending') + next_offset = fields.Integer() + integration_id = fields.Many2one( + "integrator.integration", string="Integration", ondelete="cascade", + required=True) + script_id = 
fields.Many2one( + "integrator.integration.script", string="Script", ondelete="restrict", + required=True, context={'active_test': False}) + + _sql_constraints = [ + ('script_line_unique', 'unique(integration_id, script_id)', + "You can't add the same script twice") + ] + + @api.depends('integration_id') + def odoo_run_script_now(self): + self.ensure_one() + self.integration_id.odoo_run_job(self) + return True diff --git a/integrator/security/integrator_security.xml b/integrator/security/integrator_security.xml new file mode 100644 index 00000000..1aa31793 --- /dev/null +++ b/integrator/security/integrator_security.xml @@ -0,0 +1,23 @@ + + + + + + Integrator + Integrator + 1 + + + + User + + + + + Manager + + + + + + diff --git a/integrator/security/ir.model.access.csv b/integrator/security/ir.model.access.csv new file mode 100644 index 00000000..20e5f6de --- /dev/null +++ b/integrator/security/ir.model.access.csv @@ -0,0 +1,9 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_integrator_account_manager,integrator.account.manager,model_integrator_account,group_manager,1,1,1,1 +access_integrator_account_user,integrator.account.user,model_integrator_account,group_user,1,1,0,0 +access_integrator_integration_manager,integrator.integration.manager,model_integrator_integration,group_manager,1,1,1,1 +access_integrator_integration_user,integrator.integration.user,model_integrator_integration,group_user,1,1,0,0 +access_integrator_integration_script_manager,integrator.integration.script.manager,model_integrator_integration_script,group_manager,1,1,1,1 +access_integrator_integration_script_user,integrator.integration.script.user,model_integrator_integration_script,group_user,1,1,0,0 +access_integrator_integration_script_line_manager,integrator.integration.script.line.manager,model_integrator_integration_script_line,group_manager,1,1,1,1 
+access_integrator_integration_script_line_user,integrator.integration.script.line.user,model_integrator_integration_script_line,group_user,1,1,0,0 diff --git a/integrator/static/description/icon.png b/integrator/static/description/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..08289347e15ad585d9bd3aa724761579aac8013a GIT binary patch literal 3765 zcmV;m4odNfP)o1`rV;TAUST000hDNkl;*8R}Dc>I-U=>;sH;b!@!VmjM!QTfFOAF#&GYM zHV6g*0FWpI3awZmM%?QWN*Wo$gL!-i0DxivFBLI@AUF))3(b}mrd?{I^y-b_{&VK) zw}cX2*N6cCfJR7wAgD1)br!i&C*cWT004?8mW*ci_tGJ6l8%1wYvlh=;O^-#Xi@ zx~soFm_dUiAPDLn)msO22fhmJ`73)d6j5yHo+puvH^`ty5->s< zgFuXc5CGt<+Zxbp>B${eCYPdwK`kU?N@2B8s6_;IHja%La_Pd8+wNQ;ZLylJ-WaBY zK{W{w1l{YA>Xiu)1PxXNUr4StvMChDh}OiAisl#Z?-WXTiAZ+&ox$hUDkvdgpD>A} z{5@x^Y=(tmq^(PBv1`bVTd`nyc6s!!d%YE|{8bGnU(43Q`-@U`1=#UJSXNzUN+aEV)CTq^V1( z(2C2;&PJP3uBNw%96cDIxnkIx^zX!|UDsmDis&PU8gBmNX)719I@nFKmF<;>(7`jUQMaQ3ThtVCBjOg>wnyCZ5Q zhU9*3?)GNBK*3C7<=b7~R+OgeldXUU9Why^qA_Z%gxSeg7Z)T(kR_ga{y%2H^> zGV<2?1PsvuSA1=L=joS}E5|R}+U**TBXalFV%^O@=%H%}m8nD)yT&o^XQpdn2!q!V z-CS{(|8KFHuKaSS|C||u!Jctrz!i5ax$7&okS`Gg0RXfZx{iKrK3j0pIT2vAQ`%Yx zR5Q4GV|eg_rEG@zLd4Xms=MTJ>MC7T1A}$scCfKE;25znyBF9CRT#V&!_hPl(N*ds zv|fLMnP;rPfR*s&`@7L-^autJ49Ybk1m;!7?b-!Hh{>j;dx?n|5_EM+MHE|@b{);& zX=^aaq|$mCgxRLI4l>bW)R%eId(NuLiI3kvb@ZiCh&15}9->7Zul9kCtZ(2!Ej3K$7o4>P#7IVk;Mb+v* zXP$Vyr}nex^xBI*u}c}WS9Fu05<}RVoV&YG8Ek9MtKbWf$!q-s6E&@v8ce!3elbLO zsz7r|!0-L+cT}0=)yLbFKg$eiuPmujsJAEzSs>^=beRZ(ppo-ESKb{OdZis`o?rx$ zYs3UjB&dtQQG=nJ>RUI1r|So5QO}rBu8^P^BAYCh^E1j4@ul5EZI`YOU;6ogp+f;Q zTa=TaAo{jB7x23M4Kk>9eAyK0+loefsZ0t*c(6hv(wb%Zc17KZ6f~)jFFBtCJq`-| zL__xN#VDpyq58qEh8yX)AsCb@g?(q+>=Q)g6@ZoE3lZCpE}KFd4-nNqL}Mj<@vi&1 zNb8A^qFtN5FK;e7p9GuI7v2A4W9IXv8b2S4#lHR25f5jMEp~av<9Ed=-$dq6 z4CN>$*>r1`kZ`SrzWa2G)FIBb-R1kcM@*k^j98d<1wFKExZv z@bI&(cqB{TMt;v@NJaB=Q_fr=O>r;5APLuMsJ@i_9GZD?c)&5@!KaQyG)rq1y}_hwq=fE)cB4(nWQDZgR_Pj(Z1lDI@0zw)O&yHf z)6jejVu8#^TTVO#gXe$PJ^He}j$?ItZBy3gn}7j_Cz2=xrcPD=*|zb^HoCuUTes>*zZqBRBn#87TCx|!0@=u9 
zN3+sOXvd(_YwZ&z+n`q4CKt>3Ql*fNdFsKZw#eW9?U{k|Z5xmM?zM0{ltodrh#j!^ zh=3;=yJRizir#7%_=xcQTfOJr?ABT2&H9x=Fle7Jb&nd$nE>f{J{m}GFNYRp92FQa zgp5wwgc2UmJkf#lXu)8yn$Eq|MYbwq!OZFdZ@?25 zhhDM&4YZ~nDB{qcfMucdFXofQ~(2%qfYa3YdDFy(S{#r@UIWvXI!Wd`5;dtNS6 zG|Q$j*C`PM>HRump>!&K@ZC$>8@u>Tyz!V_g@KSK8Gf~$ZQ1|Sw}v8y#zJXQ6%-3* z3Z+9ITGV(PyTX763z}=wn8SeMIN9|2;BMmR;A;}cnTRG^Fhs)p55L$d6|q`AyfM4y-KnW^$&#orz;T=< zHto>qtO;1%{}VFmQ=&ukB)iimio2HG__>w)%FcX%P%Kfp4$+)B7cXh$q*G001nv z3>+g0Xlec7-t>p7%d^fpNDy3Eo!C*9) z@ow(B)`CaeLs#240lerZsu?I1s%GtQLRR1(uKk(0lYwB_-%(YdwcGs0yMyHi6_E?3p~U=nMyqT=9!5W0`o>EZ44xyvfVc{vy%luHruq*f}wVs#;V;6S_(t&NUhN{ zw(TguwPY29vI9RDW`>zZEd3)#}=98Vj33D@<5r5(TMf8asF0 zI@zs3#Fw5do7RFD5STYI*>SRLTB~7zAm}98v{u6~_);gEqiLrCL*J-PXOROfq?3oi zW;eHWtAN(h3BzDA8)&^-i^EWpP3wKh1hk?~3nx#84d7(Ul6QwxDs7ppN0Omu46q`)Z|5zS)%^~{sS!3{GT1500000NkvXXu0mjfqzo~i literal 0 HcmV?d00001 diff --git a/integrator/views/integrator_account_views.xml b/integrator/views/integrator_account_views.xml new file mode 100644 index 00000000..e0485f44 --- /dev/null +++ b/integrator/views/integrator_account_views.xml @@ -0,0 +1,76 @@ + + + + + integrator.account.select + integrator.account + + + + + + + + + + + + + + + integrator.account.form + integrator.account + +
+
+
+ + +
+ + + +
+
+
+
+ + + + integrator.account.tree + integrator.account + + + + + + + + + + Accounts + integrator.account + tree,form + + + + + +
diff --git a/integrator/views/integrator_integration_script_views.xml b/integrator/views/integrator_integration_script_views.xml new file mode 100644 index 00000000..a6eb4412 --- /dev/null +++ b/integrator/views/integrator_integration_script_views.xml @@ -0,0 +1,59 @@ + + + + + integrator.integration.script.select + integrator.integration.script + + + + + + + + + + integrator.integration.script.form + integrator.integration.script + +
+ + +
+
+
+ + + + integrator.integration.script.tree + integrator.integration.script + + + + + + + + + + Scripts + integrator.integration.script + tree,form + + + + + +
diff --git a/integrator/views/integrator_integration_views.xml b/integrator/views/integrator_integration_views.xml new file mode 100644 index 00000000..2d28ab98 --- /dev/null +++ b/integrator/views/integrator_integration_views.xml @@ -0,0 +1,119 @@ + + + + + integrator.integration.select + integrator.integration + + + + + + + + + + + + + + + integrator.integration.form + integrator.integration + +
+ +
+
+ + + +
+ + + + +
+