Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,8 @@ def import_file_button(self, wizard_data=None):

def split_base64_excel(self, header_rows_count, rows_per_file_limit):
"""Split Excel file into multiple parts to avoid overloading the system.
Returns empty list if file is not a valid Excel or if split is not needed."""
Returns empty list if file is not a valid Excel or if split is not needed.
Only processes rows where the date column is not empty."""
if not self.statement_file:
return []

Expand All @@ -143,6 +144,16 @@ def split_base64_excel(self, header_rows_count, rows_per_file_limit):

header_rows = all_rows[:header_rows_count]
data_rows = all_rows[header_rows_count:]

# Get the date column index from the sheet mapping using the parser's method
parser = self.env["account.statement.import.sheet.parser"]
header = parser.parse_header((input_workbook, input_worksheet), self.sheet_mapping_id)
date_column_indexes = parser._get_column_indexes(header, "timestamp_column", self.sheet_mapping_id)
date_column_index = date_column_indexes[0] if date_column_indexes else None

# Filter out rows where the date column is empty
data_rows = self._filter_rows_with_date(data_rows, date_column_index)

start_row_index = 0
total_data_rows = len(data_rows)

Expand All @@ -169,3 +180,20 @@ def split_base64_excel(self, header_rows_count, rows_per_file_limit):

start_row_index = end_row_index
return output_base64_list

def _filter_rows_with_date(self, data_rows, date_column_index):
"""Filter data rows to only include rows where the date column is not empty.
If date_column_index is None, return all rows."""
if date_column_index is None:
return data_rows

filtered_rows = []
for row in data_rows:
# Check if the row has enough columns and the date column is not empty
if len(row) > date_column_index and row[date_column_index].value:
filtered_rows.append(row)
elif len(row) > date_column_index and not row[date_column_index].value:
# Stop processing when we find the first empty date
break

return filtered_rows
2 changes: 1 addition & 1 deletion base_bg/__manifest__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
##############################################################################
{
"name": "Base Background Jobs",
"version": "18.0.1.0.1",
"version": "18.0.1.0.2",
"category": "Technical",
"author": "ADHOC SA",
"website": "https://www.adhoc.com.ar",
Expand Down
98 changes: 91 additions & 7 deletions base_bg/demo/bg_job_demo.xml
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<odoo>
<!-- Demo Partners -->
<record id="bg_job_demo_partner_1" model="res.partner">
<field name="name">BG Demo Customer</field>
<field name="email">bg.demo.customer@example.com</field>
Expand All @@ -10,26 +11,109 @@
<field name="email">bg.demo.vendor@example.com</field>
</record>

<record id="bg_job_demo_data_cleanup" model="bg.job">
<field name="name">Demo Data Cleanup</field>
<!-- Single Job -->
<record id="bg_job_demo_single" model="bg.job">
<field name="name">Single Job - Data Cleanup</field>
<field name="model">res.partner</field>
<field name="method">exists</field>
<field name="priority">5</field>
<field name="state">done</field>
<field name="batch_key">single-batch-001</field>
<field name="start_time">2024-01-01 08:00:00</field>
<field name="end_time">2024-01-01 08:05:00</field>
<field name="kwargs_json" eval="{'_record_ids': [ref('bg_job_demo_partner_1')]}"/>
<field name="context_json" eval="{'lang': 'en_US'}"/>
</record>

<!-- Batch 1: Completed (3 jobs, all done) -->
<record id="bg_job_batch1_job3" model="bg.job">
<field name="name">Export Batch - Part 3/3</field>
<field name="model">res.partner</field>
<field name="method">exists</field>
<field name="priority">10</field>
<field name="state">done</field>
<field name="batch_key">batch-completed-001</field>
<field name="start_time">2024-01-02 09:20:00</field>
<field name="end_time">2024-01-02 09:30:00</field>
<field name="kwargs_json" eval="{'_record_ids': [ref('bg_job_demo_partner_1'), ref('bg_job_demo_partner_2')]}"/>
<field name="context_json" eval="{'lang': 'en_US'}"/>
<field name="create_date">2024-01-02 08:00:00</field>
</record>

<record id="bg_job_batch1_job2" model="bg.job">
<field name="name">Export Batch - Part 2/3</field>
<field name="model">res.partner</field>
<field name="method">exists</field>
<field name="priority">10</field>
<field name="state">done</field>
<field name="batch_key">batch-completed-001</field>
<field name="start_time">2024-01-02 09:10:00</field>
<field name="end_time">2024-01-02 09:20:00</field>
<field name="kwargs_json" eval="{'_record_ids': [ref('bg_job_demo_partner_2')]}"/>
<field name="context_json" eval="{'lang': 'en_US'}"/>
<field name="next_job_id" ref="bg_job_batch1_job3"/>
<field name="create_date">2024-01-02 07:00:00</field>
</record>

<record id="bg_job_batch1_job1" model="bg.job">
<field name="name">Export Batch - Part 1/3</field>
<field name="model">res.partner</field>
<field name="method">exists</field>
<field name="priority">10</field>
<field name="state">done</field>
<field name="batch_key">batch-completed-001</field>
<field name="start_time">2024-01-02 09:00:00</field>
<field name="end_time">2024-01-02 09:10:00</field>
<field name="kwargs_json" eval="{'_record_ids': [ref('bg_job_demo_partner_1')]}"/>
<field name="context_json" eval="{'lang': 'en_US'}"/>
<field name="next_job_id" ref="bg_job_batch1_job2"/>
<field name="create_date">2024-01-02 06:00:00</field>
</record>

<!-- Batch 2: Failed (3 jobs, 1 done, 1 failed, 1 canceled) -->
<record id="bg_job_batch2_job3" model="bg.job">
<field name="name">Validation Batch - Part 3/3</field>
<field name="model">res.partner</field>
<field name="method">exists</field>
<field name="priority">15</field>
<field name="state">canceled</field>
<field name="batch_key">batch-failed-002</field>
<field name="cancel_time">2024-01-03 14:10:00</field>
<field name="error_message">Canceled due to previous job failure in batch</field>
<field name="kwargs_json" eval="{'_record_ids': [ref('bg_job_demo_partner_1'), ref('bg_job_demo_partner_2')]}"/>
<field name="context_json" eval="{'lang': 'en_US'}"/>
<field name="create_date">2024-01-03 14:00:00</field>
</record>

<record id="bg_job_demo_follow_up" model="bg.job">
<field name="name">Demo Follow Up</field>
<record id="bg_job_batch2_job2" model="bg.job">
<field name="name">Validation Batch - Part 2/3</field>
<field name="model">res.partner</field>
<field name="method">exists</field>
<field name="priority">15</field>
<field name="state">failed</field>
<field name="start_time">2024-01-02 10:00:00</field>
<field name="end_time">2024-01-02 12:00:00</field>
<field name="error_message">Timeout while sending follow up</field>
<field name="batch_key">batch-failed-002</field>
<field name="start_time">2024-01-03 14:05:00</field>
<field name="end_time">2024-01-03 14:10:00</field>
<field name="error_message">Validation error: Invalid data format</field>
<field name="kwargs_json" eval="{'_record_ids': [ref('bg_job_demo_partner_2')]}"/>
<field name="context_json" eval="{'lang': 'en_US'}"/>
<field name="next_job_id" ref="bg_job_batch2_job3"/>
<field name="create_date">2024-01-03 13:00:00</field>
</record>

<record id="bg_job_batch2_job1" model="bg.job">
<field name="name">Validation Batch - Part 1/3</field>
<field name="model">res.partner</field>
<field name="method">exists</field>
<field name="priority">15</field>
<field name="state">done</field>
<field name="batch_key">batch-failed-002</field>
<field name="start_time">2024-01-03 14:00:00</field>
<field name="end_time">2024-01-03 14:05:00</field>
<field name="kwargs_json" eval="{'_record_ids': [ref('bg_job_demo_partner_1')]}"/>
<field name="context_json" eval="{'lang': 'en_US'}"/>
<field name="next_job_id" ref="bg_job_batch2_job2"/>
<field name="create_date">2024-01-03 12:00:00</field>
</record>

</odoo>
31 changes: 31 additions & 0 deletions base_bg/migrations/18.0.1.0.2/post-migration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
import logging
import uuid

_logger = logging.getLogger(__name__)


def migrate(cr, version):
    """
    Migration script to set batch_key for existing jobs.

    Each existing job will be treated as a single-job batch:
    - batch_key: unique UUID for each job

    :param cr: database cursor
    :param version: installed module version being migrated from (unused)
    """
    cr.execute("""
        SELECT id FROM bg_job
        WHERE batch_key IS NULL
    """)
    job_ids = [row[0] for row in cr.fetchall()]
    # Nothing to migrate on fresh databases: skip the executemany round-trip.
    if not job_ids:
        return
    updates = [(str(uuid.uuid4()), job_id) for job_id in job_ids]
    cr.executemany(
        """
        UPDATE bg_job
        SET batch_key = %s
        WHERE id = %s
        """,
        updates,
    )
155 changes: 120 additions & 35 deletions base_bg/models/base_bg.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,56 +3,111 @@
# directory
##############################################################################

import json
import uuid
from typing import TYPE_CHECKING, Any

from odoo import _, api, models

if TYPE_CHECKING:
from base_bg.models.bg_job import BgJob


class BaseBg(models.AbstractModel):
_name = "base.bg"
_description = "Background Job Mixin"

@api.model
def bg_enqueue(self, method: str, *args, **kwargs):
def bg_enqueue_records(
self, records: models.BaseModel, method: str, threshold: int | None = None, *args, **kwargs
) -> tuple[dict, "BgJob"]:
"""
Enqueue a background job for execution.

:param method: The method name to execute
Enqueue background jobs in batches based on record threshold.

Special kwargs:
:param max_retries: Maximum retry attempts (default: 3)
This is a model/API method and must be called on the model, passing
the target records as the first argument. Example:
self.env['base.bg'].bg_enqueue_records(records, 'method_name', threshold=..., ...)

:return: A display notification
:param records: recordset to process; can be empty for no specific targets
:param method: The method name to execute on each batch
:param threshold: Maximum number of records per job
:param args: Positional arguments for the method
:param kwargs: Keyword arguments for the method
Special kwargs:
:param priority: Job priority (default: 10)
:param max_retries: Maximum retries for the job (default: 3)
:param name: Base name for the job(s) (default: model.method-uuid)
:return: A display notification and the created jobs
"""
# Normalize records into ids; allow None/empty to mean no targets
jobs = self.env["bg.job"]
model = records._name
record_ids = records.ids if records else []
context = {k: v for k, v in self.env.context.items() if self.is_serializable(v)}
priority = max(kwargs.pop("priority", 10), 0)
max_retries = kwargs.pop("max_retries", 3)
name = kwargs.pop("name", f"{self._name}.{method}")
job_vals = {
"name": name,
"model": self._name,
"method": method,
"max_retries": max_retries,
"context_json": dict(self.env.context),
}

# Handle recordset: store IDs for later reconstruction
if self:
kwargs["_record_ids"] = self.ids

# Serialize arguments
job_vals["args_json"] = list(args) if args else []
job_vals["kwargs_json"] = kwargs
self.env["bg.job"].create(job_vals)
name = kwargs.pop("name", "")

def _get_name(batch_key: str, queue_order: int) -> str:
return name or "%s.%s-%s-%s" % (model, method, batch_key[0:8], queue_order)

batch_key = str(uuid.uuid4())
total = len(record_ids) or 1 # Ensure at least one job if no records
threshold = max(1, threshold or total)
prev_job = None
for i in range(0, total, threshold):
chunk_ids = record_ids[i : i + threshold]
queue_order = i // threshold
job_vals = {
"name": _get_name(batch_key, queue_order),
"model": model,
"method": method,
"priority": priority,
"max_retries": max_retries,
"context_json": context,
"batch_key": batch_key,
"state": "enqueued" if queue_order == 0 else "waiting",
}
job_kwargs = kwargs.copy()
job_kwargs["_record_ids"] = list(chunk_ids) if chunk_ids else []
job_vals["args_json"] = self.check_serializable(list(args)) if args else []
job_vals["kwargs_json"] = self.check_serializable(job_kwargs)
job = self.env["bg.job"].create(job_vals)
jobs |= job
# Link previous job to current so sequence is established in one pass
if prev_job:
prev_job.next_job_id = job.id
prev_job = job

self.sudo()._trigger_crons()
title = _("Process sent to background successfully")
message = _("You will be notified when it is done.")
return {
"type": "ir.actions.client",
"tag": "display_notification",
"params": {
"title": title,
"type": "success",
"message": message,
"next": {"type": "ir.actions.act_window_close"},
title = _("Processes sent to background successfully")
message = _("You will be notified when they are done.")
return (
{
"type": "ir.actions.client",
"tag": "display_notification",
"params": {
"title": title,
"type": "success",
"message": message,
"next": {"type": "ir.actions.act_window_close"},
},
},
}
jobs,
)

def bg_enqueue(self, method: str, threshold: int | None = None, *args, **kwargs) -> tuple[dict, "BgJob"]:
"""
Instance-style enqueuing helper.

Usage:
_inherit = ['base.bg', ...]
...
records.bg_enqueue('method_name', threshold=..., ...)

Delegates to the model API `bg_enqueue_records` using the calling recordset as the `records` parameter.
"""
return self.bg_enqueue_records(self, method, threshold, *args, **kwargs)

def _trigger_crons(self):
"""
Expand All @@ -62,3 +117,33 @@ def _trigger_crons(self):
crons = self.env["ir.cron"].search([("code", "ilike", code)])
for cron in crons:
cron._trigger()

@api.model
def is_serializable(self, value: Any) -> bool:
    """
    Checks if a value is JSON serializable.

    Serializability is decided by attempting the actual ``json.dumps``
    call on the whole value. This also enforces JSON's mapping-key
    rules: the previous per-item recursion wrongly accepted dicts whose
    keys are serializable objects that are invalid as JSON keys (e.g. a
    tuple key serializes fine on its own, but ``json.dumps`` rejects it
    as a dict key), reporting such values as serializable when they are
    not.

    :param value: The value to check
    :return: True if serializable, False otherwise
    """
    try:
        # TypeError: unsupported type / invalid mapping key;
        # ValueError: circular references; OverflowError: out-of-range floats.
        json.dumps(value)
        return True
    except (TypeError, ValueError, OverflowError):
        return False

@api.model
def check_serializable(self, value: Any) -> Any:
    """
    Ensures a value is JSON serializable.

    Acts as a pass-through validator: the value is returned untouched
    when it is serializable, otherwise an error is raised.

    :param value: The value to check
    :return: the value itself, unchanged
    :raises ValueError: If the value is not serializable
    """
    if self.is_serializable(value):
        return value
    raise ValueError(_("Value %s is not JSON serializable") % repr(value))
Loading