From e1cce8c4692a6eb85178f2e3c95df111be9d01db Mon Sep 17 00:00:00 2001
From: Simone Orsi
Date: Mon, 16 Oct 2023 11:45:54 +0200
Subject: [PATCH 1/4] connector_importer: allow renaming keys via conf

You can now configure the mapper option `source_key_rename` to specify a
mapping of source keys to be renamed so that they match Odoo field names.
---
 connector_importer/components/dynamicmapper.py | 17 +++++++++++++++++
 connector_importer/tests/test_mapper.py        | 13 +++++++++++++
 2 files changed, 30 insertions(+)

diff --git a/connector_importer/components/dynamicmapper.py b/connector_importer/components/dynamicmapper.py
index ea4be5d9..4d118b6f 100644
--- a/connector_importer/components/dynamicmapper.py
+++ b/connector_importer/components/dynamicmapper.py
@@ -42,6 +42,11 @@ def dynamic_fields(self, record):
                 # Eg: prefix all supplier fields w/ `supplier.`
                 fname = fname[len(prefix) :]
                 clean_record[fname] = clean_record.pop(prefix + fname)
+            final_fname = self._get_field_name(fname, clean_record)
+            if final_fname != fname:
+                clean_record[final_fname] = clean_record.pop(fname)
+                fname = final_fname
+
             if available_fields.get(fname):
                 fspec = available_fields.get(fname)
                 ftype = fspec["type"]
@@ -113,6 +118,18 @@ def _source_key_empty_skip(self):
     def _source_key_prefix(self):
         return self.work.options.mapper.get("source_key_prefix", "")
 
+    @property
+    def _source_key_rename(self):
+        return self.work.options.mapper.get("source_key_rename", {})
+
+    def _get_field_name(self, fname, clean_record):
+        """Return final field name.
+
+        Field names can be manipulated via mapper option `source_key_rename`
+        which must be a dictionary w/ source name -> destination name.
+        """
+        return self._source_key_rename.get(fname, fname)
+
     def _is_xmlid_key(self, fname, ftype):
         return fname.startswith("xid::") and ftype in (
             "many2one",
diff --git a/connector_importer/tests/test_mapper.py b/connector_importer/tests/test_mapper.py
index 1a82a00a..75de7a39 100644
--- a/connector_importer/tests/test_mapper.py
+++ b/connector_importer/tests/test_mapper.py
@@ -185,3 +185,16 @@ def test_rel_create_if_missing(self):
         self.assertEqual(cat.name, "New category")
         self.assertEqual(res["parent_id"], parent.id)
         self.assertEqual(res["category_id"], [(6, 0, [cat.id])])
+
+    def test_dynamic_mapper_rename_keys(self):
+        rec = {
+            "another_name": "John Doe",
+        }
+        # Whitelist
+        expected = {
+            "name": "John Doe",
+        }
+        mapper = self._get_dynamyc_mapper(
+            options=dict(source_key_rename={"another_name": "name"})
+        )
+        self.assertEqual(mapper.dynamic_fields(rec), expected)

From 3e218f4ece4cd1617e1cfadb550ff7800247f21f Mon Sep 17 00:00:00 2001
From: Simone Orsi
Date: Tue, 28 Feb 2023 17:41:34 +0100
Subject: [PATCH 2/4] connector_importer: use delayable recordset

---
 connector_importer/models/record.py      | 18 +++++++++---------
 .../tests/test_record_importer.py        | 15 ++++++++++-----
 .../tests/test_record_importer_xmlid.py  |  3 ++-
 3 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/connector_importer/models/record.py b/connector_importer/models/record.py
index c61337a5..7ee2d5c9 100644
--- a/connector_importer/models/record.py
+++ b/connector_importer/models/record.py
@@ -88,22 +88,22 @@ def import_record(self, importer_config):
     def run_import(self):
         """Queue a job for importing data stored in to self"""
         self.ensure_one()
-        use_job = self.recordset_id.import_type_id.use_job
-        # TODO: use ctx key to disable job instead
-        job_method = self.with_delay().import_record
-        if self.debug_mode():
+        debug_mode = self.debug_mode()
+        if debug_mode:
             logger.warning("### DEBUG MODE ACTIVE: WILL NOT USE QUEUE ###")
-        if self.debug_mode() or not use_job:
-            job_method = self.import_record
-        result = self._run_import(job_method, use_job)
+        use_job = self.recordset_id.import_type_id.use_job
+        if debug_mode:
+            use_job = False
+        result = self._run_import(use_job=use_job)
         return result
 
-    def _run_import(self, job_method, use_job):
+    def _run_import(self, use_job=True):
         res = {}
         # we create a record and a job for each model name
         # that needs to be imported
+        new_self = self.with_context(queue_job__no_delay=not use_job)
         for config in self.recordset_id.available_importers():
-            result = job_method(config)
+            result = new_self.with_delay().import_record(config)
             res[config.model] = result
             if self.debug_mode() or not use_job:
                 # debug mode, no job here: reset it!
diff --git a/connector_importer/tests/test_record_importer.py b/connector_importer/tests/test_record_importer.py
index f1f588e4..ba446d0f 100644
--- a/connector_importer/tests/test_record_importer.py
+++ b/connector_importer/tests/test_record_importer.py
@@ -42,7 +42,8 @@ def test_importer_create(self):
         expected = {
             model: {"created": 10, "errored": 0, "updated": 0, "skipped": 0},
         }
-        self.assertEqual(res, expected)
+        delayable = res[model]
+        self.assertEqual(delayable.result, expected[model])
         for k, v in expected[model].items():
             self.assertEqual(len(report[model][k]), v)
         self.assertEqual(self.env[model].search_count([("ref", "like", "id_%")]), 10)
@@ -60,7 +61,8 @@ def test_importer_skip(self):
         report = self.recordset.get_report()
         model = "res.partner"
         expected = {model: {"created": 8, "errored": 0, "updated": 0, "skipped": 2}}
-        self.assertEqual(res, expected)
+        delayable = res[model]
+        self.assertEqual(delayable.result, expected[model])
         for k, v in expected[model].items():
             self.assertEqual(len(report[model][k]), v)
         skipped_msg1 = report[model]["skipped"][0]["message"]
@@ -80,7 +82,8 @@ def test_importer_update(self):
         report = self.recordset.get_report()
         model = "res.partner"
         expected = {model: {"created": 10, "errored": 0, "updated": 0, "skipped": 0}}
-        self.assertEqual(res, expected)
+        delayable = res[model]
+        self.assertEqual(delayable.result, expected[model])
         for k, v in expected[model].items():
             self.assertEqual(len(report[model][k]), v)
         # now run it a second time
@@ -90,7 +93,8 @@ def test_importer_update(self):
         res = self.record.run_import()
         report = self.recordset.get_report()
         expected = {model: {"created": 0, "errored": 0, "updated": 10, "skipped": 0}}
-        self.assertEqual(res, expected)
+        delayable = res[model]
+        self.assertEqual(delayable.result, expected[model])
         for k, v in expected[model].items():
             self.assertEqual(len(report[model][k]), v)
         # now run it a second time
@@ -100,7 +104,8 @@ def test_importer_update(self):
         res = self.record.run_import()
         report = self.recordset.get_report()
         expected = {model: {"created": 0, "errored": 0, "updated": 0, "skipped": 10}}
-        self.assertEqual(res, expected)
+        delayable = res[model]
+        self.assertEqual(delayable.result, expected[model])
         for k, v in expected[model].items():
             self.assertEqual(len(report[model][k]), v)
         skipped_msg1 = report[model]["skipped"][0]["message"]
diff --git a/connector_importer/tests/test_record_importer_xmlid.py b/connector_importer/tests/test_record_importer_xmlid.py
index f21515b7..14092c97 100644
--- a/connector_importer/tests/test_record_importer_xmlid.py
+++ b/connector_importer/tests/test_record_importer_xmlid.py
@@ -46,7 +46,8 @@ def test_importer_create(self):
         report = self.recordset.get_report()
         model = "res.partner"
         expected = {model: {"created": 10, "errored": 0, "updated": 0, "skipped": 0}}
-        self.assertEqual(res, expected)
+        delayable = res[model]
+        self.assertEqual(delayable.result, expected[model])
         for k, v in expected[model].items():
             self.assertEqual(len(report[model][k]), v)
         self.assertEqual(self.env[model].search_count([("ref", "like", "id_%")]), 10)

From e2760919cab038d343f5d376650eb72809612121 Mon Sep 17 00:00:00 2001
From: Simone Orsi
Date: Mon, 6 Mar 2023 17:30:01 +0100
Subject: [PATCH 3/4] connector_importer: improve job details

---
 connector_importer/models/record.py | 30 ++++++++++++++++++++++--------
 1 file changed, 22 insertions(+), 8 deletions(-)

diff --git a/connector_importer/models/record.py b/connector_importer/models/record.py
index 7ee2d5c9..eac11471 100644
--- a/connector_importer/models/record.py
+++ b/connector_importer/models/record.py
@@ -72,6 +72,16 @@ def debug_mode(self):
         self.ensure_one()
         return self.backend_id.debug_mode or os.environ.get("IMPORTER_DEBUG_MODE")
 
+    def _should_use_jobs(self):
+        self.ensure_one()
+        debug_mode = self.debug_mode()
+        if debug_mode:
+            logger.warning("### DEBUG MODE ACTIVE: WILL NOT USE QUEUE ###")
+        use_job = self.recordset_id.import_type_id.use_job
+        if debug_mode:
+            use_job = False
+        return use_job
+
     def import_record(self, importer_config):
         """This job will import a record.
 
@@ -88,13 +98,7 @@ def import_record(self, importer_config):
     def run_import(self):
         """Queue a job for importing data stored in to self"""
         self.ensure_one()
-        debug_mode = self.debug_mode()
-        if debug_mode:
-            logger.warning("### DEBUG MODE ACTIVE: WILL NOT USE QUEUE ###")
-        use_job = self.recordset_id.import_type_id.use_job
-        if debug_mode:
-            use_job = False
-        result = self._run_import(use_job=use_job)
+        result = self._run_import(use_job=self._should_use_jobs())
         return result
 
     def _run_import(self, use_job=True):
@@ -103,7 +107,9 @@ def _run_import(self, use_job=True):
         # that needs to be imported
         new_self = self.with_context(queue_job__no_delay=not use_job)
         for config in self.recordset_id.available_importers():
-            result = new_self.with_delay().import_record(config)
+            result = new_self.with_delay(
+                **self._run_import_job_params(config)
+            ).import_record(config)
             res[config.model] = result
             if self.debug_mode() or not use_job:
                 # debug mode, no job here: reset it!
@@ -114,3 +120,11 @@ def _run_import(self, use_job=True):
                 # we keep the reference on w/ the last job.
                self.write({"job_id": result.db_record().id})
         return res
+
+    def _run_import_job_params(self, config):
+        params = {
+            "description": (
+                f"recordset {self.recordset_id.name}: import {config['model']}"
+            )
+        }
+        return params

From 706d1dc7e0437dd1fd8f1a5fb5c0b0c256648d5f Mon Sep 17 00:00:00 2001
From: Simone Orsi
Date: Mon, 6 Mar 2023 15:55:27 +0100
Subject: [PATCH 4/4] connector_importer: fix _compute_docs_html

---
 connector_importer/models/recordset.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/connector_importer/models/recordset.py b/connector_importer/models/recordset.py
index f061279f..2d198bbb 100644
--- a/connector_importer/models/recordset.py
+++ b/connector_importer/models/recordset.py
@@ -8,6 +8,7 @@
 from odoo import api, fields, models
 
 from odoo.addons.base_sparse_field.models.fields import Serialized
+from odoo.addons.component.utils import is_component_registry_ready
 from odoo.addons.queue_job.job import DONE, STATES
 
 from ..log import logger
@@ -342,7 +343,11 @@ def _get_importers(self):
 
     @api.depends("import_type_id")
     def _compute_docs_html(self):
-        template = self.env.ref("connector_importer.recordset_docs")
+        if not is_component_registry_ready(self.env.cr.dbname):
+            # We cannot render anything if we cannot load components
+            self.docs_html = False
+            return
+        qweb = self.env["ir.qweb"].sudo()
         for item in self:
             item.docs_html = False
             if isinstance(item.id, models.NewId) or not item.backend_id:
@@ -352,7 +357,7 @@ def _compute_docs_html(self):
                 continue
             importers = item._get_importers()
             data = {"recordset": item, "importers": importers}
-            item.docs_html = template._render(data)
+            item.docs_html = qweb._render("connector_importer.recordset_docs", data)
 
             # TODO
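
For illustration only: the key-renaming behaviour introduced in PATCH 1/4 boils
down to looking up every source key in the `source_key_rename` mapping before it
is matched against Odoo field names. The sketch below is a minimal, dependency-free
approximation of that lookup; `rename_keys` is a hypothetical helper, not part of
the module — in the patch the real logic lives in `DynamicMapper._get_field_name`,
which reads the mapping from `self.work.options.mapper["source_key_rename"]`.

    # Minimal sketch of the `source_key_rename` lookup from PATCH 1/4.
    # `rename_keys` is a hypothetical stand-in for DynamicMapper._get_field_name.

    def rename_keys(record, source_key_rename):
        """Return a copy of `record` with source keys renamed to Odoo field names."""
        renamed = {}
        for key, value in record.items():
            # Fall back to the original key when no rename is configured,
            # mirroring `self._source_key_rename.get(fname, fname)`.
            renamed[source_key_rename.get(key, key)] = value
        return renamed


    if __name__ == "__main__":
        # Mirrors the expectation in test_dynamic_mapper_rename_keys.
        rec = {"another_name": "John Doe"}
        assert rename_keys(rec, {"another_name": "name"}) == {"name": "John Doe"}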