From 095e3113d3ed9f5c63c386548d1d95e6915b7233 Mon Sep 17 00:00:00 2001
From: SilvioC2C
Date: Tue, 25 Jun 2024 16:59:14 +0200
Subject: [PATCH] [WIP][MIG] connector_jira: Migration to 17.0
---
connector_jira/__init__.py | 1 +
connector_jira/__manifest__.py | 49 ++-
connector_jira/components/__init__.py | 57 ++-
.../components/{mapper.py => common.py} | 47 +--
.../jira_analytic_line_batch_importer.py | 76 ++++
.../components/jira_analytic_line_importer.py | 175 +++++++++
.../components/jira_analytic_line_mapper.py | 109 ++++++
..._analytic_line_timestamp_batch_deleter.py} | 16 +-
.../components/jira_backend_adapter.py | 17 +
.../components/{base.py => jira_base.py} | 2 +-
.../components/jira_base_exporter.py | 103 +++++
.../components/jira_batch_importer.py | 43 ++
connector_jira/components/jira_binder.py | 26 ++
.../components/jira_delayed_batch_importer.py | 30 ++
connector_jira/components/jira_deleter.py | 28 ++
.../components/jira_direct_batch_importer.py | 30 ++
.../{exporter.py => jira_exporter.py} | 106 +----
.../components/jira_import_mapper.py | 17 +
.../{importer.py => jira_importer.py} | 209 ++--------
.../components/jira_issue_type_adapter.py | 18 +
.../jira_issue_type_batch_importer.py} | 21 +-
.../components/jira_issue_type_mapper.py | 17 +
.../components/jira_mapper_from_attrs.py | 17 +
.../{binder.py => jira_model_binder.py} | 31 +-
.../components/jira_project_adapter.py | 91 +++++
.../jira_project_binder.py} | 17 +-
.../jira_project_project_exporter.py | 51 +++
.../jira_project_project_listener.py | 21 +
.../components/jira_project_task_adapter.py | 25 ++
.../jira_project_task_batch_importer.py | 17 +
.../components/jira_project_task_importer.py | 101 +++++
.../components/jira_project_task_mapper.py | 131 +++++++
.../components/jira_res_users_adapter.py | 35 ++
.../jira_res_users_importer.py} | 12 +-
.../components/jira_task_project_matcher.py | 20 +
.../jira_timestamp_batch_importer.py | 91 +++++
..._adapter.py => jira_webservice_adapter.py} | 22 +-
.../components/jira_worklog_adapter.py | 80 ++++
.../components/project_project_listener.py | 30 ++
connector_jira/controllers/__init__.py | 3 +-
.../jira_connect_app_controller.py | 193 +++++++++
.../controllers/jira_webhook_controller.py | 109 ++++++
connector_jira/controllers/main.py | 287 --------------
connector_jira/data/cron.xml | 16 +-
connector_jira/data/queue_job_channel.xml | 8 +
...ue_job_data.xml => queue_job_function.xml} | 13 -
connector_jira/fields.py | 9 +-
connector_jira/i18n/connector_jira.pot | 2 +-
.../migrations/15.0.1.0.0/pre-migrate.py | 24 --
.../migrations/15.0.2.0.0/pre-migrate.py | 32 --
connector_jira/models/__init__.py | 12 +-
.../models/account_analytic_line.py | 152 ++++++++
.../models/account_analytic_line/__init__.py | 5 -
.../models/account_analytic_line/common.py | 322 ---------------
.../models/account_analytic_line/importer.py | 365 -----------------
.../models/jira_account_analytic_line.py | 89 +++++
.../common.py => jira_backend.py} | 252 +++---------
.../models/jira_backend/__init__.py | 3 -
.../models/jira_backend_timestamp.py | 89 +++++
.../common.py => jira_binding.py} | 4 +-
.../models/jira_binding/__init__.py | 3 -
connector_jira/models/jira_issue_type.py | 26 ++
.../models/jira_issue_type/__init__.py | 4 -
.../models/jira_issue_type/common.py | 46 ---
.../models/jira_project_base_mixin.py | 54 +++
connector_jira/models/jira_project_project.py | 142 +++++++
connector_jira/models/jira_project_task.py | 70 ++++
connector_jira/models/jira_res_users.py | 20 +
connector_jira/models/project_project.py | 42 ++
.../models/project_project/__init__.py | 6 -
.../models/project_project/common.py | 367 ------------------
.../models/project_project/exporter.py | 92 -----
connector_jira/models/project_task.py | 164 ++++++++
.../models/project_task/__init__.py | 5 -
connector_jira/models/project_task/common.py | 274 -------------
.../models/project_task/importer.py | 262 -------------
.../{queue_job/common.py => queue_job.py} | 12 +-
connector_jira/models/queue_job/__init__.py | 3 -
.../{res_users/common.py => res_users.py} | 129 ++----
connector_jira/models/res_users/__init__.py | 4 -
connector_jira/reports/__init__.py | 1 +
.../reports/timesheet_analysis_report.py | 22 ++
connector_jira/tests/test_backend.py | 4 +-
connector_jira/tests/test_import_task.py | 2 +-
...tic_line.xml => account_analytic_line.xml} | 14 +-
connector_jira/views/jira_backend.xml | 318 +++++++++++++++
connector_jira/views/jira_backend_views.xml | 295 --------------
...sue_type_views.xml => jira_issue_type.xml} | 9 +-
connector_jira/views/jira_project_project.xml | 60 +++
connector_jira/views/jira_project_task.xml | 38 ++
...res_users_views.xml => jira_res_users.xml} | 21 -
connector_jira/views/project_project.xml | 69 ++++
.../views/project_project_views.xml | 145 -------
...roject_task_views.xml => project_task.xml} | 59 +--
connector_jira/views/res_users.xml | 24 ++
connector_jira/wizards/__init__.py | 2 +
.../jira_account_analytic_line_import.py | 26 +-
... => jira_account_analytic_line_import.xml} | 14 +-
.../project_link_jira.py | 84 ++--
.../project_link_jira.xml} | 24 +-
.../task_link_jira.py | 25 +-
.../task_link_jira.xml} | 6 +-
requirements.txt | 2 +-
103 files changed, 3468 insertions(+), 3499 deletions(-)
rename connector_jira/components/{mapper.py => common.py} (76%)
create mode 100644 connector_jira/components/jira_analytic_line_batch_importer.py
create mode 100644 connector_jira/components/jira_analytic_line_importer.py
create mode 100644 connector_jira/components/jira_analytic_line_mapper.py
rename connector_jira/{models/account_analytic_line/deleter.py => components/jira_analytic_line_timestamp_batch_deleter.py} (87%)
create mode 100644 connector_jira/components/jira_backend_adapter.py
rename connector_jira/components/{base.py => jira_base.py} (87%)
create mode 100644 connector_jira/components/jira_base_exporter.py
create mode 100644 connector_jira/components/jira_batch_importer.py
create mode 100644 connector_jira/components/jira_binder.py
create mode 100644 connector_jira/components/jira_delayed_batch_importer.py
create mode 100644 connector_jira/components/jira_deleter.py
create mode 100644 connector_jira/components/jira_direct_batch_importer.py
rename connector_jira/components/{exporter.py => jira_exporter.py} (71%)
create mode 100644 connector_jira/components/jira_import_mapper.py
rename connector_jira/components/{importer.py => jira_importer.py} (66%)
create mode 100644 connector_jira/components/jira_issue_type_adapter.py
rename connector_jira/{models/jira_issue_type/importer.py => components/jira_issue_type_batch_importer.py} (50%)
create mode 100644 connector_jira/components/jira_issue_type_mapper.py
create mode 100644 connector_jira/components/jira_mapper_from_attrs.py
rename connector_jira/components/{binder.py => jira_model_binder.py} (59%)
create mode 100644 connector_jira/components/jira_project_adapter.py
rename connector_jira/{models/project_project/binder.py => components/jira_project_binder.py} (84%)
create mode 100644 connector_jira/components/jira_project_project_exporter.py
create mode 100644 connector_jira/components/jira_project_project_listener.py
create mode 100644 connector_jira/components/jira_project_task_adapter.py
create mode 100644 connector_jira/components/jira_project_task_batch_importer.py
create mode 100644 connector_jira/components/jira_project_task_importer.py
create mode 100644 connector_jira/components/jira_project_task_mapper.py
create mode 100644 connector_jira/components/jira_res_users_adapter.py
rename connector_jira/{models/res_users/importer.py => components/jira_res_users_importer.py} (82%)
create mode 100644 connector_jira/components/jira_task_project_matcher.py
create mode 100644 connector_jira/components/jira_timestamp_batch_importer.py
rename connector_jira/components/{backend_adapter.py => jira_webservice_adapter.py} (83%)
create mode 100644 connector_jira/components/jira_worklog_adapter.py
create mode 100644 connector_jira/components/project_project_listener.py
create mode 100644 connector_jira/controllers/jira_connect_app_controller.py
create mode 100644 connector_jira/controllers/jira_webhook_controller.py
delete mode 100644 connector_jira/controllers/main.py
create mode 100644 connector_jira/data/queue_job_channel.xml
rename connector_jira/data/{queue_job_data.xml => queue_job_function.xml} (91%)
delete mode 100644 connector_jira/migrations/15.0.1.0.0/pre-migrate.py
delete mode 100644 connector_jira/migrations/15.0.2.0.0/pre-migrate.py
create mode 100644 connector_jira/models/account_analytic_line.py
delete mode 100644 connector_jira/models/account_analytic_line/__init__.py
delete mode 100644 connector_jira/models/account_analytic_line/common.py
delete mode 100644 connector_jira/models/account_analytic_line/importer.py
create mode 100644 connector_jira/models/jira_account_analytic_line.py
rename connector_jira/models/{jira_backend/common.py => jira_backend.py} (73%)
delete mode 100644 connector_jira/models/jira_backend/__init__.py
create mode 100644 connector_jira/models/jira_backend_timestamp.py
rename connector_jira/models/{jira_binding/common.py => jira_binding.py} (96%)
delete mode 100644 connector_jira/models/jira_binding/__init__.py
create mode 100644 connector_jira/models/jira_issue_type.py
delete mode 100644 connector_jira/models/jira_issue_type/__init__.py
delete mode 100644 connector_jira/models/jira_issue_type/common.py
create mode 100644 connector_jira/models/jira_project_base_mixin.py
create mode 100644 connector_jira/models/jira_project_project.py
create mode 100644 connector_jira/models/jira_project_task.py
create mode 100644 connector_jira/models/jira_res_users.py
create mode 100644 connector_jira/models/project_project.py
delete mode 100644 connector_jira/models/project_project/__init__.py
delete mode 100644 connector_jira/models/project_project/common.py
delete mode 100644 connector_jira/models/project_project/exporter.py
create mode 100644 connector_jira/models/project_task.py
delete mode 100644 connector_jira/models/project_task/__init__.py
delete mode 100644 connector_jira/models/project_task/common.py
delete mode 100644 connector_jira/models/project_task/importer.py
rename connector_jira/models/{queue_job/common.py => queue_job.py} (76%)
delete mode 100644 connector_jira/models/queue_job/__init__.py
rename connector_jira/models/{res_users/common.py => res_users.py} (58%)
delete mode 100644 connector_jira/models/res_users/__init__.py
create mode 100644 connector_jira/reports/__init__.py
create mode 100644 connector_jira/reports/timesheet_analysis_report.py
rename connector_jira/views/{timesheet_account_analytic_line.xml => account_analytic_line.xml} (86%)
create mode 100644 connector_jira/views/jira_backend.xml
delete mode 100644 connector_jira/views/jira_backend_views.xml
rename connector_jira/views/{jira_issue_type_views.xml => jira_issue_type.xml} (78%)
create mode 100644 connector_jira/views/jira_project_project.xml
create mode 100644 connector_jira/views/jira_project_task.xml
rename connector_jira/views/{res_users_views.xml => jira_res_users.xml} (51%)
create mode 100644 connector_jira/views/project_project.xml
delete mode 100644 connector_jira/views/project_project_views.xml
rename connector_jira/views/{project_task_views.xml => project_task.xml} (65%)
create mode 100644 connector_jira/views/res_users.xml
rename connector_jira/wizards/{jira_account_analytic_line_import_views.xml => jira_account_analytic_line_import.xml} (71%)
rename connector_jira/{models/project_project => wizards}/project_link_jira.py (68%)
rename connector_jira/{views/project_link_jira_views.xml => wizards/project_link_jira.xml} (76%)
rename connector_jira/{models/project_task => wizards}/task_link_jira.py (71%)
rename connector_jira/{views/task_link_jira_views.xml => wizards/task_link_jira.xml} (85%)
diff --git a/connector_jira/__init__.py b/connector_jira/__init__.py
index b5699491..bc1168d2 100644
--- a/connector_jira/__init__.py
+++ b/connector_jira/__init__.py
@@ -4,4 +4,5 @@
from . import components
from . import controllers
from . import models
+from . import reports
from . import wizards
diff --git a/connector_jira/__manifest__.py b/connector_jira/__manifest__.py
index d5f27910..75948119 100644
--- a/connector_jira/__manifest__.py
+++ b/connector_jira/__manifest__.py
@@ -2,18 +2,23 @@
{
"name": "JIRA Connector",
- "version": "15.0.2.0.0",
+ "version": "17.0.1.0.0",
"author": "Camptocamp,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Connector",
"depends": [
- "connector",
+ # Odoo community
"project",
"hr_timesheet",
- "queue_job",
"web",
- "web_widget_url_advanced",
+ # OCA/connector
+ "connector",
+ # OCA/queue
+ "queue_job",
+ # OCA/server-ux
"multi_step_wizard",
+ # OCA/web
+ "web_widget_url_advanced",
],
"external_dependencies": {
"python": [
@@ -24,26 +29,36 @@
"requests-toolbelt>=0.9.1",
"requests-jwt>=0.6.0",
"PyJWT>=1.7.1,<2.9.0",
- "cryptography<37",
+ "cryptography>=38,<39", # Compatibility w/ Odoo 17.0 requirements
"atlassian_jwt>=3.0.0",
],
},
"website": "https://github.com/OCA/connector-jira",
"data": [
- "views/jira_menus.xml",
- "views/project_link_jira_views.xml",
- "views/task_link_jira_views.xml",
- "views/jira_backend_views.xml",
- "views/jira_backend_report_templates.xml",
- "views/project_project_views.xml",
- "views/project_task_views.xml",
- "views/res_users_views.xml",
- "views/jira_issue_type_views.xml",
- "views/timesheet_account_analytic_line.xml",
- "wizards/jira_account_analytic_line_import_views.xml",
+ # SECURITY
"security/ir.model.access.csv",
+ # DATA
"data/cron.xml",
- "data/queue_job_data.xml",
+ "data/queue_job_channel.xml",
+ "data/queue_job_function.xml",
+ # VIEWS
+ # This file contains the root menu, import it first
+ "views/jira_menus.xml",
+ # Views, actions, menus
+ "views/account_analytic_line.xml",
+ "views/jira_backend.xml",
+ "views/jira_backend_report_templates.xml",
+ "views/jira_issue_type.xml",
+ "views/jira_project_project.xml",
+ "views/jira_project_task.xml",
+ "views/jira_res_users.xml",
+ "views/project_project.xml",
+ "views/project_task.xml",
+ "views/res_users.xml",
+ # Wizard views
+ "wizards/jira_account_analytic_line_import.xml",
+ "wizards/project_link_jira.xml",
+ "wizards/task_link_jira.xml",
],
"demo": ["demo/jira_backend_demo.xml"],
"installable": True,
diff --git a/connector_jira/components/__init__.py b/connector_jira/components/__init__.py
index f30a2ad2..c866b2b2 100644
--- a/connector_jira/components/__init__.py
+++ b/connector_jira/components/__init__.py
@@ -1,8 +1,53 @@
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-from . import base
-from . import backend_adapter
-from . import binder
-from . import exporter
-from . import importer
-from . import mapper
+# ⚠️⚠️⚠️
+# 1) in order to ease readability and maintainability, components have been split into
+# multiple files, each containing exactly 1 component
+# 2) components' import is sorted so that no dependency issue should arise
+# 3) next to each import, a comment will describe the components' dependencies
+# 4) when adding new components, please make sure it inherits (directly or indirectly)
+# from ``jira.base``
+# ⚠️⚠️⚠️
+
+# Base abstract component
+from . import jira_base # base.connector
+
+# Inheriting abstract components
+from . import jira_base_exporter # base.exporter, jira.base
+from . import jira_batch_importer # base.importer, jira.base
+from . import jira_delayed_batch_importer # jira.batch.importer
+from . import jira_direct_batch_importer # jira.batch.importer
+from . import jira_import_mapper # base.import.mapper, jira.base
+from . import jira_timestamp_batch_importer # base.importer, jira.base
+
+# Generic components
+from . import jira_binder # base.binder, jira.base
+from . import jira_deleter # base.deleter, jira.base
+from . import jira_exporter # jira.base.exporter
+from . import jira_importer # base.importer, jira.base
+from . import jira_webservice_adapter # base.backend.adapter.crud, jira.base
+
+# Specific components
+from . import jira_analytic_line_batch_importer # jira.timestamp.batch.importer
+from . import jira_analytic_line_importer # jira.importer
+from . import jira_analytic_line_mapper # jira.import.mapper
+from . import jira_analytic_line_timestamp_batch_deleter # base.synchronizer, jira.base
+from . import jira_backend_adapter # jira.webservice.adapter
+from . import jira_issue_type_adapter # jira.webservice.adapter
+from . import jira_issue_type_batch_importer # jira.direct.batch.importer
+from . import jira_issue_type_mapper # jira.import.mapper
+from . import jira_mapper_from_attrs # jira.base
+from . import jira_model_binder # base.binder, jira.base
+from . import jira_project_adapter # jira.webservice.adapter
+from . import jira_project_binder # jira.binder
+from . import jira_project_project_listener # base.connector.listener, jira.base
+from . import jira_project_project_exporter # jira.exporter
+from . import jira_project_task_adapter # jira.webservice.adapter
+from . import jira_project_task_batch_importer # jira.timestamp.batch.importer
+from . import jira_project_task_importer # jira.importer
+from . import jira_project_task_mapper # jira.import.mapper
+from . import jira_res_users_adapter # jira.webservice.adapter
+from . import jira_res_users_importer # jira.importer
+from . import jira_task_project_matcher # jira.base
+from . import jira_worklog_adapter # jira.webservice.adapter
+from . import project_project_listener # base.connector.listener, jira.base
diff --git a/connector_jira/components/mapper.py b/connector_jira/components/common.py
similarity index 76%
rename from connector_jira/components/mapper.py
rename to connector_jira/components/common.py
index 31c84b1d..bb6c7cc9 100644
--- a/connector_jira/components/mapper.py
+++ b/connector_jira/components/common.py
@@ -1,4 +1,4 @@
-# Copyright 2016-2019 Camptocamp SA
+# Copyright 2016 Camptocamp SA
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from datetime import datetime
@@ -8,20 +8,14 @@
from odoo import fields
-from odoo.addons.component.core import AbstractComponent, Component
-from odoo.addons.connector.components.mapper import mapping
-
-
-class JiraImportMapper(AbstractComponent):
- """Base Import Mapper for Jira"""
-
- _name = "jira.import.mapper"
- _inherit = ["base.import.mapper", "jira.base"]
-
- @mapping
- def jira_updated_at(self, record):
- if self.options.external_updated_at:
- return {"jira_updated_at": self.options.external_updated_at}
+JIRA_JQL_DATETIME_FORMAT = "%Y-%m-%d %H:%M" # no seconds :-(
+RETRY_ON_ADVISORY_LOCK = 1 # seconds
+RETRY_WHEN_CONCURRENT_DETECTED = 1 # seconds
+# when we import using JQL, we always import tasks from
+# slightly before the last batch import, because Jira
+# does not send the results from the past minute and
+# maybe sometimes more
+IMPORT_DELTA = 300 # seconds
def iso8601_to_utc_datetime(isodate):
@@ -33,9 +27,8 @@ def iso8601_to_utc_datetime(isodate):
parsed = parser.parse(isodate)
if not parsed.tzinfo:
return parsed
- utc = pytz.timezone("UTC")
# set as UTC and then remove the tzinfo so the date becomes naive
- return parsed.astimezone(utc).replace(tzinfo=None)
+ return parsed.astimezone(pytz.UTC).replace(tzinfo=None)
def utc_datetime_to_iso8601(dt):
@@ -44,8 +37,7 @@ def utc_datetime_to_iso8601(dt):
Example: 2013-11-04 12:52:01 → 2013-11-04T12:52:01+0000
"""
- utc = pytz.timezone("UTC")
- utc_dt = utc.localize(dt, is_dst=False) # UTC = no DST
+ utc_dt = pytz.UTC.localize(dt, is_dst=False) # UTC = no DST
return utc_dt.isoformat()
@@ -85,8 +77,7 @@ def iso8601_to_naive_date(isodate):
Example with 2014-10-07T00:34:59+0200: we want 2014-10-07 and not
2014-10-06 that we would have using the timestamp converted to UTC.
"""
- naive_date = isodate[:10]
- return datetime.strptime(naive_date, "%Y-%m-%d").date()
+ return datetime.strptime(isodate[:10], "%Y-%m-%d").date()
def iso8601_naive_date(field):
@@ -168,17 +159,3 @@ def modifier(self, record, to_attr):
return value
return modifier
-
-
-class FromFields(Component):
- _name = "jira.mapper.from.attrs"
- _inherit = ["jira.base"]
- _usage = "map.from.attrs"
-
- def values(self, record, mapper_):
- values = {}
- from_fields_mappings = getattr(mapper_, "from_fields", [])
- fields_values = record.get("fields", {})
- for source, target in from_fields_mappings:
- values[target] = mapper_._map_direct(fields_values, source, target)
- return values
diff --git a/connector_jira/components/jira_analytic_line_batch_importer.py b/connector_jira/components/jira_analytic_line_batch_importer.py
new file mode 100644
index 00000000..285d61ce
--- /dev/null
+++ b/connector_jira/components/jira_analytic_line_batch_importer.py
@@ -0,0 +1,76 @@
+# Copyright 2016 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+
+from ..fields import MilliDatetime
+
+
+class JiraAnalyticLineBatchImporter(Component):
+ """Import the Jira worklogs
+
+ For every ID in the list, a delayed job is created.
+ Import is executed starting from a given date.
+ """
+
+ _name = "jira.analytic.line.batch.importer"
+ _inherit = "jira.timestamp.batch.importer"
+ _apply_on = ["jira.account.analytic.line"]
+
+ def _search(self, timestamp):
+ unix_timestamp = MilliDatetime.to_timestamp(timestamp.last_timestamp)
+ result = self.backend_adapter.updated_since(since=unix_timestamp)
+ worklog_ids = self._filter_update(result.updated_worklogs)
+ # We need issue_id + worklog_id for the worklog importer (the jira
+ # "read" method for worklogs asks both), get it from yield_read.
+ # TODO we might consider to optimize the import process here:
+ # yield_read reads worklogs data, then the individual
+ # import will do a request again (and 2 with the tempo module)
+ next_timestamp = MilliDatetime.from_timestamp(result.until)
+ return (next_timestamp, self.backend_adapter.yield_read(worklog_ids))
+
+ def _handle_records(self, records, force=False):
+ for worklog in records:
+ self._import_record(worklog["issueId"], worklog["id"], force=force)
+ return len(records)
+
+ def _filter_update(self, updated_worklogs):
+ """Filter only the worklogs needing an update
+
+ The result from Jira contains the worklog id and
+ the last update on Jira. So we keep only the worklog
+ ids with a sync_date before the Jira last update.
+ """
+ if not updated_worklogs:
+ return []
+ self.env.cr.execute(
+ """
+ SELECT external_id, jira_updated_at
+ FROM jira_account_analytic_line
+ WHERE external_id IN %s
+ """,
+ (tuple(str(r.worklog_id) for r in updated_worklogs),),
+ )
+ bindings = dict(self.env.cr.fetchall())
+ td, ft = MilliDatetime.to_datetime, MilliDatetime.from_timestamp
+ worklog_ids = []
+ for worklog in updated_worklogs:
+ worklog_id = worklog.worklog_id
+ # we store the latest "updated_at" value on the binding
+ # so we can check if we already know the latest value,
+ # for instance because we imported the record from a
+ # webhook before, we can skip the import
+ binding_updated_at = bindings.get(str(worklog_id))
+ if not binding_updated_at or td(binding_updated_at) < ft(worklog.updated):
+ worklog_ids.append(worklog_id)
+ return worklog_ids
+
+ def _import_record(self, issue_id, worklog_id, force=False, **kwargs):
+ """Delay the import of the records"""
+ self.model.with_delay(**kwargs).import_record(
+ self.backend_record,
+ issue_id,
+ worklog_id,
+ force=force,
+ )
diff --git a/connector_jira/components/jira_analytic_line_importer.py b/connector_jira/components/jira_analytic_line_importer.py
new file mode 100644
index 00000000..bb3d16ba
--- /dev/null
+++ b/connector_jira/components/jira_analytic_line_importer.py
@@ -0,0 +1,175 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+import logging
+
+from odoo import _
+
+from odoo.addons.component.core import Component
+
+from .common import iso8601_to_utc_datetime
+
+_logger = logging.getLogger(__name__)
+
+
+class JiraAnalyticLineImporter(Component):
+ _name = "jira.analytic.line.importer"
+ _inherit = "jira.importer"
+ _apply_on = ["jira.account.analytic.line"]
+
+ def __init__(self, work_context):
+ super().__init__(work_context)
+ self.external_issue_id = None
+ self.task_binding = None
+ self.project_binding = None
+ self.fallback_project = None
+
+ def _get_external_updated_at(self):
+ assert self.external_record
+ external_updated_at = self.external_record.get("updated")
+ if not external_updated_at:
+ return None
+ return iso8601_to_utc_datetime(external_updated_at)
+
+ @property
+ def _issue_fields_to_read(self):
+ epic_field_name = self.backend_record.epic_link_field_name
+ return ["issuetype", "project", "parent", epic_field_name]
+
+ def _recurse_import_task(self):
+ """Import and return the task of proper type for the worklog
+
+ As we decide which type of issues are imported for a project,
+ a worklog could be linked to an issue that we don't import.
+ In that case, we climb the parents of the issue until we find
+ an issue of a type we synchronize.
+
+ It ensures that the 'to-be-linked' issue is imported and returns it.
+
+ """
+ issue_adapter = self.component(
+ usage="backend.adapter", model_name="jira.project.task"
+ )
+ issue_binder = self.binder_for("jira.project.task")
+ issue_type_binder = self.binder_for("jira.issue.type")
+ jira_issue_id = self.external_record["issueId"]
+ epic_field_name = self.backend_record.epic_link_field_name
+ project_matcher = self.component(usage="jira.task.project.matcher")
+ current_project_id = self.external_issue["fields"]["project"]["id"]
+ while jira_issue_id:
+ issue = issue_adapter.read(jira_issue_id, fields=self._issue_fields_to_read)
+ jira_project_id = issue["fields"]["project"]["id"]
+ jira_issue_type_id = issue["fields"]["issuetype"]["id"]
+ project_binding = project_matcher.find_project_binding(issue)
+ issue_type_binding = issue_type_binder.to_internal(jira_issue_type_id)
+ # JIRA allows setting an EPIC of a different project.
+ # If it happens, we discard it.
+ if (
+ jira_project_id == current_project_id
+ and issue_type_binding.is_sync_for_project(project_binding)
+ ):
+ break
+ if issue["fields"].get("parent"):
+ # 'parent' is used on sub-tasks relating to their parent task
+ jira_issue_id = issue["fields"]["parent"]["id"]
+ elif issue["fields"].get(epic_field_name):
+ # the epic link is set on a jira custom field
+ epic_key = issue["fields"][epic_field_name]
+ epic = issue_adapter.read(epic_key, fields="id")
+ # we got the key of the epic issue, so we translate
+ # it to the ID with a call to the API
+ jira_issue_id = epic["id"]
+ else:
+ # no parent issue of a type we are synchronizing has been
+ # found, the worklog will be assigned to no task
+ jira_issue_id = None
+
+ if jira_issue_id:
+ self._import_dependency(jira_issue_id, "jira.project.task")
+ return issue_binder.to_internal(jira_issue_id)
+
+ def _create_data(self, map_record, **kwargs):
+ return super()._create_data(
+ map_record,
+ **dict(
+ kwargs or [],
+ task_binding=self.task_binding,
+ project_binding=self.project_binding,
+ fallback_project=self.fallback_project,
+ linked_issue=self.external_issue,
+ ),
+ )
+
+ def _update_data(self, map_record, **kwargs):
+ return super()._update_data(
+ map_record,
+ **dict(
+ kwargs or [],
+ task_binding=self.task_binding,
+ project_binding=self.project_binding,
+ fallback_project=self.fallback_project,
+ linked_issue=self.external_issue,
+ ),
+ )
+
+ def run(self, external_id, force=False, record=None, **kwargs):
+ assert "issue_id" in kwargs
+ self.external_issue_id = kwargs.pop("issue_id")
+ return super().run(external_id, force=force, record=record, **kwargs)
+
+ def _handle_record_missing_on_jira(self):
+ """Hook called when we are importing a record missing on Jira
+
+ For worklogs, we drop the analytic line if we discover it doesn't exist
+ on Jira, as the latter is the master.
+ """
+ binding = self._get_binding()
+ if binding:
+ record = binding.odoo_id
+ binding.unlink()
+ record.unlink()
+ return _("Record does no longer exist in Jira")
+
+ def _get_external_data(self):
+ """Return the raw Jira data for ``self.external_id``"""
+ issue_adapter = self.component(
+ usage="backend.adapter", model_name="jira.project.task"
+ )
+ self.external_issue = issue_adapter.read(self.external_issue_id)
+ return self.backend_adapter.read(self.external_issue_id, self.external_id)
+
+ def _before_import(self):
+ task_binding = self._recurse_import_task()
+ if task_binding and task_binding.active:
+ self.task_binding = task_binding
+ if not self.task_binding:
+ # when no task exists in Odoo (because we don't synchronize
+ # the issue type for instance), we link the line directly
+ # to the corresponding project, not linked to any task
+ issue = self.external_issue
+ assert issue
+ matcher = self.component(usage="jira.task.project.matcher")
+ project_binding = matcher.find_project_binding(issue)
+ if project_binding and project_binding.active:
+ self.project_binding = project_binding
+ else:
+ self.fallback_project = matcher.fallback_project_for_worklogs()
+
+ def _import(self, binding, **kwargs):
+ if not (self.task_binding or self.project_binding or self.fallback_project):
+ _logger.debug(
+ "No task or project synchronized for attaching worklog %s",
+ self.external_record["id"],
+ )
+ return
+ return super()._import(binding, **kwargs)
+
+ def _import_dependency_assignee(self):
+ jira_assignee = self.external_record["author"]
+ jira_key = jira_assignee.get("accountId")
+ self._import_dependency(jira_key, "jira.res.users", record=jira_assignee)
+
+ def _import_dependencies(self):
+ """Import the dependencies for the record"""
+ self._import_dependency_assignee()
diff --git a/connector_jira/components/jira_analytic_line_mapper.py b/connector_jira/components/jira_analytic_line_mapper.py
new file mode 100644
index 00000000..cf912507
--- /dev/null
+++ b/connector_jira/components/jira_analytic_line_mapper.py
@@ -0,0 +1,109 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from pytz import timezone, utc
+
+from odoo import _
+
+from odoo.addons.component.core import Component
+from odoo.addons.connector.components.mapper import mapping
+from odoo.addons.connector.exception import MappingError
+
+from .common import iso8601_to_naive_date, iso8601_to_utc_datetime, whenempty
+
+
class JiraAnalyticLineMapper(Component):
    """Map a Jira worklog payload to ``jira.account.analytic.line`` values."""

    _name = "jira.analytic.line.mapper"
    _inherit = "jira.import.mapper"
    _apply_on = ["jira.account.analytic.line"]

    # worklog comment -> line description, with a placeholder when empty
    direct = [(whenempty("comment", _("missing description")), "name")]

    @mapping
    def issue(self, record):
        """Reference fields linking the line to its Jira issue (and epic)."""
        issue = self.options.linked_issue
        # the importer must pass the raw issue data through the map options
        assert issue
        refs = {"jira_issue_id": record["issueId"], "jira_issue_key": issue["key"]}
        task_mapper = self.component(
            usage="import.mapper",
            model_name="jira.project.task",
        )
        issue_type_dict = task_mapper.issue_type(issue)
        refs.update(issue_type_dict)
        epic_field_name = self.backend_record.epic_link_field_name
        if epic_field_name and epic_field_name in issue["fields"]:
            refs["jira_epic_issue_key"] = issue["fields"][epic_field_name]
        if self.backend_record.epic_link_on_epic:
            # when the issue is itself an Epic, it acts as its own epic link
            issue_type_id = issue_type_dict.get("jira_issue_type_id")
            issue_type = self.env["jira.issue.type"].browse(issue_type_id)
            if issue_type.exists() and issue_type.name == "Epic":
                refs["jira_epic_issue_key"] = issue.get("key")
        return refs

    @mapping
    def date(self, record):
        """Worklog date, interpreted per the backend's timezone mode.

        :raises NotImplementedError: for an unknown timezone mode
        """
        mode = self.backend_record.worklog_date_timezone_mode
        started = record["started"]
        if not mode or mode == "naive":
            return {"date": iso8601_to_naive_date(started)}
        started = iso8601_to_utc_datetime(started).replace(tzinfo=utc)
        if mode == "user":
            tz = timezone(record["author"]["timeZone"])
        elif mode == "specific":
            tz = timezone(self.backend_record.worklog_date_timezone)
        else:
            # use an f-string so the mode actually appears in the message:
            # passing it as a second argument to the exception would only
            # store it in ``args`` without formatting the message
            raise NotImplementedError(f"Cannot parse date with mode '{mode}'")
        return {"date": started.astimezone(tz).date()}

    @mapping
    def duration(self, record):
        """Worked time as hours (Odoo float): 9000s = 2h30m00s = 2.5."""
        return {"unit_amount": float(record["timeSpentSeconds"]) / 3600}

    @mapping
    def author(self, record):
        """Odoo user and employee matching the Jira worklog author.

        :raises MappingError: when no Odoo user is linked to the Jira account
        """
        author = record["author"]
        key = author["accountId"]
        user = self.binder_for("jira.res.users").to_internal(key, unwrap=True)
        if not user:
            raise MappingError(
                _(
                    "No user found with login '%(key)s' or email '%(mail)s'."
                    " You must create a user or link it manually if the"
                    " login/email differs.",
                    key=key,
                    mail=author.get("emailAddress", ""),
                )
            )
        # NB: in v15.0, the employee was retrieved via a ``search()`` on ``hr.employee``
        # with no constraints on the company; we change this to accessing field
        # ``employee_id`` which is a computed field whose value depend on the
        # environment's company to fetch the correct employee and avoids multi-company
        # consistency issues.
        # (We keep the ``active_test=False`` anyway)
        employee = user.with_context(active_test=False).employee_id
        return {"user_id": user.id, "employee_id": employee.id}

    @mapping
    def project_and_task(self, record):
        """Attach the line to a task, a project binding or a fallback project.

        :raises ValueError: when the importer resolved no attach target at all
        """
        if self.options.task_binding:
            task_binding = self.options.task_binding
            return {
                "task_id": task_binding.odoo_id.id,
                "project_id": task_binding.project_id.id,
                "jira_project_bind_id": task_binding.jira_project_bind_id.id,
            }
        elif self.options.project_binding:
            project_binding = self.options.project_binding
            return {
                "project_id": project_binding.odoo_id.id,
                "jira_project_bind_id": project_binding.id,
            }
        elif self.options.fallback_project:
            return {"project_id": self.options.fallback_project.id}
        raise ValueError("No task binding, project binding or fallback project found.")

    @mapping
    def backend_id(self, record):
        """Link the binding to the current backend."""
        return {"backend_id": self.backend_record.id}
diff --git a/connector_jira/models/account_analytic_line/deleter.py b/connector_jira/components/jira_analytic_line_timestamp_batch_deleter.py
similarity index 87%
rename from connector_jira/models/account_analytic_line/deleter.py
rename to connector_jira/components/jira_analytic_line_timestamp_batch_deleter.py
index 56ccf81b..f97ff7f1 100644
--- a/connector_jira/models/account_analytic_line/deleter.py
+++ b/connector_jira/components/jira_analytic_line_timestamp_batch_deleter.py
@@ -8,12 +8,12 @@
from odoo.addons.component.core import Component
from odoo.addons.queue_job.exception import RetryableJobError
-from ...fields import MilliDatetime
+from ..fields import MilliDatetime
_logger = logging.getLogger(__name__)
-class AnalyticLineBatchDeleter(Component):
+class JiraAnalyticLineTimestampBatchDeleter(Component):
"""Batch Deleter working with a jira.backend.timestamp.record
It locks the timestamp to ensure no other job is working on it,
@@ -34,14 +34,11 @@ def run(self, timestamp, **kwargs):
self._handle_lock_failed(timestamp)
next_timestamp_value, records = self._search(timestamp)
-
timestamp._update_timestamp(next_timestamp_value)
-
- self._handle_records(records)
-
+ number = self._handle_records(records)
return _(
f"Batch from {original_timestamp_value} UTC to {next_timestamp_value} UTC "
- "generated {number} delete jobs"
+ f"generated {number} delete jobs"
)
def _handle_records(self, records):
@@ -60,12 +57,11 @@ def _handle_lock_failed(self, timestamp):
def _search(self, timestamp):
unix_timestamp = MilliDatetime.to_timestamp(timestamp.last_timestamp)
result = self.backend_adapter.deleted_since(since=unix_timestamp)
- worklog_ids = result.deleted_worklog_ids
- next_timestamp = MilliDatetime.from_timestamp(result.until)
- return (next_timestamp, worklog_ids)
+ return MilliDatetime.from_timestamp(result.until), result.deleted_worklog_ids
def _delete_record(self, record_id, **kwargs):
"""Delay the delete of the records"""
+ kwargs.pop("description", None)
self.model.with_delay(
description=_("Delete a local worklog which has " "been deleted on JIRA"),
**kwargs,
diff --git a/connector_jira/components/jira_backend_adapter.py b/connector_jira/components/jira_backend_adapter.py
new file mode 100644
index 00000000..d897ffb6
--- /dev/null
+++ b/connector_jira/components/jira_backend_adapter.py
@@ -0,0 +1,17 @@
+# Copyright: 2015 LasLabs, Inc.
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+
+
class JiraBackendAdapter(Component):
    """Adapter exposing backend-level calls to the Jira webservice."""

    _name = "jira.backend.adapter"
    _inherit = "jira.webservice.adapter"
    _apply_on = ["jira.backend"]

    webhook_base_path = "{server}/rest/webhooks/1.0/{path}"

    def list_fields(self):
        """Return the raw list of Jira fields (``GET /field``)."""
        fields_payload = self.client._get_json("field")
        return fields_payload
diff --git a/connector_jira/components/base.py b/connector_jira/components/jira_base.py
similarity index 87%
rename from connector_jira/components/base.py
rename to connector_jira/components/jira_base.py
index dab31f13..64f53c90 100644
--- a/connector_jira/components/base.py
+++ b/connector_jira/components/jira_base.py
@@ -4,7 +4,7 @@
from odoo.addons.component.core import AbstractComponent
-class BaseJiraConnectorComponent(AbstractComponent):
+class JiraBase(AbstractComponent):
"""Base Jira Connector Component
All components of this connector should inherit from it.
diff --git a/connector_jira/components/jira_base_exporter.py b/connector_jira/components/jira_base_exporter.py
new file mode 100644
index 00000000..d46f0bae
--- /dev/null
+++ b/connector_jira/components/jira_base_exporter.py
@@ -0,0 +1,103 @@
+# Copyright 2016-2022 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+"""
+
+Exporters for Jira.
+
+In addition to its export job, an exporter has to:
+
+* check in Jira if the record has been updated more recently than the
+ last sync date and if yes, delay an import
+* call the ``bind`` method of the binder to update the last sync date
+
+"""
+
+from odoo import _, fields, tools
+
+from odoo.addons.component.core import AbstractComponent
+
+from .common import iso8601_to_utc_datetime
+
+
class JiraBaseExporter(AbstractComponent):
    """Shared skeleton for every Jira exporter.

    Takes care of locking, resolving the external id and re-binding after
    the export; the actual export flow is delegated to ``_run()`` which is
    implemented by concrete exporters.
    """

    _name = "jira.base.exporter"
    _inherit = ["base.exporter", "jira.base"]
    _usage = "record.exporter"

    def __init__(self, work_context):
        super().__init__(work_context)
        # filled in by run(); kept on the instance for the hook methods
        self.binding = None
        self.external_id = None

    def _delay_import(self):
        """Schedule an import of the record.

        Adapt in the sub-classes when the model is not imported
        using ``import_record``.
        """
        assert self.external_id
        # force=True because the sync_date will be more recent than Jira's
        # "updated" field, so a non-forced import would be skipped
        self.binding.import_record(self.backend_record, self.external_id, force=True)

    def _should_import(self):
        """Tell whether an import must be scheduled before the export.

        Compares Jira's last update date with Odoo's last sync date: when
        Jira is more recent, an import should happen first so changes done
        on Jira are not missed.
        """
        if not self.external_id:
            # nothing exists on Jira yet, there is nothing to re-import
            return False
        assert self.binding
        last_sync = self.binder.sync_date(self.binding)
        if not last_sync:
            return True
        jira_data = self.backend_adapter.read(self.external_id, fields=["updated"])
        jira_updated = iso8601_to_utc_datetime(jira_data["fields"]["updated"])
        return fields.Datetime.to_datetime(last_sync) < jira_updated

    def _lock(self):
        """Acquire a lock on the binding record.

        Guarantees a single export job works on a given record: concurrent
        jobs fail to lock and are retried later.

        This also holds when the export becomes multilevel through
        :meth:`_export_dependencies`: each level sets its own lock on the
        binding record it exports.
        """
        self.component("record.locker").lock(self.binding)

    def run(self, binding, *args, **kwargs):
        """Run the synchronization for ``binding`` and return its result."""
        self.binding = binding
        if not self.binding.exists():
            return _("Record to export does no longer exist.")

        # prevent other jobs from exporting the same record; the lock is
        # released on commit (or rollback)
        self._lock()

        self.external_id = self.binder.to_external(self.binding)
        result = self._run(*args, **kwargs)
        self.binder.bind(self.external_id, self.binding)
        # commit so the external ID is kept even when several exports are
        # chained and a later one fails
        if not tools.config["test_enable"]:
            self.env.cr.commit()  # pylint: disable=invalid-commit
        return result

    def _run(self, *args, **kwargs):
        """Actual export flow, implemented in concrete exporters"""
        raise NotImplementedError
diff --git a/connector_jira/components/jira_batch_importer.py b/connector_jira/components/jira_batch_importer.py
new file mode 100644
index 00000000..5e335042
--- /dev/null
+++ b/connector_jira/components/jira_batch_importer.py
@@ -0,0 +1,43 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+"""
+
+Importers for Jira.
+
+An import can be skipped if the last sync date is more recent than
+the last update in Jira.
+
+They should call the ``bind`` method of the binder even if the records
+are already bound, to update the last sync date.
+
+"""
+
+from odoo.addons.component.core import AbstractComponent
+
+
class JiraBatchImporter(AbstractComponent):
    """Search Jira for a list of records to import and dispatch each one.

    Whether a single record is imported directly or through a delayed job
    is decided by ``_import_record``, implemented in sub-classes.
    """

    _name = "jira.batch.importer"
    _inherit = ["base.importer", "jira.base"]
    _usage = "batch.importer"

    def run(self):
        """Search all JIRA records and trigger one import per record."""
        for external_id in self._search():
            self._import_record(external_id)

    def _search(self):
        """Return the ids of the Jira records to import."""
        return self.backend_adapter.search()

    def _import_record(self, record_id, **kwargs):
        """Import one record, directly or delayed; to implement in sub-classes."""
        raise NotImplementedError
diff --git a/connector_jira/components/jira_binder.py b/connector_jira/components/jira_binder.py
new file mode 100644
index 00000000..5b790c2a
--- /dev/null
+++ b/connector_jira/components/jira_binder.py
@@ -0,0 +1,26 @@
+# Copyright 2016-2019 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import fields
+
+from odoo.addons.component.core import Component
+
+
class JiraBinder(Component):
    """Default binder for Jira, used when no model-specific binder exists.

    Bindings are kept on a dedicated model holding the external id.
    Having a second model brings two advantages:

    * more than one JIRA instance can be linked to the same record;
    * the jira binding can be worked with, locked and edited without
      touching the normal record.
    """

    _name = "jira.binder"
    _inherit = ["base.binder", "jira.base"]

    def sync_date(self, binding):
        """Return the last sync date of ``binding`` as a datetime."""
        assert self._sync_date_field
        raw_value = binding[self._sync_date_field]
        return fields.Datetime.to_datetime(raw_value)
diff --git a/connector_jira/components/jira_delayed_batch_importer.py b/connector_jira/components/jira_delayed_batch_importer.py
new file mode 100644
index 00000000..e38d97de
--- /dev/null
+++ b/connector_jira/components/jira_delayed_batch_importer.py
@@ -0,0 +1,30 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+"""
+
+Importers for Jira.
+
+An import can be skipped if the last sync date is more recent than
+the last update in Jira.
+
+They should call the ``bind`` method of the binder even if the records
+are already bound, to update the last sync date.
+
+"""
+
+from odoo.addons.component.core import AbstractComponent
+
+
class JiraDelayedBatchImporter(AbstractComponent):
    """Import records through delayed queue jobs."""

    _name = "jira.delayed.batch.importer"
    _inherit = ["jira.batch.importer"]

    def _import_record(self, record_id, force=False, record=None, **kwargs):
        """Postpone the import of one record as a queue job.

        Extra ``kwargs`` are forwarded to ``with_delay`` (priority, eta, ...).
        """
        delayable = self.model.with_delay(**kwargs)
        delayable.import_record(
            self.backend_record, record_id, force=force, record=record
        )
diff --git a/connector_jira/components/jira_deleter.py b/connector_jira/components/jira_deleter.py
new file mode 100644
index 00000000..c4a121e2
--- /dev/null
+++ b/connector_jira/components/jira_deleter.py
@@ -0,0 +1,28 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import _
+
+from odoo.addons.component.core import Component
+
+
class JiraDeleter(Component):
    """Delete locally a record which has been removed on Jira."""

    _name = "jira.deleter"
    _inherit = ["base.deleter", "jira.base"]
    _usage = "record.deleter"

    def run(self, external_id, only_binding=False, set_inactive=False):
        """Delete (or archive) the record bound to ``external_id``.

        :param only_binding: keep the Odoo record, remove only the binding
        :param set_inactive: archive the binding instead of deleting anything
        """
        binding = self.binder.to_internal(external_id)
        if not binding.exists():
            return _("Binding not found")
        if set_inactive:
            binding.active = False
            return _("Record deleted")
        record = binding.odoo_id
        # emptying the external_id allows to unlink the binding
        binding.external_id = False
        binding.unlink()
        if not only_binding:
            record.unlink()
        return _("Record deleted")
diff --git a/connector_jira/components/jira_direct_batch_importer.py b/connector_jira/components/jira_direct_batch_importer.py
new file mode 100644
index 00000000..8783f6f2
--- /dev/null
+++ b/connector_jira/components/jira_direct_batch_importer.py
@@ -0,0 +1,30 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+"""
+
+Importers for Jira.
+
+An import can be skipped if the last sync date is more recent than
+the last update in Jira.
+
+They should call the ``bind`` method of the binder even if the records
+are already bound, to update the last sync date.
+
+"""
+
+from odoo.addons.component.core import AbstractComponent
+
+
class JiraDirectBatchImporter(AbstractComponent):
    """Run each record import synchronously, without queue jobs."""

    _name = "jira.direct.batch.importer"
    _inherit = ["jira.batch.importer"]

    def _import_record(self, record_id, force=False, record=None):
        """Import a single record right away."""
        backend = self.backend_record
        self.model.import_record(backend, record_id, force=force, record=record)
diff --git a/connector_jira/components/exporter.py b/connector_jira/components/jira_exporter.py
similarity index 71%
rename from connector_jira/components/exporter.py
rename to connector_jira/components/jira_exporter.py
index 218b0b6d..978a2cdb 100644
--- a/connector_jira/components/exporter.py
+++ b/connector_jira/components/jira_exporter.py
@@ -13,108 +13,15 @@
"""
-import logging
from contextlib import contextmanager
import psycopg2
-from odoo import _, fields, tools
+from odoo import _, tools
-from odoo.addons.component.core import AbstractComponent, Component
+from odoo.addons.component.core import Component
from odoo.addons.queue_job.exception import RetryableJobError
-from .mapper import iso8601_to_utc_datetime
-
-_logger = logging.getLogger(__name__)
-
-
-class JiraBaseExporter(AbstractComponent):
- """Base exporter for Jira"""
-
- _name = "jira.base.exporter"
- _inherit = ["base.exporter", "jira.base"]
- _usage = "record.exporter"
-
- def __init__(self, work_context):
- super().__init__(work_context)
- self.binding = None
- self.external_id = None
-
- def _delay_import(self):
- """Schedule an import of the record.
-
- Adapt in the sub-classes when the model is not imported
- using ``import_record``.
- """
- # force is True because the sync_date will be more recent
- # so the import would be skipped if it was not forced
- assert self.external_id
- self.binding.import_record(self.backend_record, self.external_id, force=True)
-
- def _should_import(self):
- """Before the export, compare the update date
- in Jira and the last sync date in Odoo,
- if the former is more recent, schedule an import
- to not miss changes done in Jira.
- """
- assert self.binding
- if not self.external_id:
- return False
- sync = self.binder.sync_date(self.binding)
- if not sync:
- return True
- jira_updated = self.backend_adapter.read(self.external_id, fields=["updated"])[
- "fields"
- ]["updated"]
-
- sync_date = fields.Datetime.from_string(sync)
- jira_date = iso8601_to_utc_datetime(jira_updated)
- return sync_date < jira_date
-
- def _lock(self):
- """Lock the binding record.
-
- Lock the binding record so we are sure that only one export
- job is running for this record if concurrent jobs have to export the
- same record.
-
- When concurrent jobs try to export the same record, the first one
- will lock and proceed, the others will fail to lock and will be
- retried later.
-
- This behavior works also when the export becomes multilevel
- with :meth:`_export_dependencies`. Each level will set its own lock
- on the binding record it has to export.
- """
- self.component("record.locker").lock(self.binding)
-
- def run(self, binding, *args, **kwargs):
- """Run the synchronization
-
- :param binding: binding record to export
- """
- self.binding = binding
-
- if not self.binding.exists():
- return _("Record to export does no longer exist.")
-
- # prevent other jobs to export the same record
- # will be released on commit (or rollback)
- self._lock()
-
- self.external_id = self.binder.to_external(self.binding)
- result = self._run(*args, **kwargs)
- self.binder.bind(self.external_id, self.binding)
- # commit so we keep the external ID if several exports
- # are called and one of them fails
- if not tools.config["test_enable"]:
- self.env.cr.commit() # pylint: disable=invalid-commit
- return result
-
- def _run(self, *args, **kwargs):
- """Flow of the synchronization, implemented in inherited classes"""
- raise NotImplementedError
-
class JiraExporter(Component):
"""Common exporter flow for Jira
@@ -161,8 +68,7 @@ def _retry_unique_violation(self):
raise
def _export_dependency(self, relation, binding_model, component=None):
- """
- Export a dependency.
+ """Export a dependency.
.. warning:: a commit is done at the end of the export of each
dependency. The reason for that is that we pushed a record
@@ -300,12 +206,12 @@ def _run(self, fields=None):
"""
assert self.binding
- if not self.external_id:
- fields = None # should be created with all the fields
-
if self._has_to_skip():
return
+ if not self.external_id:
+ fields = None # should be created with all the fields
+
# export the missing linked resources
self._export_dependencies()
diff --git a/connector_jira/components/jira_import_mapper.py b/connector_jira/components/jira_import_mapper.py
new file mode 100644
index 00000000..8a45374e
--- /dev/null
+++ b/connector_jira/components/jira_import_mapper.py
@@ -0,0 +1,17 @@
+# Copyright 2016-2019 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import AbstractComponent
+from odoo.addons.connector.components.mapper import mapping
+
+
class JiraImportMapper(AbstractComponent):
    """Base Import Mapper for Jira"""

    _name = "jira.import.mapper"
    _inherit = ["base.import.mapper", "jira.base"]

    @mapping
    def jira_updated_at(self, record):
        """Store Jira's "updated" timestamp when the map options carry one."""
        external_updated_at = self.options.external_updated_at
        if external_updated_at:
            return {"jira_updated_at": external_updated_at}
        return None
diff --git a/connector_jira/components/importer.py b/connector_jira/components/jira_importer.py
similarity index 66%
rename from connector_jira/components/importer.py
rename to connector_jira/components/jira_importer.py
index 4714fe08..05703fb2 100644
--- a/connector_jira/components/importer.py
+++ b/connector_jira/components/jira_importer.py
@@ -16,30 +16,24 @@
import logging
from contextlib import closing, contextmanager
-from datetime import datetime, timedelta
from psycopg2 import IntegrityError, errorcodes
import odoo
from odoo import _, tools
-from odoo.addons.component.core import AbstractComponent, Component
+from odoo.addons.component.core import Component
from odoo.addons.connector.exception import IDMissingInBackend
from odoo.addons.queue_job.exception import RetryableJobError
-from .backend_adapter import JIRA_JQL_DATETIME_FORMAT
-from .mapper import iso8601_to_utc_datetime
+from .common import (
+ RETRY_ON_ADVISORY_LOCK,
+ RETRY_WHEN_CONCURRENT_DETECTED,
+ iso8601_to_utc_datetime,
+)
_logger = logging.getLogger(__name__)
-RETRY_ON_ADVISORY_LOCK = 1 # seconds
-RETRY_WHEN_CONCURRENT_DETECTED = 1 # seconds
-# when we import using JQL, we always import tasks from
-# slightly before the last batch import, because Jira
-# does not send the results from the past minute and
-# maybe sometimes more
-IMPORT_DELTA = 300 # seconds
-
class JiraImporter(Component):
"""Base importer for Jira
@@ -68,8 +62,7 @@ def must_skip(self, force=False):
assert self.external_record
def _before_import(self):
- """Hook called before the import, when we have the Jira
- data"""
+ """Hook called before the import, when we have the Jira data"""
def _get_external_updated_at(self):
assert self.external_record
@@ -80,20 +73,16 @@ def _get_external_updated_at(self):
return iso8601_to_utc_datetime(external_updated_at)
def _is_uptodate(self, binding):
- """Return True if the import should be skipped because
- it is already up-to-date in Odoo"""
+ """Return True if the binding is already up-to-date in Odoo"""
external_date = self._get_external_updated_at()
- if not external_date:
- return False # no update date on Jira, always import it.
- if not binding:
- return # it does not exist so it should not be skipped
# We store the jira "updated_at" field in the binding,
# so for further imports, we can check accurately if the
# record is already up-to-date (this field has a millisecond
# precision).
- if binding.jira_updated_at:
- return external_date < binding.jira_updated_at
- return False
+ internal_date = bool(binding) and binding.jira_updated_at
+ # No update date on Jira, no binding or no last update on the binding => the
+ # record does not exist or is not up-to-date, so it should be imported
+ return external_date and internal_date and external_date < internal_date
def _import_dependency(
self, external_id, binding_model, component=None, record=None, always=False
@@ -119,15 +108,14 @@ def _import_dependency(
it is still skipped if it has not been modified on Jira
:type always: boolean
"""
- if not external_id:
- return
- binder = self.binder_for(binding_model)
- if always or not binder.to_internal(external_id):
- if component is None:
- component = self.component(
- usage="record.importer", model_name=binding_model
- )
- component.run(external_id, record=record, force=True)
+ if external_id:
+ binder = self.binder_for(binding_model)
+ if always or not binder.to_internal(external_id):
+ if component is None:
+ component = self.component(
+ usage="record.importer", model_name=binding_model
+ )
+ component.run(external_id, record=record, force=True)
def _import_dependencies(self):
"""Import the dependencies for the record"""
@@ -154,15 +142,9 @@ def _filter_data(self, binding, data):
"""Filter values that aren't actually changing"""
binding.ensure_one()
fields = list(data.keys())
- new_values = binding._convert_to_write(
- data,
- )
- old_values = binding._convert_to_write(
- binding.read(
- fields,
- load="_classic_write",
- )[0],
- )
+ new_values = binding._convert_to_write(data)
+ old_binding_values = binding.read(fields, load="_classic_write")[0]
+ old_values = binding._convert_to_write(old_binding_values)
new_data = {}
for field in fields:
if new_values[field] == old_values[field]:
@@ -407,148 +389,3 @@ def _import(self, binding, **kwargs):
self.binder.bind(self.external_id, binding)
self._after_import(binding)
-
-
-class BatchImporter(AbstractComponent):
- """The role of a BatchImporter is to search for a list of
- items to import, then it can either import them directly or delay
- the import of each item separately.
- """
-
- _name = "jira.batch.importer"
- _inherit = ["base.importer", "jira.base"]
- _usage = "batch.importer"
-
- def run(self):
- """Run the synchronization, search all JIRA records"""
- record_ids = self._search()
- for record_id in record_ids:
- self._import_record(record_id)
-
- def _search(self):
- return self.backend_adapter.search()
-
- def _import_record(self, record_id, **kwargs):
- """Import a record directly or delay the import of the record.
-
- Method to implement in sub-classes.
- """
- raise NotImplementedError
-
-
-class DirectBatchImporter(AbstractComponent):
- """Import the records directly, without delaying the jobs."""
-
- _name = "jira.direct.batch.importer"
- _inherit = ["jira.batch.importer"]
-
- def _import_record(self, record_id, force=False, record=None):
- """Import the record directly"""
- self.model.import_record(
- self.backend_record, record_id, force=force, record=record
- )
-
-
-class DelayedBatchImporter(AbstractComponent):
- """Delay import of the records"""
-
- _name = "jira.delayed.batch.importer"
- _inherit = ["jira.batch.importer"]
-
- def _import_record(self, record_id, force=False, record=None, **kwargs):
- """Delay the import of the records"""
- self.model.with_delay(**kwargs).import_record(
- self.backend_record, record_id, force=force, record=record
- )
-
-
-class TimestampBatchImporter(AbstractComponent):
- """Batch Importer working with a jira.backend.timestamp.record
-
- It locks the timestamp to ensure no other job is working on it,
- and uses the latest timestamp value as reference for the search.
-
- The role of a BatchImporter is to search for a list of
- items to import, then it can either import them directly or delay
- the import of each item separately.
- """
-
- _name = "jira.timestamp.batch.importer"
- _inherit = ["base.importer", "jira.base"]
- _usage = "timestamp.batch.importer"
-
- def run(self, timestamp, force=False, **kwargs):
- """Run the synchronization using the timestamp"""
- original_timestamp_value = timestamp.last_timestamp
- if not timestamp._lock():
- self._handle_lock_failed(timestamp)
-
- next_timestamp_value, records = self._search(timestamp)
-
- timestamp._update_timestamp(next_timestamp_value)
-
- number = self._handle_records(records, force=force)
-
- return _(
- f"Batch from {original_timestamp_value} UTC to {next_timestamp_value} "
- f"UTC generated {number} imports"
- )
-
- def _handle_records(self, records, force=False):
- """Handle the records to import and return the number handled"""
- for record_id in records:
- self._import_record(record_id, force=force)
- return len(records)
-
- def _handle_lock_failed(self, timestamp):
- _logger.warning("Failed to acquire timestamps %s", timestamp, exc_info=True)
- raise RetryableJobError(
- "Concurrent job / process already syncing",
- ignore_retry=True,
- )
-
- def _search(self, timestamp):
- """Return a tuple (next timestamp value, jira record ids)"""
- until = datetime.now()
-
- parts = []
- if timestamp.last_timestamp:
- since = timestamp.last_timestamp
- from_date = since.strftime(JIRA_JQL_DATETIME_FORMAT)
- parts.append('updated >= "%s"' % from_date)
- to_date = until.strftime(JIRA_JQL_DATETIME_FORMAT)
- parts.append('updated <= "%s"' % to_date)
-
- next_timestamp = max(until - timedelta(seconds=IMPORT_DELTA), since)
- record_ids = self.backend_adapter.search(" and ".join(parts))
- return (next_timestamp, record_ids)
-
- def _import_record(self, record_id, force=False, record=None, **kwargs):
- """Delay the import of the records"""
- self.model.with_delay(**kwargs).import_record(
- self.backend_record,
- record_id,
- force=force,
- record=record,
- )
-
-
-class JiraDeleter(Component):
- _name = "jira.deleter"
- _inherit = ["base.deleter", "jira.base"]
- _usage = "record.deleter"
-
- def run(self, external_id, only_binding=False, set_inactive=False):
- binding = self.binder.to_internal(external_id)
- if not binding.exists():
- return _("Binding not found")
- if set_inactive:
- binding.active = False
- else:
- record = binding.odoo_id
- # emptying the external_id allows to unlink the binding
- binding.external_id = False
- binding.unlink()
- if not only_binding:
- record.unlink()
- return _("Record deleted")
diff --git a/connector_jira/components/jira_issue_type_adapter.py b/connector_jira/components/jira_issue_type_adapter.py
new file mode 100644
index 00000000..4d706427
--- /dev/null
+++ b/connector_jira/components/jira_issue_type_adapter.py
@@ -0,0 +1,18 @@
+# Copyright 2016-2022 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+
+
class JiraIssueTypeAdapter(Component):
    """Adapter reading issue types from the Jira webservice."""

    _name = "jira.issue.type.adapter"
    _inherit = ["jira.webservice.adapter"]
    _apply_on = ["jira.issue.type"]

    def read(self, id_):
        """Return the raw Jira data of one issue type."""
        # pylint: disable=W8106
        with self.handle_404():
            raw_data = self.client.issue_type(id_).raw
        return raw_data

    def search(self):
        """Return the ids of all issue types known by Jira."""
        return [issue_type.id for issue_type in self.client.issue_types()]
diff --git a/connector_jira/models/jira_issue_type/importer.py b/connector_jira/components/jira_issue_type_batch_importer.py
similarity index 50%
rename from connector_jira/models/jira_issue_type/importer.py
rename to connector_jira/components/jira_issue_type_batch_importer.py
index 557df523..4ca5eb52 100644
--- a/connector_jira/models/jira_issue_type/importer.py
+++ b/connector_jira/components/jira_issue_type_batch_importer.py
@@ -2,25 +2,9 @@
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo.addons.component.core import Component
-from odoo.addons.connector.components.mapper import mapping
-class IssueTypeMapper(Component):
- _name = "jira.issue.type.mapper"
- _inherit = ["jira.import.mapper"]
- _apply_on = "jira.issue.type"
-
- direct = [
- ("name", "name"),
- ("description", "description"),
- ]
-
- @mapping
- def backend_id(self, record):
- return {"backend_id": self.backend_record.id}
-
-
-class IssueTypeBatchImporter(Component):
+class JiraIssueTypeBatchImporter(Component):
"""Import the Jira Issue Types
For every id in in the list of issue types, a direct import is done.
@@ -33,6 +17,5 @@ class IssueTypeBatchImporter(Component):
def run(self):
"""Run the synchronization"""
- record_ids = self.backend_adapter.search()
- for record_id in record_ids:
+ for record_id in self.backend_adapter.search():
self._import_record(record_id)
diff --git a/connector_jira/components/jira_issue_type_mapper.py b/connector_jira/components/jira_issue_type_mapper.py
new file mode 100644
index 00000000..a16d6643
--- /dev/null
+++ b/connector_jira/components/jira_issue_type_mapper.py
@@ -0,0 +1,17 @@
+# Copyright 2016-2019 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+from odoo.addons.connector.components.mapper import mapping
+
+
+class JiraIssueTypeMapper(Component):
+ _name = "jira.issue.type.mapper"
+ _inherit = ["jira.import.mapper"]
+ _apply_on = "jira.issue.type"
+
+ direct = [("name", "name"), ("description", "description")]
+
+ @mapping
+ def backend_id(self, record):
+ return {"backend_id": self.backend_record.id}
diff --git a/connector_jira/components/jira_mapper_from_attrs.py b/connector_jira/components/jira_mapper_from_attrs.py
new file mode 100644
index 00000000..e96bf1bd
--- /dev/null
+++ b/connector_jira/components/jira_mapper_from_attrs.py
@@ -0,0 +1,17 @@
+# Copyright 2016-2019 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+
+
+class JiraMapperFromAttrs(Component):
+ _name = "jira.mapper.from.attrs"
+ _inherit = ["jira.base"]
+ _usage = "map.from.attrs"
+
+ def values(self, record, mapper_):
+ fields_values = record.get("fields", {})
+ return {
+ target: mapper_._map_direct(fields_values, source, target)
+ for source, target in getattr(mapper_, "from_fields", [])
+ }
diff --git a/connector_jira/components/binder.py b/connector_jira/components/jira_model_binder.py
similarity index 59%
rename from connector_jira/components/binder.py
rename to connector_jira/components/jira_model_binder.py
index 85e13518..be5e5203 100644
--- a/connector_jira/components/binder.py
+++ b/connector_jira/components/jira_model_binder.py
@@ -3,36 +3,13 @@
import logging
-from odoo import fields, models
+from odoo import models
from odoo.addons.component.core import Component
_logger = logging.getLogger(__name__)
-class JiraBinder(Component):
- """Binder for Odoo models
-
- Where we create an additional model holding the external id.
- The advantages to have a second models are:
- * we can link more than 1 JIRA instance to the same record
- * we can work with, lock, edit the jira binding without touching the
- normal record
-
- Default binder when no specific binder is defined for a model.
- """
-
- _name = "jira.binder"
- _inherit = ["base.binder", "jira.base"]
-
- def sync_date(self, binding):
- assert self._sync_date_field
- sync_date = binding[self._sync_date_field]
- if not sync_date:
- return
- return fields.Datetime.from_string(sync_date)
-
-
class JiraModelBinder(Component):
"""Binder for standalone models
@@ -44,11 +21,7 @@ class JiraModelBinder(Component):
_name = "jira.model.binder"
_inherit = ["base.binder", "jira.base"]
-
- _apply_on = [
- "jira.issue.type",
- ]
-
+ _apply_on = ["jira.issue.type"]
_odoo_field = "id"
def to_internal(self, external_id, unwrap=False):
diff --git a/connector_jira/components/jira_project_adapter.py b/connector_jira/components/jira_project_adapter.py
new file mode 100644
index 00000000..84fac67e
--- /dev/null
+++ b/connector_jira/components/jira_project_adapter.py
@@ -0,0 +1,91 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+import json
+import logging
+import tempfile
+
+from odoo import _, exceptions
+
+from odoo.addons.component.core import Component
+
+_logger = logging.getLogger(__name__)
+
+try:
+ from jira import JIRAError
+ from jira.utils import json_loads
+except ImportError as err:
+ _logger.debug(err)
+
+
+class JiraProjectAdapter(Component):
+ _name = "jira.project.adapter"
+ _inherit = ["jira.webservice.adapter"]
+ _apply_on = ["jira.project.project"]
+
+ def read(self, id_):
+ # pylint: disable=W8106
+ with self.handle_404():
+ return self.get(id_).raw
+
+ def get(self, id_):
+ with self.handle_404():
+ return self.client.project(id_)
+
+ def write(self, id_, values):
+ super().write(id_, values)
+ with self.handle_404():
+ return self.get(id_).update(values)
+
+ def create(self, key=None, name=None, template_name=None, values=None):
+ super().create(key=key, name=name, template_name=template_name, values=values)
+ project = self.client.create_project(
+ key=key,
+ name=name,
+ template_name=template_name,
+ )
+ if values:
+ project.update(values)
+ return project
+
+ def create_shared(self, key=None, name=None, shared_key=None, lead=None):
+ assert key and name and shared_key
+ # There is no public method for creating a shared project:
+ # https://jira.atlassian.com/browse/JRA-45929
+ # People found a private method for doing so, which is explained on:
+ # https://jira.atlassian.com/browse/JRASERVER-27256
+
+ try:
+ project = self.read(shared_key)
+ project_id = project["id"]
+ except JIRAError as err:
+ if err.status_code == 404:
+ raise exceptions.UserError(
+ _('Project template with key "%s" not found.') % shared_key
+ ) from err
+ else:
+ raise
+
+ server_url = self.client._options["server"]
+ url = server_url + "/rest/project-templates/1.0/createshared/%s" % project_id
+ payload = {"name": name, "key": key, "lead": lead}
+
+ r = self.client._session.post(url, data=json.dumps(payload))
+ if r.status_code == 200:
+ return json_loads(r)
+
+ f = tempfile.NamedTemporaryFile(
+ prefix="python-jira-error-create-shared-project-",
+ suffix=".html",
+ delete=False,
+ )
+ f.write(r.text.encode("utf-8"))
+
+ if self.logging:
+ _logger.error(
+ "Unexpected result while running create shared project."
+ f" Server response saved in {f.name} for further investigation"
+ f" [HTTP response={r.status_code}]."
+ )
+ return False
diff --git a/connector_jira/models/project_project/binder.py b/connector_jira/components/jira_project_binder.py
similarity index 84%
rename from connector_jira/models/project_project/binder.py
rename to connector_jira/components/jira_project_binder.py
index 5474bc34..cc68f974 100644
--- a/connector_jira/models/project_project/binder.py
+++ b/connector_jira/components/jira_project_binder.py
@@ -1,22 +1,16 @@
# Copyright 2016-2019 Camptocamp SA
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-import logging
-
from odoo import models
from odoo.addons.component.core import Component
-_logger = logging.getLogger(__name__)
-
class JiraProjectBinder(Component):
_name = "jira.project.binder"
_inherit = "jira.binder"
- _apply_on = [
- "jira.project.project",
- ]
+ _apply_on = ["jira.project.project"]
def _domain_to_external(self, binding):
return [
@@ -45,11 +39,8 @@ def to_external(self, binding, wrap=False):
else:
binding = self.model.browse(binding)
if wrap:
- binding = self.model.with_context(active_test=False).search(
- self._domain_to_external()
- )
+ domain = self._domain_to_external(binding)
+ binding = self.model.with_context(active_test=False).search(domain, limit=1)
if not binding:
- return None
- binding.ensure_one()
- return binding[self._external_field]
+ return
return binding[self._external_field]
diff --git a/connector_jira/components/jira_project_project_exporter.py b/connector_jira/components/jira_project_project_exporter.py
new file mode 100644
index 00000000..dd7c4d41
--- /dev/null
+++ b/connector_jira/components/jira_project_project_exporter.py
@@ -0,0 +1,51 @@
+# Copyright 2016-2019 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+
+
+class JiraProjectProjectExporter(Component):
+ _name = "jira.project.project.exporter"
+ _inherit = ["jira.exporter"]
+ _apply_on = ["jira.project.project"]
+
+ def _create_project(self, adapter, key, name, template, values):
+ project = adapter.create(
+ key=key,
+ name=name,
+ template_name=template,
+ values=values,
+ )
+ return project["projectId"]
+
+ def _create_shared_project(self, adapter, key, name, shared_key, lead):
+ project = adapter.create_shared(
+ key=key,
+ name=name,
+ shared_key=shared_key,
+ lead=lead,
+ )
+ return project["projectId"]
+
+ def _update_project(self, adapter, values):
+ adapter.write(self.external_id, values)
+
+ def _run(self, fields=None):
+ adapter = self.component(usage="backend.adapter")
+
+ key = self.binding.jira_key
+ name = self.binding.name[:80]
+ template = self.binding.project_template
+ # TODO: add lead
+
+ if self.external_id:
+ self._update_project(adapter, {"name": name, "key": key})
+ else:
+ if template == "shared":
+ self.external_id = self._create_shared_project(
+ adapter, key, name, self.binding.project_template_shared, lead=None
+ )
+ else:
+ self.external_id = self._create_project(
+ adapter, key, name, template, {}
+ )
diff --git a/connector_jira/components/jira_project_project_listener.py b/connector_jira/components/jira_project_project_listener.py
new file mode 100644
index 00000000..79b0cad3
--- /dev/null
+++ b/connector_jira/components/jira_project_project_listener.py
@@ -0,0 +1,21 @@
+# Copyright 2016-2019 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+from odoo.addons.component_event import skip_if
+
+
+class JiraProjectProjectListener(Component):
+ _name = "jira.project.project.listener"
+ _inherit = ["base.connector.listener", "jira.base"]
+ _apply_on = ["jira.project.project"]
+
+ @skip_if(lambda self, record, **kwargs: self.no_connector_export(record))
+ def on_record_create(self, record, fields=None):
+ if record.sync_action == "export":
+ record.with_delay(priority=10).export_record(fields=fields)
+
+ @skip_if(lambda self, record, **kwargs: self.no_connector_export(record))
+ def on_record_write(self, record, fields=None):
+ if record.sync_action == "export":
+ record.with_delay(priority=10).export_record(fields=fields)
diff --git a/connector_jira/components/jira_project_task_adapter.py b/connector_jira/components/jira_project_task_adapter.py
new file mode 100644
index 00000000..b8cd022a
--- /dev/null
+++ b/connector_jira/components/jira_project_task_adapter.py
@@ -0,0 +1,25 @@
+# Copyright 2016-2019 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+
+
+class JiraProjectTaskAdapter(Component):
+ _name = "jira.project.task.adapter"
+ _inherit = ["jira.webservice.adapter"]
+ _apply_on = ["jira.project.task"]
+
+ def read(self, id_, fields=None):
+ # pylint: disable=W8106
+ return self.get(id_, fields=fields).raw
+
+ def get(self, id_, fields=None):
+ with self.handle_404():
+ return self.client.issue(id_, fields=fields, expand=["renderedFields"])
+
+ def search(self, jql):
+ # we need to have at least one field which is not 'id' or 'key'
+ # due to this bug: https://github.com/pycontribs/jira/pull/289
+ issues = self.client.search_issues(jql, fields="id,updated", maxResults=None)
+ return [issue.id for issue in issues]
diff --git a/connector_jira/components/jira_project_task_batch_importer.py b/connector_jira/components/jira_project_task_batch_importer.py
new file mode 100644
index 00000000..0818e2f4
--- /dev/null
+++ b/connector_jira/components/jira_project_task_batch_importer.py
@@ -0,0 +1,17 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+
+
+class JiraProjectTaskBatchImporter(Component):
+ """Import the Jira tasks
+
+ For every id in the list of tasks, a delayed job is created.
+ Import from a given date.
+ """
+
+ _name = "jira.project.task.batch.importer"
+ _inherit = ["jira.timestamp.batch.importer"]
+ _apply_on = ["jira.project.task"]
diff --git a/connector_jira/components/jira_project_task_importer.py b/connector_jira/components/jira_project_task_importer.py
new file mode 100644
index 00000000..7b79e013
--- /dev/null
+++ b/connector_jira/components/jira_project_task_importer.py
@@ -0,0 +1,101 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import _
+
+from odoo.addons.component.core import Component
+
+
+class JiraProjectTaskImporter(Component):
+ _name = "jira.project.task.importer"
+ _inherit = ["jira.importer"]
+ _apply_on = ["jira.project.task"]
+
+ def __init__(self, work_context):
+ super().__init__(work_context)
+ self.jira_epic = None
+ self.project_binding = None
+
+ def _get_external_data(self):
+ # OVERRIDE: return the raw Jira data for ``self.external_id``
+ result = super()._get_external_data()
+ epic_field_name = self.backend_record.epic_link_field_name
+ if epic_field_name:
+ issue_adapter = self.component(
+ usage="backend.adapter", model_name="jira.project.task"
+ )
+ epic_key = result["fields"][epic_field_name]
+ if epic_key:
+ self.jira_epic = issue_adapter.read(epic_key)
+ return result
+
+ def _find_project_binding(self):
+ matcher = self.component(usage="jira.task.project.matcher")
+ self.project_binding = matcher.find_project_binding(self.external_record)
+
+ def _is_issue_type_sync(self):
+ task_sync_type_binding = self.binder_for("jira.issue.type").to_internal(
+ self.external_record["fields"]["issuetype"]["id"]
+ )
+ return task_sync_type_binding.is_sync_for_project(self.project_binding)
+
+ def _create_data(self, map_record, **kwargs):
+ return super()._create_data(
+ map_record,
+ **dict(
+ kwargs or [],
+ jira_epic=self.jira_epic,
+ project_binding=self.project_binding,
+ ),
+ )
+
+ def _update_data(self, map_record, **kwargs):
+ return super()._update_data(
+ map_record,
+ **dict(
+ kwargs or [],
+ jira_epic=self.jira_epic,
+ project_binding=self.project_binding,
+ ),
+ )
+
+ def _import(self, binding, **kwargs):
+ # called at the beginning of _import because we must be sure
+ # that dependencies are there (project and issue type)
+ self._find_project_binding()
+ if not self._is_issue_type_sync():
+ return _("Project or issue type is not synchronized.")
+ return super()._import(binding, **kwargs)
+
+ def _import_dependency_assignee(self):
+ jira_assignee = self.external_record["fields"].get("assignee") or {}
+ if jira_assignee:
+ jira_key = jira_assignee.get("accountId")
+ self._import_dependency(jira_key, "jira.res.users", record=jira_assignee)
+
+ def _import_dependency_issue_type(self):
+ jira_issue_type = self.external_record["fields"]["issuetype"]
+ jira_issue_type_id = jira_issue_type["id"]
+ self._import_dependency(
+ jira_issue_type_id, "jira.issue.type", record=jira_issue_type
+ )
+
+ def _import_dependency_parent(self):
+ jira_parent = self.external_record["fields"].get("parent")
+ if jira_parent:
+ jira_parent_id = jira_parent["id"]
+ self._import_dependency(jira_parent_id, "jira.project.task")
+
+ def _import_dependency_epic(self):
+ if self.jira_epic:
+ self._import_dependency(
+ self.jira_epic["id"], "jira.project.task", record=self.jira_epic
+ )
+
+ def _import_dependencies(self):
+ """Import the dependencies for the record"""
+ self._import_dependency_assignee()
+ self._import_dependency_issue_type()
+ self._import_dependency_parent()
+ self._import_dependency_epic()
diff --git a/connector_jira/components/jira_project_task_mapper.py b/connector_jira/components/jira_project_task_mapper.py
new file mode 100644
index 00000000..d35ca418
--- /dev/null
+++ b/connector_jira/components/jira_project_task_mapper.py
@@ -0,0 +1,131 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import _
+
+from odoo.addons.component.core import Component
+from odoo.addons.connector.components.mapper import mapping
+from odoo.addons.connector.exception import MappingError
+
+
+class JiraProjectTaskMapper(Component):
+ _name = "jira.project.task.mapper"
+ _inherit = "jira.import.mapper"
+ _apply_on = ["jira.project.task"]
+
+ direct = [("key", "jira_key")]
+
+ from_fields = [("duedate", "date_deadline")]
+
+ @mapping
+ def from_attributes(self, record):
+ return self.component(usage="map.from.attrs").values(record, self)
+
+ @mapping
+ def name(self, record):
+ # On an Epic, you have 2 fields:
+
+ # a field like 'customfield_10003' labelled "Epic Name"
+ # a field 'summary' labelled "Summary"
+
+ # The other types of tasks have only the 'summary' field, the other is
+ # empty. To simplify, we always try to read the Epic Name, which
+ # will always be empty for other types.
+ epic_name_field = self.backend_record.epic_name_field_name
+ name = False
+ if epic_name_field:
+ name = record["fields"].get(epic_name_field)
+ if not name:
+ name = record["fields"]["summary"]
+ return {"name": name}
+
+ @mapping
+ def issue_type(self, record):
+ jira_type_id = record["fields"]["issuetype"]["id"]
+ binding = self.binder_for("jira.issue.type").to_internal(jira_type_id)
+ return {"jira_issue_type_id": binding.id}
+
+ @mapping
+ def assignee(self, record):
+ assignee = record["fields"].get("assignee")
+ if not assignee:
+ return {"user_ids": False}
+ jira_key = assignee["accountId"]
+ user = self.binder_for("jira.res.users").to_internal(jira_key, unwrap=True)
+ if not user:
+ raise MappingError(
+ _(
+ 'No user found with accountId "%(jira_key)s" or email "%(email)s".'
+ " You must create a user or link it manually if the "
+ "login/email differs.",
+ jira_key=jira_key,
+ email=assignee.get("emailAddress"),
+ )
+ )
+ return {"user_ids": [(6, 0, user.ids)]}
+
+ @mapping
+ def description(self, record):
+ return {"description": record["renderedFields"]["description"]}
+
+ @mapping
+ def project(self, record):
+ proj_binding = self.options.project_binding
+ binder = self.binder_for("jira.project.project")
+ project = binder.unwrap_binding(proj_binding)
+ values = {
+ "project_id": project.id,
+ "company_id": project.company_id.id,
+ "jira_project_bind_id": proj_binding.id,
+ }
+ if not project.active:
+ values["active"] = False
+ return values
+
+ @mapping
+ def epic(self, record):
+ if not self.options.jira_epic:
+ return {}
+ binder = self.binder_for("jira.project.task")
+ binding = binder.to_internal(self.options.jira_epic["id"])
+ return {"jira_epic_link_id": binding.id}
+
+ @mapping
+ def parent(self, record):
+ jira_parent = record["fields"].get("parent")
+ if not jira_parent:
+ return {}
+ binding = self.binder_for("jira.project.task").to_internal(jira_parent["id"])
+ return {"jira_parent_id": binding.id}
+
+ @mapping
+ def backend_id(self, record):
+ return {"backend_id": self.backend_record.id}
+
+ @mapping
+ def status(self, record):
+ status_name = record["fields"].get("status", {}).get("name")
+ if not status_name:
+ return {"stage_id": False}
+ project_binder = self.binder_for("jira.project.project")
+ project = project_binder.unwrap_binding(self.options.project_binding)
+ domain = [("name", "=", status_name), ("project_ids", "=", project.id)]
+ return {"stage_id": self.env["project.task.type"].search(domain, limit=1).id}
+
+ @mapping
+ def time_estimate(self, record):
+ original_estimate = record["fields"].get("timeoriginalestimate")
+ if not original_estimate:
+ return {"allocated_hours": False}
+ return {"allocated_hours": float(original_estimate) / 3600.0}
+
+ def finalize(self, map_record, values):
+ values = values.copy()
+ if values.get("odoo_id"):
+ # If a mapping binds the issue to an existing odoo
+ # task, we should not change the project.
+ # It's not only unexpected, but would fail as soon
+ # as we have invoiced timesheet lines on the task.
+ values.pop("project_id")
+ return values
diff --git a/connector_jira/components/jira_res_users_adapter.py b/connector_jira/components/jira_res_users_adapter.py
new file mode 100644
index 00000000..f5fca9be
--- /dev/null
+++ b/connector_jira/components/jira_res_users_adapter.py
@@ -0,0 +1,35 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html)
+
+from itertools import groupby
+
+from odoo.addons.component.core import Component
+
+
+class JiraResUsersAdapter(Component):
+ _name = "jira.res.users.adapter"
+ _inherit = ["jira.webservice.adapter"]
+ _apply_on = ["jira.res.users"]
+
+ def read(self, id_):
+ # pylint: disable=W8106
+ with self.handle_404():
+ return self.client.user(id_).raw
+
+ def search(self, fragment=None):
+ """Search users
+
+ :param fragment: a string to match usernames, name or email against.
+ """
+ users = self.client.search_users(
+ query=fragment, maxResults=None, includeActive=True, includeInactive=True
+ )
+ # User 'accountId' is unique, and if the same key appears several times,
+ # it means that the same user is found in multiple User Directories
+ return list(
+ map(
+ lambda group: list(group[1])[0],
+ groupby(users, key=lambda user: user.accountId),
+ )
+ )
diff --git a/connector_jira/models/res_users/importer.py b/connector_jira/components/jira_res_users_importer.py
similarity index 82%
rename from connector_jira/models/res_users/importer.py
rename to connector_jira/components/jira_res_users_importer.py
index 779c01cc..9f046fd2 100644
--- a/connector_jira/models/res_users/importer.py
+++ b/connector_jira/components/jira_res_users_importer.py
@@ -7,26 +7,22 @@
from odoo.addons.queue_job.exception import JobError
-class UserImporter(Component):
+class JiraResUsersImporter(Component):
_name = "jira.res.users.importer"
_inherit = ["jira.importer"]
_apply_on = ["jira.res.users"]
def _import(self, binding):
- record = self.external_record
jira_key = self.external_id
- binder = self.binder_for("jira.res.users")
- user = binder.to_internal(jira_key, unwrap=True)
+ user = self.binder_for("jira.res.users").to_internal(jira_key, unwrap=True)
if not user:
- email = record.get("emailAddress")
+ email = self.external_record.get("emailAddress")
if email is None:
raise JobError(
"Unable to find a user from account Id (%s) and no email provided"
% jira_key
)
- user = self.env["res.users"].search(
- [("email", "=", email)],
- )
+ user = self.env["res.users"].search([("email", "=", email)])
if len(user) > 1:
raise JobError(
_(
diff --git a/connector_jira/components/jira_task_project_matcher.py b/connector_jira/components/jira_task_project_matcher.py
new file mode 100644
index 00000000..28035ccc
--- /dev/null
+++ b/connector_jira/components/jira_task_project_matcher.py
@@ -0,0 +1,20 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+
+from odoo.addons.component.core import Component
+
+
+class JiraTaskProjectMatcher(Component):
+ _name = "jira.task.project.matcher"
+ _inherit = ["jira.base"]
+ _usage = "jira.task.project.matcher"
+
+ def find_project_binding(self, jira_task_data, unwrap=False):
+ jira_project_id = jira_task_data["fields"]["project"]["id"]
+ binder = self.binder_for("jira.project.project")
+ return binder.to_internal(jira_project_id, unwrap=unwrap)
+
+ def fallback_project_for_worklogs(self):
+ return self.backend_record.worklog_fallback_project_id
diff --git a/connector_jira/components/jira_timestamp_batch_importer.py b/connector_jira/components/jira_timestamp_batch_importer.py
new file mode 100644
index 00000000..59569921
--- /dev/null
+++ b/connector_jira/components/jira_timestamp_batch_importer.py
@@ -0,0 +1,91 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+"""
+
+Importers for Jira.
+
+An import can be skipped if the last sync date is more recent than
+the last update in Jira.
+
+They should call the ``bind`` method of the binder even if the records
+are already bound, to update the last sync date.
+
+"""
+
+import logging
+from datetime import datetime, timedelta
+
+from odoo import _
+
+from odoo.addons.component.core import AbstractComponent
+from odoo.addons.queue_job.exception import RetryableJobError
+
+from .common import IMPORT_DELTA, JIRA_JQL_DATETIME_FORMAT
+
+_logger = logging.getLogger(__name__)
+
+
+class JiraTimestampBatchImporter(AbstractComponent):
+ """Batch Importer working with a jira.backend.timestamp.record
+
+ It locks the timestamp to ensure no other job is working on it,
+ and uses the latest timestamp value as reference for the search.
+
+ The role of a BatchImporter is to search for a list of
+ items to import, then it can either import them directly or delay
+ the import of each item separately.
+ """
+
+ _name = "jira.timestamp.batch.importer"
+ _inherit = ["base.importer", "jira.base"]
+ _usage = "timestamp.batch.importer"
+
+ def run(self, timestamp, force=False, **kwargs):
+ """Run the synchronization using the timestamp"""
+ original_timestamp_value = timestamp.last_timestamp
+ if not timestamp._lock():
+ self._handle_lock_failed(timestamp)
+
+ next_timestamp_value, records = self._search(timestamp)
+ timestamp._update_timestamp(next_timestamp_value)
+ number = self._handle_records(records, force=force)
+ return _(
+ "Batch from %s UTC to %s UTC "
+ "generated %s imports"
+ ) % (original_timestamp_value, next_timestamp_value, number)
+
+ def _handle_records(self, records, force=False):
+ """Handle the records to import and return the number handled"""
+ for record_id in records:
+ self._import_record(record_id, force=force)
+ return len(records)
+
+ def _handle_lock_failed(self, timestamp):
+ _logger.warning("Failed to acquire timestamps %s", timestamp, exc_info=True)
+ raise RetryableJobError(
+ "Concurrent job / process already syncing", ignore_retry=True
+ )
+
+ def _search(self, timestamp):
+ """Return a tuple (next timestamp value, jira record ids)"""
+ since, until = timestamp.last_timestamp, datetime.now()
+ since_str, until_str = map(
+ lambda x: x.strftime(JIRA_JQL_DATETIME_FORMAT), (since, until)
+ )
+ return (
+ max(until - timedelta(seconds=IMPORT_DELTA), since),
+ self.backend_adapter.search(
+ f'updated >= "{since_str}" and updated <= "{until_str}"',
+ ),
+ )
+
+ def _import_record(self, record_id, force=False, record=None, **kwargs):
+ """Delay the import of the records"""
+ self.model.with_delay(**kwargs).import_record(
+ self.backend_record,
+ record_id,
+ force=force,
+ record=record,
+ )
diff --git a/connector_jira/components/backend_adapter.py b/connector_jira/components/jira_webservice_adapter.py
similarity index 83%
rename from connector_jira/components/backend_adapter.py
rename to connector_jira/components/jira_webservice_adapter.py
index 12d184a9..655c264d 100644
--- a/connector_jira/components/backend_adapter.py
+++ b/connector_jira/components/jira_webservice_adapter.py
@@ -19,10 +19,7 @@
_logger.debug(err)
-JIRA_JQL_DATETIME_FORMAT = "%Y-%m-%d %H:%M" # no seconds :-(
-
-
-class JiraAdapter(Component):
+class JiraWebserviceAdapter(Component):
"""Generic adapter for using the JIRA backend"""
_name = "jira.webservice.adapter"
@@ -43,27 +40,20 @@ def client(self):
def _post_get_json(
self,
path,
- data=None,
- base=jira.resources.Resource.JIRA_BASE_URL,
+ params=None,
+ base=jira.client.JIRA.JIRA_BASE_URL,
):
"""Get the json for a given path and payload
:param path: The subpath required
:type path: str
- :param data: a payload for the method
- :type data: A json payload
+ :param params: a payload for the method
+ :type params: A json payload
:param base: The Base JIRA URL, defaults to the instance base.
:type base: Optional[str]
:rtype: Union[Dict[str, Any], List[Dict[str, str]]]
"""
- url = self.client._get_url(path, base)
- r = self.client._session.post(url, data=data)
- try:
- r_json = jira.utils.json_loads(r)
- except ValueError as e:
- logging.error(f"{e}\n{r.text}")
- raise e
- return r_json
+ return self.client._get_json(path=path, base=base, params=params, use_post=True)
@contextmanager
def handle_404(self):
diff --git a/connector_jira/components/jira_worklog_adapter.py b/connector_jira/components/jira_worklog_adapter.py
new file mode 100644
index 00000000..572c0b7b
--- /dev/null
+++ b/connector_jira/components/jira_worklog_adapter.py
@@ -0,0 +1,80 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from collections import namedtuple
+
+from odoo.addons.component.core import Component
+
+UpdatedWorklog = namedtuple(
+ "UpdatedWorklog",
+ # id as integer, timestamp
+ "worklog_id updated",
+)
+
+UpdatedWorklogSince = namedtuple(
+ "UpdatedWorklogSince",
+ # timestamp, timestamp, list[UpdatedWorklog]
+ "since until updated_worklogs",
+)
+
+
+DeletedWorklogSince = namedtuple(
+ "DeletedWorklogSince",
+ # timestamp, timestamp, list[ids as integer]
+ "since until deleted_worklog_ids",
+)
+
+
+class WorklogAdapter(Component):
+ _name = "jira.worklog.adapter"
+ _inherit = "jira.webservice.adapter"
+ _apply_on = ["jira.account.analytic.line"]
+
+ def read(self, issue_id, worklog_id):
+ # pylint: disable=W8106
+ with self.handle_404():
+ return self.client.worklog(issue_id, worklog_id).raw
+
+ def search(self, issue_id):
+ """Search worklogs of an issue"""
+ return [worklog.id for worklog in self.client.worklogs(issue_id)]
+
+ @staticmethod
+ def _chunks(whole, size):
+ """Yield successive ``size``-sized chunks from ``whole``."""
+ for i in range(0, len(whole), size):
+ yield whole[i : i + size]
+
+ def yield_read(self, worklog_ids):
+ """Generator returning worklog ids data"""
+ # the method returns max 1000 results
+ for chunk in self._chunks(worklog_ids, 1000):
+ yield from self._post_get_json("worklog/list", params={"ids": chunk})
+
+ def updated_since(self, since=None):
+ original_since = since
+ updated_worklogs = []
+ result = {"lastPage": False}
+ while not result["lastPage"]:
+ result = self.client._get_json("worklog/updated", params={"since": since})
+ updated_worklogs += [
+ UpdatedWorklog(worklog_id=row["worklogId"], updated=row["updatedTime"])
+ for row in result["values"]
+ ]
+ until = since = result["until"]
+ return UpdatedWorklogSince(
+ since=original_since, until=until, updated_worklogs=updated_worklogs
+ )
+
+ def deleted_since(self, since=None):
+ original_since = since
+ deleted_worklog_ids = []
+ result = {"lastPage": False}
+ while not result["lastPage"]:
+ result = self.client._get_json("worklog/deleted", params={"since": since})
+ deleted_worklog_ids += [row["worklogId"] for row in result["values"]]
+ until = since = result["until"]
+ return DeletedWorklogSince(
+ since=original_since, until=until, deleted_worklog_ids=deleted_worklog_ids
+ )
diff --git a/connector_jira/components/project_project_listener.py b/connector_jira/components/project_project_listener.py
new file mode 100644
index 00000000..6245ba01
--- /dev/null
+++ b/connector_jira/components/project_project_listener.py
@@ -0,0 +1,30 @@
+# Copyright 2016-2019 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo.addons.component.core import Component
+from odoo.addons.component_event import skip_if
+
+
+class ProjectProjectListener(Component):
+ _name = "project.project.listener"
+ _inherit = ["base.connector.listener", "jira.base"]
+ _apply_on = ["project.project"]
+
+ @skip_if(lambda self, record, **kwargs: self.no_connector_export(record))
+ def on_record_write(self, record, fields=None):
+ # Remove ``jira_bind_ids`` and ``message_follower_ids`` from the fields:
+ # - ``jira_bind_ids``: when this field has been modified, an export is triggered
+ # by ``jira.project.project.listener`` after the field's values have been
+ # written to the proper ``jira.project.project`` records, so we ignore this
+ # field to avoid duplicates
+ # - ``message_follower_ids``: when ``mail.thread.message_subscribe()`` has been
+ # called, it does a ``write()`` on field ``message_follower_ids``, but we
+ # never want to export that
+ fields = set(fields or [])
+ fields.difference_update({"jira_bind_ids", "message_follower_ids"})
+ # After cleaning the fields, if we still have some fields to export, do it
+ if fields:
+ fields = list(fields)
+ for binding in record.jira_bind_ids:
+ if binding.sync_action == "export":
+ binding.with_delay(priority=10).export_record(fields=fields)
diff --git a/connector_jira/controllers/__init__.py b/connector_jira/controllers/__init__.py
index c47ab2c2..f8a4a557 100644
--- a/connector_jira/controllers/__init__.py
+++ b/connector_jira/controllers/__init__.py
@@ -1,3 +1,4 @@
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-from . import main
+from . import jira_connect_app_controller
+from . import jira_webhook_controller
diff --git a/connector_jira/controllers/jira_connect_app_controller.py b/connector_jira/controllers/jira_connect_app_controller.py
new file mode 100644
index 00000000..e315b5e5
--- /dev/null
+++ b/connector_jira/controllers/jira_connect_app_controller.py
@@ -0,0 +1,193 @@
+# Copyright 2016-2024 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+"""
+
+Receive webhooks from Jira
+
+
+(Outdated) JIRA could well send all the data in the webhook request's body,
+which would spare Odoo from having to make another GET to fetch this data, but
+JIRA webhooks are potentially insecure as we don't know if it really
+comes from JIRA. So we don't use the data sent by the webhook and the job
+gets the data by itself (with the nice side-effect that the job is retryable).
+
+TODO: we now have authenticated calls from Jira through the JWT tokens, so we
+ could move back to a setup where we avoid querying the data back to Jira.
+ Changing this is on the roadmap.
+
+"""
+
+import json
+import logging
+
+import jwt
+import requests
+from werkzeug.exceptions import Forbidden
+
+import odoo
+from odoo import http
+from odoo.http import request
+
+from odoo.addons.web.controllers.utils import ensure_db
+
+_logger = logging.getLogger(__name__)
+
+
+class JiraConnectAppController(http.Controller):
+ """Manage the lifecyle of the App
+
+ The app-descriptor endpoint when called returns the app descriptor,
+ which lists the endpoints for installation / uninstallation /
+ enabling / disabling the app on a Jira cloud server.
+
+ The lifecycle requests all receive a payload with the following keys:
+
+ {
+ "key": "installed-addon-key",
+ "clientKey": "unique-client-identifier",
+ "sharedSecret": "a-secret-key-not-to-be-lost",
+ "serverVersion": "server-version", # DEPRECATED
+ "pluginsVersion": "version-of-connect",
+ "baseUrl": "https://example.atlassian.net",
+ "displayUrl": "https://issues.example.com",
+ "displayUrlServicedeskHelpCenter": "https://support.example.com",
+ "productType": "jira",
+ "description": "Atlassian Jira at https://example.atlassian.net",
+ "serviceEntitlementNumber": "SEN-number",
+ "entitlementId": "Entitlement-Id",
+ "entitlementNumber": "Entitlement-Number",
+ "eventType": "installed",
+ "installationId":
+ "ari:cloud:ecosystem::installation/uuid-of-forge-installation-identifier"
+ }
+
+ Upon reception of an "installed" lifecycle call, we create a backend record
+ for the app, in state "disabled".
+ Upon reception of an "enabled" lifecycle call, we set the backend to "enabled".
+ Upon reception of a "disabled" lifecycle call, we set the backend to "disabled".
+ Upon reception of an "uninstalled" lifecycle call, we unlink the backend record.
+
+ Documentation:
+ https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/#lifecycle
+ """
+
+ @http.route(
+ "/jira//app-descriptor.json",
+ type="http",
+ methods=["GET"],
+ auth="public",
+ csrf=False,
+ )
+ def app_descriptor(self, backend_id, **kwargs):
+ ensure_db()
+ request.update_env(user=odoo.SUPERUSER_ID)
+ backend = request.env["jira.backend"].search([("id", "=", backend_id)])
+ body = json.dumps(backend._get_app_descriptor() if backend else {})
+ return request.make_response(
+ body, [("Content-Type", "application/json"), ("Content-Length", len(body))]
+ )
+
+ def _validate_jwt_token(self):
+ """Use authorization header to validate the request
+
+ The process is described in
+ https://developer.atlassian.com/cloud/jira/platform/security-for-connect-apps/
+ """
+ auth_header = request.httprequest.headers["Authorization"]
+ assert auth_header.startswith("JWT "), "unexpected content in Auth header"
+ jwt_token = auth_header[4:]
+ decoded = jwt.get_unverified_header(jwt_token)
+ if "kid" in decoded:
+ kid = decoded["kid"]
+ # pylint: disable=E8106
+ response = requests.get(f"https://connect-install-keys.atlassian.com/{kid}")
+ response.raise_for_status()
+ public_key = response.text
+ response.close()
+ _logger.info("public key:\n%s", public_key)
+ decoded = jwt.decode(
+ jwt_token,
+ public_key,
+ algorithms=[decoded["alg"]],
+ audience=request.env["jira.backend"].sudo()._get_base_url(),
+ )
+ _logger.warning("decoded JWT Token: %s", decoded)
+ else:
+ raise Forbidden()
+ return True
+
+ @http.route(
+ "/jira//installed",
+ type="json",
+ methods=["POST"],
+ auth="public", # security implemented by _validate_jwt_token
+ csrf=False,
+ )
+ def install_app(self, backend_id, **kwargs):
+ self._validate_jwt_token()
+ payload = request.get_json_data()
+ _logger.info("installed: %s", payload)
+ assert payload["eventType"] == "installed"
+ ensure_db()
+ backend = request.env["jira.backend"].sudo().browse(backend_id)
+ return {"status": backend._install_app(payload)}
+
+ @http.route(
+ "/jira//uninstalled",
+ type="json",
+ methods=["POST"],
+ auth="public", # security implemented by _validate_jwt_token
+ csrf=False,
+ )
+ def uninstall_app(self, backend_id, **kwargs):
+ self._validate_jwt_token()
+ payload = request.get_json_data()
+ _logger.info("uninstalled: %s", payload)
+ assert payload["eventType"] == "uninstalled"
+ backend = request.env["jira.backend"].sudo().browse(backend_id)
+ return {"status": backend._uninstall_app(payload)}
+
+ @http.route(
+ "/jira//enabled",
+ type="json",
+ methods=["POST"],
+ auth="public", # security handled with manual JWT check (backend._validate_jwt)
+ csrf=False,
+ )
+ def enable_app(self, backend_id, **kwargs):
+ # self._validate_jwt_token()
+ payload = request.get_json_data()
+ _logger.info("enabled: %s", payload)
+ assert payload["eventType"] == "enabled"
+ backend = request.env["jira.backend"].sudo().browse(backend_id)
+ qstring = request.httprequest.query_string
+ if isinstance(qstring, bytes):
+ qstring = qstring.decode("utf-8")
+ backend._validate_jwt(
+ request.httprequest.headers["Authorization"],
+ f"{request.httprequest.path}?{qstring}",
+ )
+ return {"status": backend._enable_app(payload)}
+
+ @http.route(
+ "/jira//disabled",
+ type="json",
+ methods=["POST"],
+ auth="public", # security handled with manual JWT check (backend._validate_jwt)
+ csrf=False,
+ )
+ def disable_app(self, backend_id, **kwargs):
+ # self._validate_jwt_token()
+ payload = request.get_json_data()
+ _logger.info("disabled: %s", payload)
+ assert payload["eventType"] == "disabled"
+ backend = request.env["jira.backend"].sudo().browse(backend_id)
+ qstring = request.httprequest.query_string
+ if isinstance(qstring, bytes):
+ qstring = qstring.decode("utf-8")
+ backend._validate_jwt(
+ request.httprequest.headers["Authorization"],
+ f"{request.httprequest.path}?{qstring}",
+ )
+ return {"status": backend._disable_app(payload)}
diff --git a/connector_jira/controllers/jira_webhook_controller.py b/connector_jira/controllers/jira_webhook_controller.py
new file mode 100644
index 00000000..e7866da2
--- /dev/null
+++ b/connector_jira/controllers/jira_webhook_controller.py
@@ -0,0 +1,109 @@
+# Copyright 2016-2024 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+"""
+
+Receive webhooks from Jira
+
+
+(Outdated) JIRA could well send all the data in the webhook request's body,
+which would spare Odoo from having to make another GET to fetch this data, but
+JIRA webhooks are potentially insecure as we don't know if it really
+comes from JIRA. So we don't use the data sent by the webhook and the job
+gets the data by itself (with the nice side-effect that the job is retryable).
+
+TODO: we now have authenticated calls from Jira through the JWT tokens, so we
+ could move back to a setup where we avoid querying the data back to Jira.
+ Changing this is on the roadmap.
+
+"""
+
+import logging
+import pprint
+
+import odoo
+from odoo import _, http
+from odoo.http import request
+
+from odoo.addons.web.controllers.utils import ensure_db
+
+_logger = logging.getLogger(__name__)
+
+
+class JiraWebhookController(http.Controller):
+ @http.route(
+ "/connector_jira//webhooks/issue",
+ type="json",
+ auth="none", # security handled with manual JWT check (backend._validate_jwt)
+ csrf=False,
+ )
+ def webhook_issue(self, backend_id, issue_id=None, **kw):
+ ensure_db()
+ data = request.get_json_data()
+ pprint.pprint(data)
+ request.update_env(user=odoo.SUPERUSER_ID)
+ backend = request.env["jira.backend"].search(
+ [("id", "=", backend_id), ("state", "=", "running")]
+ )
+ if not backend:
+ _logger.warning(
+ "Received an Issue webhook from Jira for backend %d but cannot find a "
+ "matching running backend",
+ backend_id,
+ )
+ return
+ qstring = request.httprequest.query_string
+ if isinstance(qstring, bytes):
+ qstring = qstring.decode("utf-8")
+ backend._validate_jwt(
+ request.httprequest.headers["Authorization"],
+ f"{request.httprequest.path}?{qstring}",
+ )
+ model = request.env["jira.project.task"]
+ args = (backend, data["issue"]["id"])
+ if data["webhookEvent"] == "jira:issue_deleted":
+ delay_msg = _("Delete a local issue which has been deleted on JIRA")
+ method = "delete_record"
+ else:
+ delay_msg = _("Import a issue from JIRA")
+ method = "import_record"
+ getattr(model.with_delay(description=delay_msg), method)(*args)
+
+ @http.route(
+ "/connector_jira//webhooks/worklog",
+ type="json",
+ auth="none", # security handled with manual JWT check (backend._validate_jwt)
+ csrf=False,
+ )
+ def webhook_worklog(self, backend_id, **kw):
+ ensure_db()
+ data = request.get_json_data()
+ pprint.pprint(data)
+ request.update_env(user=odoo.SUPERUSER_ID)
+ backend = request.env["jira.backend"].search(
+ [("id", "=", backend_id), ("state", "=", "running")]
+ )
+ if not backend:
+ _logger.warning(
+ "Received a Worklog webhook from Jira for backend %d but cannot find a "
+ "matching runnign backend",
+ backend_id,
+ )
+ return
+ qstring = request.httprequest.query_string
+ if isinstance(qstring, bytes):
+ qstring = qstring.decode("utf-8")
+ backend._validate_jwt(
+ request.httprequest.headers["Authorization"],
+ f"{request.httprequest.path}?{qstring}",
+ )
+ model = request.env["jira.account.analytic.line"]
+ if data["webhookEvent"] == "worklog_deleted":
+ delay_msg = _("Delete a local worklog which has been deleted on JIRA")
+ method = "delete_record"
+ args = (backend, data["worklog"]["id"])
+ else:
+ delay_msg = _("Import a worklog from JIRA")
+ method = "import_record"
+ args = (backend, data["worklog"]["issueId"], data["worklog"]["id"])
+ getattr(model.with_delay(description=delay_msg), method)(*args)
diff --git a/connector_jira/controllers/main.py b/connector_jira/controllers/main.py
deleted file mode 100644
index 1332d71f..00000000
--- a/connector_jira/controllers/main.py
+++ /dev/null
@@ -1,287 +0,0 @@
-# Copyright 2016-2024 Camptocamp SA
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-"""
-
-Receive webhooks from Jira
-
-
-(Outdated) JIRA could well send all the data in the webhook request's body,
-which would avoid Odoo to make another GET to get this data, but
-JIRA webhooks are potentially insecure as we don't know if it really
-comes from JIRA. So we don't use the data sent by the webhook and the job
-gets the data by itself (with the nice side-effect that the job is retryable).
-
-TODO: we now have authenticated calls from Jira through the JWT tokens, so we
-could move back to a setup where we avoid querying the data back to Jira.
-Changing this is on the roadmap.
-
-"""
-
-import json
-import logging
-
-import jwt
-import requests
-from werkzeug.exceptions import Forbidden
-
-import odoo
-from odoo import _, http
-from odoo.http import request
-
-from odoo.addons.web.controllers.main import ensure_db
-
-_logger = logging.getLogger(__name__)
-
-
-class JiraWebhookController(http.Controller):
- @http.route(
- "/connector_jira//webhooks/issue",
- type="json",
- auth="none", # security handled with manual JWT check (backend._validate_jwt)
- csrf=False,
- )
- def webhook_issue(self, backend_id, issue_id=None, **kw):
- ensure_db()
- import pprint
-
- pprint.pprint(request.jsonrequest)
- request.uid = odoo.SUPERUSER_ID
- env = request.env
- backend = env["jira.backend"].search(
- [("id", "=", backend_id), ("state", "=", "running")]
- )
- if not backend:
- _logger.warning(
- "Received an Issue webhook from Jira for backend %d but cannot find a "
- "matching running backend",
- backend_id,
- )
- return
- backend._validate_jwt(
- request.httprequest.headers["Authorization"],
- f"{request.httprequest.path}?{request.httprequest.query_string}",
- )
- action = request.jsonrequest["webhookEvent"]
-
- payload = request.jsonrequest["issue"]
- issue_id = payload["id"]
-
- delayable_model = env["jira.project.task"].with_delay()
- if action == "jira:issue_deleted":
- delayable_model.delete_record(backend, issue_id)
- else:
- delayable_model.import_record(backend, issue_id)
-
- @http.route(
- "/connector_jira//webhooks/worklog",
- type="json",
- auth="none", # security handled with manual JWT check (backend._validate_jwt)
- csrf=False,
- )
- def webhook_worklog(self, backend_id, **kw):
- ensure_db()
- request.uid = odoo.SUPERUSER_ID
- env = request.env
- backend = env["jira.backend"].search(
- [("id", "=", backend_id), ("state", "=", "running")]
- )
- if not backend:
- _logger.warning(
- "Received a Worklog webhook from Jira for backend %d but cannot find a "
- "matching runnign backend",
- backend_id,
- )
- return
- backend._validate_jwt(
- request.httprequest.headers["Authorization"],
- f"{request.httprequest.path}?{request.httprequest.query_string}",
- )
- action = request.jsonrequest["webhookEvent"]
-
- payload = request.jsonrequest["worklog"]
-
- issue_id = payload["issueId"]
- worklog_id = payload["id"]
-
- if action == "worklog_deleted":
- env["jira.account.analytic.line"].with_delay(
- description=_("Delete a local worklog which has been deleted on JIRA")
- ).delete_record(backend, worklog_id)
- else:
- env["jira.account.analytic.line"].with_delay(
- description=_("Import a worklog from JIRA")
- ).import_record(backend, issue_id, worklog_id)
-
-
-class JiraConnectAppController(http.Controller):
- """Manage the lifecyle of the App
-
- The app-descriptor endpoint when called returns the app descriptor,
- which lists the endpoints for installation / uninstallation /
- enabling / disabling the app on a Jira cloud server.
-
- The lifecycle requests all receive a payload with the following keys:
-
- {
- "key": "installed-addon-key",
- "clientKey": "unique-client-identifier",
- "sharedSecret": "a-secret-key-not-to-be-lost",
- "serverVersion": "server-version", # DEPRECATED
- "pluginsVersion": "version-of-connect",
- "baseUrl": "https://example.atlassian.net",
- "displayUrl": "https://issues.example.com",
- "displayUrlServicedeskHelpCenter": "https://support.example.com",
- "productType": "jira",
- "description": "Atlassian Jira at https://example.atlassian.net",
- "serviceEntitlementNumber": "SEN-number",
- "entitlementId": "Entitlement-Id",
- "entitlementNumber": "Entitlement-Number",
- "eventType": "installed",
- "installationId":
- "ari:cloud:ecosystem::installation/uuid-of-forge-installation-identifier"
- }
-
- Upon reception of an "installed" lifecycle call, we create a backend record
- for the app, in state "disabled".
- Upon reception of an "enabled" lifecycle call, we set the backend to "enabled".
- Upon reception of a "disabled" lifecycle call, we set the backend to "disabled".
- Upon reception of a "uninstalled" lifecycle call, we unlink the backend record.
-
- Documentation:
- https://developer.atlassian.com/cloud/jira/platform/connect-app-descriptor/#lifecycle
- """
-
- @http.route(
- "/jira//app-descriptor.json",
- type="http",
- methods=["GET"],
- auth="public",
- csrf=False,
- )
- def app_descriptor(self, backend_id, **kwargs):
- ensure_db()
- request.uid = odoo.SUPERUSER_ID
- env = request.env
- backend = env["jira.backend"].search([("id", "=", backend_id)])
- if not backend:
- descriptor = {}
- else:
- descriptor = backend._get_app_descriptor()
- mime = "application/json"
-
- body = json.dumps(descriptor)
- return request.make_response(
- body, [("Content-Type", mime), ("Content-Length", len(body))]
- )
-
- def _validate_jwt_token(self):
- """use autorization header to validate the request
- The process is described in
- https://developer.atlassian.com/cloud/jira/platform/security-for-connect-apps/
- """
- authorization_header = request.httprequest.headers["Authorization"]
- assert authorization_header.startswith(
- "JWT "
- ), "unexpected content in Authorization header"
- jwt_token = authorization_header[4:]
- decoded = jwt.get_unverified_header(jwt_token)
- if "kid" in decoded:
- # pylint: disable=E8106
- response = requests.get(
- f"https://connect-install-keys.atlassian.com/{decoded['kid']}"
- )
- response.raise_for_status()
- public_key = response.text
- response.close()
- _logger.info("public key:\n%s", public_key)
- decoded = jwt.decode(
- jwt_token,
- public_key,
- algorithms=[decoded["alg"]],
- audience=request.env["jira.backend"].sudo()._get_base_url(),
- )
- _logger.warning("decoded JWT Token: %s", decoded)
- else:
- raise Forbidden()
- return True
-
- @http.route(
- "/jira//installed",
- type="json",
- methods=["POST"],
- auth="public", # security implemented by _validated_jwt_token
- csrf=False,
- )
- def install_app(self, backend_id, **kwargs):
- self._validate_jwt_token()
- payload = request.jsonrequest
- _logger.info("installed: %s", payload)
-
- assert payload["eventType"] == "installed"
- ensure_db()
- env = request.env
- backend = env["jira.backend"].sudo().browse(backend_id)
- response = backend._install_app(payload)
- return {"status": response}
-
- @http.route(
- "/jira//uninstalled",
- type="json",
- methods=["POST"],
- auth="public", # security implemented by _validated_jwt_token
- csrf=False,
- )
- def uninstall_app(self, backend_id, **kwargs):
- self._validate_jwt_token()
- payload = request.jsonrequest
- _logger.info("uninstalled: %s", payload)
- assert payload["eventType"] == "uninstalled"
- env = request.env
- backend = env["jira.backend"].sudo().browse(backend_id)
- response = backend._uninstall_app(payload)
- return {"status": response}
-
- @http.route(
- "/jira//enabled",
- type="json",
- methods=["POST"],
- auth="public", # security handled with manual JWT check (backend._validate_jwt)
- csrf=False,
- )
- def enable_app(self, backend_id, **kwargs):
- # self._validate_jwt_token()
- payload = request.jsonrequest
- _logger.info("enabled: %s", payload)
- assert payload["eventType"] == "enabled"
- env = request.env
- backend = env["jira.backend"].sudo()
- backend = env["jira.backend"].sudo().browse(backend_id)
- backend._validate_jwt(
- request.httprequest.headers["Authorization"],
- f"{request.httprequest.path}?{request.httprequest.query_string}",
- )
- response = backend._enable_app(payload)
- return {"status": response}
-
- @http.route(
- "/jira//disabled",
- type="json",
- methods=["POST"],
- auth="public", # security handled with manual JWT check (backend._validate_jwt)
- csrf=False,
- )
- def disable_app(self, backend_id, **kwargs):
- # self._validate_jwt_token()
- payload = request.jsonrequest
- _logger.info("disabled: %s", payload)
- assert payload["eventType"] == "disabled"
- env = request.env
- backend = env["jira.backend"].sudo()
- backend = env["jira.backend"].sudo().browse(backend_id)
- backend._validate_jwt(
- request.httprequest.headers["Authorization"],
- f"{request.httprequest.path}?{request.httprequest.query_string}",
- )
- response = backend._disable_app(payload)
- return {"status": response}
diff --git a/connector_jira/data/cron.xml b/connector_jira/data/cron.xml
index da4883d4..008626da 100644
--- a/connector_jira/data/cron.xml
+++ b/connector_jira/data/cron.xml
@@ -5,47 +5,47 @@
codemodel._scheduler_import_project_task()
-
+ 10minutes-1
-
+ JIRA - Import Userscodemodel._scheduler_import_res_users()
-
+ 10minutes-1
-
+ JIRA - Import Worklogscodemodel._scheduler_import_analytic_line()
-
+ 10minutes-1
-
+ JIRA - Import Deleted Worklogscodemodel._scheduler_delete_analytic_line()
-
+ 10minutes-1
-
+
diff --git a/connector_jira/data/queue_job_channel.xml b/connector_jira/data/queue_job_channel.xml
new file mode 100644
index 00000000..3726d323
--- /dev/null
+++ b/connector_jira/data/queue_job_channel.xml
@@ -0,0 +1,8 @@
+
+
+
+
+ connector_jira.import
+
+
+
diff --git a/connector_jira/data/queue_job_data.xml b/connector_jira/data/queue_job_function.xml
similarity index 91%
rename from connector_jira/data/queue_job_data.xml
rename to connector_jira/data/queue_job_function.xml
index 2cd85ee6..2b19f8c1 100644
--- a/connector_jira/data/queue_job_data.xml
+++ b/connector_jira/data/queue_job_function.xml
@@ -1,13 +1,6 @@
-
-
- connector_jira.import
-
-
-
-
-
import_batch
-
run_batch_timestamp
-
delete_record
-
import_record
-
export_record
@@ -56,7 +44,6 @@
-
import_batch
diff --git a/connector_jira/fields.py b/connector_jira/fields.py
index bbe77acf..a045e201 100644
--- a/connector_jira/fields.py
+++ b/connector_jira/fields.py
@@ -24,7 +24,7 @@ class MilliDatetime(fields.Field):
column_type = ("timestamp", "timestamp")
@staticmethod
- def from_string(value):
+ def to_datetime(value):
"""Convert a string to :class:`datetime` including milliseconds"""
if not value:
return None
@@ -37,7 +37,10 @@ def from_string(value):
if len(value) > fields.DATETIME_LENGTH:
return datetime.strptime(value, MILLI_DATETIME_FORMAT)
else:
- return fields.Datetime.from_string(value)
+ return fields.Datetime.to_datetime(value)
+
+ # Backward compatibility and consistency w/ fields.Datetime
+ from_string = to_datetime
@staticmethod
def to_string(value):
@@ -60,4 +63,4 @@ def convert_to_cache(self, value, record, validate=True):
raise TypeError(
f"{value} (field {self}) must be string or datetime, not date."
)
- return self.from_string(value)
+ return self.to_datetime(value)
diff --git a/connector_jira/i18n/connector_jira.pot b/connector_jira/i18n/connector_jira.pot
index 1d7eaa93..092ef881 100644
--- a/connector_jira/i18n/connector_jira.pot
+++ b/connector_jira/i18n/connector_jira.pot
@@ -2751,7 +2751,7 @@ msgid "Reference"
msgstr ""
#. module: connector_jira
-#: model:ir.actions.act_window,name:connector_jira.action_jira_aa_line_import
+#: model:ir.actions.act_window,name:connector_jira.act_server_jira_aa_line_import
msgid "Refresh Worklogs from Jira"
msgstr ""
diff --git a/connector_jira/migrations/15.0.1.0.0/pre-migrate.py b/connector_jira/migrations/15.0.1.0.0/pre-migrate.py
deleted file mode 100644
index cd1d6b5a..00000000
--- a/connector_jira/migrations/15.0.1.0.0/pre-migrate.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2022 Camptocamp SA
-# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
-from openupgradelib import openupgrade
-
-
-def migrate(cr, version):
- add_missing_xmlid_on_channel(cr)
-
-
-def add_missing_xmlid_on_channel(cr):
- query = """
- SELECT id FROM queue_job_channel
- WHERE complete_name='root.connector_jira.import';
- """
- cr.execute(query)
- channel = cr.fetchall()
- if channel:
- openupgrade.add_xmlid(
- cr,
- "connector_jira",
- "import_root",
- "queue.job.channel",
- channel[0][0],
- )
diff --git a/connector_jira/migrations/15.0.2.0.0/pre-migrate.py b/connector_jira/migrations/15.0.2.0.0/pre-migrate.py
deleted file mode 100644
index b64af637..00000000
--- a/connector_jira/migrations/15.0.2.0.0/pre-migrate.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2024 Camptocamp SA
-# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
-
-
-def migrate(cr, version):
- remove_field_selection(cr)
-
-
-def remove_field_selection(cr):
- queries = [
- # delete xml ids of ir.model.fields.selections
- "DELETE FROM ir_model_data imd "
- "USING ir_model_fields_selection fs, ir_model_fields f, ir_model m "
- "WHERE imd.module='connector_jira' "
- "AND imd.model='ir.model.fields.selection' "
- "AND res_id=fs.id "
- "AND f.model_id = m.id "
- "AND m.name='jira.backend.auth' "
- "AND fs.field_id=f.id;",
- # delete ir_model_fields_selection
- "DELETE FROM ir_model_fields_selection "
- "USING ir_model_fields f, ir_model m "
- "WHERE f.model_id = m.id "
- "AND m.name='jira.backend.auth' "
- "AND field_id=f.id;",
- # delete ir.model
- "DELETE from ir_model WHERE model='jira.backend.auth';",
- "DROP TABLE jira_backend_auth",
- ]
-
- for query in queries:
- cr.execute(query)
diff --git a/connector_jira/models/__init__.py b/connector_jira/models/__init__.py
index ac2673df..a5411953 100644
--- a/connector_jira/models/__init__.py
+++ b/connector_jira/models/__init__.py
@@ -1,11 +1,19 @@
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-from . import jira_binding # must be before the others
+# Must be imported before the others, to instantiate the abstract model
+# inherited by other models
+from . import jira_binding
from . import account_analytic_line
+from . import jira_account_analytic_line
from . import jira_backend
+from . import jira_backend_timestamp
from . import jira_issue_type
+from . import jira_project_base_mixin
+from . import jira_project_project
+from . import jira_project_task
+from . import jira_res_users
from . import project_project
from . import project_task
-from . import res_users
from . import queue_job
+from . import res_users
diff --git a/connector_jira/models/account_analytic_line.py b/connector_jira/models/account_analytic_line.py
new file mode 100644
index 00000000..e3c1d007
--- /dev/null
+++ b/connector_jira/models/account_analytic_line.py
@@ -0,0 +1,152 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import _, api, exceptions, fields, models
+
+
+class AccountAnalyticLine(models.Model):
+ _inherit = "account.analytic.line"
+
+ jira_bind_ids = fields.One2many(
+ comodel_name="jira.account.analytic.line",
+ inverse_name="odoo_id",
+ copy=False,
+ string="Worklog Bindings",
+ context={"active_test": False},
+ )
+ # fields needed to display JIRA issue link in views
+ jira_issue_key = fields.Char(
+ string="Original JIRA Issue Key",
+ compute="_compute_jira_references",
+ store=True,
+ )
+ jira_issue_url = fields.Char(
+ string="Original JIRA issue Link",
+ compute="_compute_jira_references",
+ compute_sudo=True,
+ store=True,
+ )
+ jira_epic_issue_key = fields.Char(
+ compute="_compute_jira_references",
+ string="Original JIRA Epic Key",
+ store=True,
+ )
+ jira_epic_issue_url = fields.Char(
+ string="Original JIRA Epic Link",
+ compute="_compute_jira_references",
+ compute_sudo=True,
+ store=True,
+ )
+
+ jira_issue_type_id = fields.Many2one(
+ comodel_name="jira.issue.type",
+ string="Original JIRA Issue Type",
+ compute="_compute_jira_references",
+ store=True,
+ )
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ for vals in vals_list:
+ self._connector_jira_create_validate(vals)
+ return super().create(vals_list)
+
+ @api.model
+ def _connector_jira_create_validate(self, vals):
+ project_id = vals.get("project_id")
+ if project_id:
+ project = self.env["project.project"].sudo().browse(project_id).exists()
+ if (
+ not self.env.context.get("connector_jira")
+ and project.jira_bind_ids._is_linked()
+ ):
+ raise exceptions.UserError(
+ _("Timesheet can not be created in project linked to JIRA!")
+ )
+
+ def write(self, vals):
+ self._connector_jira_write_validate(vals)
+ return super().write(vals)
+
+ def _connector_jira_write_validate(self, vals):
+ if (
+ not self.env.context.get("connector_jira")
+ and self.jira_bind_ids._is_linked()
+ ):
+ new_values = self._convert_to_write(vals)
+ for old_values in self.read(list(vals.keys()), load="_classic_write"):
+ old_values.pop("id", None)
+ old_values = self._convert_to_write(old_values)
+ for field in self._get_connector_jira_fields():
+ if field in vals and new_values[field] != old_values[field]:
+ raise exceptions.UserError(
+ _("Timesheet linked to JIRA Worklog cannot be modified!")
+ )
+
+ @api.ondelete(at_uninstall=False)
+ def _unlink_except_records_are_linked(self):
+ if (
+ not self.env.context.get("connector_jira")
+ and self.jira_bind_ids._is_linked()
+ ):
+ raise exceptions.UserError(
+ _("Timesheet linked to JIRA Worklog can not be deleted!")
+ )
+
+ @api.depends(
+ "jira_bind_ids",
+ "jira_bind_ids.jira_issue_key",
+ "jira_bind_ids.jira_issue_url",
+ "jira_bind_ids.jira_issue_type_id",
+ "jira_bind_ids.jira_epic_issue_key",
+ "jira_bind_ids.jira_epic_issue_url",
+ )
+ def _compute_jira_references(self):
+ """Compute the various references to JIRA.
+
+ We assume that we have only one external record for a line
+ """
+ with_bind = self.filtered("jira_bind_ids")
+ for record in with_bind:
+ main_binding = record.jira_bind_ids[0]
+ record.jira_issue_key = main_binding.jira_issue_key
+ record.jira_issue_url = main_binding.jira_issue_url
+ record.jira_issue_type_id = main_binding.jira_issue_type_id
+ record.jira_epic_issue_key = main_binding.jira_epic_issue_key
+ record.jira_epic_issue_url = main_binding.jira_epic_issue_url
+
+ no_bind = self - with_bind
+ if no_bind:
+ no_bind.update(
+ {
+ "jira_issue_key": "",
+ "jira_issue_url": "",
+ "jira_issue_type_id": False,
+ "jira_epic_issue_key": "",
+ "jira_epic_issue_url": "",
+ }
+ )
+
+ @api.model
+ def _get_connector_jira_fields(self):
+ return [
+ "jira_bind_ids",
+ "project_id",
+ "task_id",
+ "user_id",
+ "employee_id",
+ "name",
+ "date",
+ "unit_amount",
+ ]
+
+ def action_open_refresh_worklogs_from_jira_wizard(self):
+ return {
+ "name": _("Refresh Worklogs from Jira"),
+ "type": "ir.actions.act_window",
+ "target": "new",
+ "view_mode": "form",
+ "res_model": "jira.account.analytic.line.import",
+ "context": {"default_analytic_line_ids": [fields.Command.set(self.ids)]},
+ }
diff --git a/connector_jira/models/account_analytic_line/__init__.py b/connector_jira/models/account_analytic_line/__init__.py
deleted file mode 100644
index fd0b0ed4..00000000
--- a/connector_jira/models/account_analytic_line/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import common
-from . import deleter
-from . import importer
diff --git a/connector_jira/models/account_analytic_line/common.py b/connector_jira/models/account_analytic_line/common.py
deleted file mode 100644
index b9819c5a..00000000
--- a/connector_jira/models/account_analytic_line/common.py
+++ /dev/null
@@ -1,322 +0,0 @@
-# Copyright 2016-2022 Camptocamp SA
-# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-import json
-from collections import namedtuple
-
-from odoo import _, api, exceptions, fields, models
-
-from odoo.addons.component.core import Component
-
-UpdatedWorklog = namedtuple(
- "UpdatedWorklog",
- "worklog_id updated",
- # id as integer, timestamp
-)
-
-UpdatedWorklogSince = namedtuple(
- "UpdatedWorklogSince",
- "since until updated_worklogs",
- # timestamp, timestamp, [UpdatedWorklog]
-)
-
-
-DeletedWorklogSince = namedtuple(
- "DeletedWorklogSince",
- "since until deleted_worklog_ids",
- # timestamp, timestamp, [ids as integer]
-)
-
-
-class JiraAccountAnalyticLine(models.Model):
- _name = "jira.account.analytic.line"
- _inherit = "jira.binding"
- _inherits = {"account.analytic.line": "odoo_id"}
- _description = "Jira Worklog"
-
- odoo_id = fields.Many2one(
- comodel_name="account.analytic.line",
- string="Timesheet Line",
- required=True,
- index=True,
- ondelete="restrict",
- )
- # The REST API needs issue id + worklog id, so we keep it along
- # in case we'll need it for an eventual export
- jira_issue_id = fields.Char()
-
- # As we can have more than one jira binding on a project.project, we store
- # to which one a task binding is related.
- jira_project_bind_id = fields.Many2one(
- comodel_name="jira.project.project",
- ondelete="restrict",
- )
-
- # we have to store these fields on the analytic line because
- # they may be different than the ones on their odoo task:
- # for instance, we do not import "Tasks" but we import "Epics",
- # the analytic line for a "Task" will be linked to an "Epic" on
- # Odoo, but we still want to know the original task here
- jira_issue_key = fields.Char(
- string="Original Task Key",
- readonly=True,
- )
- jira_issue_type_id = fields.Many2one(
- comodel_name="jira.issue.type",
- string="Original Issue Type",
- readonly=True,
- )
- jira_issue_url = fields.Char(
- string="Original JIRA issue Link",
- compute="_compute_jira_issue_url",
- )
- jira_epic_issue_key = fields.Char(
- string="Original Epic Key",
- readonly=True,
- )
- jira_epic_issue_url = fields.Char(
- string="Original JIRA Epic Link",
- compute="_compute_jira_issue_url",
- )
-
- _sql_constraints = [
- (
- "jira_binding_backend_uniq",
- "unique(backend_id, odoo_id)",
- "A binding already exists for this line and this backend.",
- ),
- ]
-
- def _is_linked(self):
- return self.mapped("jira_project_bind_id")._is_linked()
-
- @api.depends(
- "backend_id", "backend_id.uri", "jira_issue_key", "jira_epic_issue_key"
- )
- def _compute_jira_issue_url(self):
- """Compute the external URL to JIRA."""
- for record in self:
- record.jira_issue_url = self.backend_id.make_issue_url(
- record.jira_issue_key
- )
- record.jira_epic_issue_url = self.backend_id.make_issue_url(
- record.jira_epic_issue_key
- )
-
- @api.model
- def import_record(self, backend, issue_id, worklog_id, force=False):
- """Import a worklog from JIRA"""
- with backend.work_on(self._name) as work:
- importer = work.component(usage="record.importer")
- return importer.run(worklog_id, issue_id=issue_id, force=force)
-
- def force_reimport(self):
- for binding in self.sudo().mapped("jira_bind_ids"):
- binding.with_delay(priority=8).import_record(
- binding.backend_id,
- binding.jira_issue_id,
- binding.external_id,
- force=True,
- )
-
-
-class AccountAnalyticLine(models.Model):
- _inherit = "account.analytic.line"
-
- jira_bind_ids = fields.One2many(
- comodel_name="jira.account.analytic.line",
- inverse_name="odoo_id",
- copy=False,
- string="Worklog Bindings",
- context={"active_test": False},
- )
- # fields needed to display JIRA issue link in views
- jira_issue_key = fields.Char(
- string="Original JIRA Issue Key",
- compute="_compute_jira_references",
- store=True,
- )
- jira_issue_url = fields.Char(
- string="Original JIRA issue Link",
- compute="_compute_jira_references",
- compute_sudo=True,
- )
- jira_epic_issue_key = fields.Char(
- compute="_compute_jira_references",
- string="Original JIRA Epic Key",
- store=True,
- )
- jira_epic_issue_url = fields.Char(
- string="Original JIRA Epic Link",
- compute="_compute_jira_references",
- compute_sudo=True,
- )
-
- jira_issue_type_id = fields.Many2one(
- comodel_name="jira.issue.type",
- string="Original JIRA Issue Type",
- compute="_compute_jira_references",
- store=True,
- )
-
- @api.depends(
- "jira_bind_ids.jira_issue_key",
- "jira_bind_ids.jira_issue_type_id",
- "jira_bind_ids.jira_epic_issue_key",
- )
- def _compute_jira_references(self):
- """Compute the various references to JIRA.
-
- We assume that we have only one external record for a line
- """
- for record in self:
- if not record.jira_bind_ids:
- record.jira_issue_url = False
- record.jira_epic_issue_key = False
- record.jira_epic_issue_url = False
- continue
- main_binding = record.jira_bind_ids[0]
- record.jira_issue_key = main_binding.jira_issue_key
- record.jira_issue_url = main_binding.jira_issue_url
- record.jira_issue_type_id = main_binding.jira_issue_type_id
- record.jira_epic_issue_key = main_binding.jira_epic_issue_key
- record.jira_epic_issue_url = main_binding.jira_epic_issue_url
-
- @api.model
- def _get_connector_jira_fields(self):
- return [
- "jira_bind_ids",
- "project_id",
- "task_id",
- "user_id",
- "employee_id",
- "name",
- "date",
- "unit_amount",
- ]
-
- @api.model
- def _connector_jira_create_validate(self, vals):
- ProjectProject = self.env["project.project"]
- project_id = vals.get("project_id")
- if project_id:
- project_id = ProjectProject.sudo().browse(project_id)
- if (
- not self.env.context.get("connector_jira")
- and project_id.mapped("jira_bind_ids")._is_linked()
- ):
- raise exceptions.UserError(
- _("Timesheet can not be created in project linked to JIRA!")
- )
-
- def _connector_jira_write_validate(self, vals):
- if (
- not self.env.context.get("connector_jira")
- and self.mapped("jira_bind_ids")._is_linked()
- ):
- fields = list(vals.keys())
- new_values = self._convert_to_write(
- vals,
- )
- for old_values in self.read(fields, load="_classic_write"):
- old_values = self._convert_to_write(
- old_values,
- )
- for field in self._get_connector_jira_fields():
- if field not in fields:
- continue
- if new_values[field] == old_values[field]:
- continue
- raise exceptions.UserError(
- _("Timesheet linked to JIRA Worklog can not be modified!")
- )
-
- def _connector_jira_unlink_validate(self):
- if (
- not self.env.context.get("connector_jira")
- and self.mapped("jira_bind_ids")._is_linked()
- ):
- raise exceptions.UserError(
- _("Timesheet linked to JIRA Worklog can not be deleted!")
- )
-
- @api.model
- def create(self, vals):
- self._connector_jira_create_validate(vals)
- return super().create(vals)
-
- def write(self, vals):
- self._connector_jira_write_validate(vals)
- return super().write(vals)
-
- def unlink(self):
- self._connector_jira_unlink_validate()
- return super().unlink()
-
-
-class WorklogAdapter(Component):
- _name = "jira.worklog.adapter"
- _inherit = "jira.webservice.adapter"
- _apply_on = ["jira.account.analytic.line"]
-
- def read(self, issue_id, worklog_id):
- # pylint: disable=W8106
- with self.handle_404():
- return self.client.worklog(issue_id, worklog_id).raw
-
- def search(self, issue_id):
- """Search worklogs of an issue"""
- worklogs = self.client.worklogs(issue_id)
- return [worklog.id for worklog in worklogs]
-
- @staticmethod
- def _chunks(whole, size):
- """Yield successive n-sized chunks from l."""
- for i in range(0, len(whole), size):
- yield whole[i : i + size]
-
- def yield_read(self, worklog_ids):
- """Generator returning worklog ids data"""
- path = "worklog/list"
-
- # the method returns max 1000 results
- for chunk in self._chunks(worklog_ids, 1000):
- payload = json.dumps({"ids": chunk})
- result = self._post_get_json(path, data=payload)
- yield from result
-
- def updated_since(self, since=None):
- path = "worklog/updated"
-
- start_since = since
- updated_worklogs = []
-
- while True:
- result = self.client._get_json(path, params={"since": since})
- updated_worklogs += [
- UpdatedWorklog(worklog_id=row["worklogId"], updated=row["updatedTime"])
- for row in result["values"]
- ]
- until = since = result["until"]
- if result["lastPage"]:
- break
- return UpdatedWorklogSince(
- since=start_since, until=until, updated_worklogs=updated_worklogs
- )
-
- def deleted_since(self, since=None):
- path = "worklog/deleted"
-
- start_since = since
- deleted_worklog_ids = []
-
- while True:
- result = self.client._get_json(path, params={"since": since})
- deleted_worklog_ids += [row["worklogId"] for row in result["values"]]
- until = since = result["until"]
- if result["lastPage"]:
- break
- return DeletedWorklogSince(
- since=start_since, until=until, deleted_worklog_ids=deleted_worklog_ids
- )
diff --git a/connector_jira/models/account_analytic_line/importer.py b/connector_jira/models/account_analytic_line/importer.py
deleted file mode 100644
index a994351e..00000000
--- a/connector_jira/models/account_analytic_line/importer.py
+++ /dev/null
@@ -1,365 +0,0 @@
-# Copyright 2016-2022 Camptocamp SA
-# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-import logging
-
-from pytz import timezone, utc
-
-from odoo import _
-
-from odoo.addons.component.core import Component
-from odoo.addons.connector.components.mapper import mapping
-from odoo.addons.connector.exception import MappingError
-
-from ...components.mapper import (
- iso8601_to_naive_date,
- iso8601_to_utc_datetime,
- whenempty,
-)
-from ...fields import MilliDatetime
-
-_logger = logging.getLogger(__name__)
-
-
-class AnalyticLineMapper(Component):
- _name = "jira.analytic.line.mapper"
- _inherit = "jira.import.mapper"
- _apply_on = ["jira.account.analytic.line"]
-
- direct = [
- (whenempty("comment", _("missing description")), "name"),
- ]
-
- @mapping
- def issue(self, record):
- issue = self.options.linked_issue
- assert issue
- refs = {
- "jira_issue_id": record["issueId"],
- "jira_issue_key": issue["key"],
- }
- task_mapper = self.component(
- usage="import.mapper",
- model_name="jira.project.task",
- )
- issue_type_dict = task_mapper.issue_type(issue)
- refs.update(issue_type_dict)
- epic_field_name = self.backend_record.epic_link_field_name
- if epic_field_name and epic_field_name in issue["fields"]:
- refs["jira_epic_issue_key"] = issue["fields"][epic_field_name]
- if self.backend_record.epic_link_on_epic:
- issue_type = self.env["jira.issue.type"].browse(
- issue_type_dict.get("jira_issue_type_id")
- )
- if issue_type.name == "Epic":
- refs["jira_epic_issue_key"] = issue.get("key")
- return refs
-
- @mapping
- def date(self, record):
- mode = self.backend_record.worklog_date_timezone_mode
- started = record["started"]
- if not mode or mode == "naive":
- return {"date": iso8601_to_naive_date(started)}
- started = iso8601_to_utc_datetime(started).replace(tzinfo=utc)
- if mode == "user":
- tz = timezone(record["author"]["timeZone"])
- elif mode == "specific":
- tz = timezone(self.backend_record.worklog_date_timezone)
- return {"date": started.astimezone(tz).date()}
-
- @mapping
- def duration(self, record):
- spent = float(record["timeSpentSeconds"])
- # amount is in float in odoo... 2h30 = 2.5
- return {"unit_amount": spent / 60 / 60}
-
- @mapping
- def author(self, record):
- jira_author = record["author"]
- jira_author_key = jira_author["accountId"]
- binder = self.binder_for("jira.res.users")
- user = binder.to_internal(jira_author_key, unwrap=True)
- if not user:
- email = jira_author.get("emailAddress", "")
- raise MappingError(
- _(
- "No user found with login '%(key)s' or email '%(mail)s'."
- " You must create a user or link it manually if the"
- " login/email differs.",
- key=jira_author_key,
- mail=email,
- )
- )
- employee = (
- self.env["hr.employee"]
- .with_context(
- active_test=False,
- )
- .search([("user_id", "=", user.id)], limit=1)
- )
- return {"user_id": user.id, "employee_id": employee.id}
-
- @mapping
- def project_and_task(self, record):
- assert (
- self.options.task_binding
- or self.options.project_binding
- or self.options.fallback_project
- )
- task_binding = self.options.task_binding
- if not task_binding:
- if self.options.fallback_project:
- return {"project_id": self.options.fallback_project.id}
- project = self.options.project_binding.odoo_id
- if project:
- return {
- "project_id": project.id,
- "jira_project_bind_id": self.options.project_binding.id,
- }
-
- project = task_binding.project_id
- return {
- "task_id": task_binding.odoo_id.id,
- "project_id": project.id,
- "jira_project_bind_id": task_binding.jira_project_bind_id.id,
- }
-
- @mapping
- def backend_id(self, record):
- return {"backend_id": self.backend_record.id}
-
-
-class AnalyticLineBatchImporter(Component):
- """Import the Jira worklogs
-
- For every id in in the list, a delayed job is created.
- Import from a date
- """
-
- _name = "jira.analytic.line.batch.importer"
- _inherit = "jira.timestamp.batch.importer"
- _apply_on = ["jira.account.analytic.line"]
-
- def _search(self, timestamp):
- unix_timestamp = MilliDatetime.to_timestamp(timestamp.last_timestamp)
- result = self.backend_adapter.updated_since(since=unix_timestamp)
- worklog_ids = self._filter_update(result.updated_worklogs)
- # We need issue_id + worklog_id for the worklog importer (the jira
- # "read" method for worklogs asks both), get it from yield_read.
- # TODO we might consider to optimize the import process here:
- # yield_read reads worklogs data, then the individual
- # import will do a request again (and 2 with the tempo module)
- next_timestamp = MilliDatetime.from_timestamp(result.until)
- return (next_timestamp, self.backend_adapter.yield_read(worklog_ids))
-
- def _handle_records(self, records, force=False):
- count = 0
- for worklog in records:
- count += 1
- worklog_id = worklog["id"]
- issue_id = worklog["issueId"]
- self._import_record(issue_id, worklog_id, force=force)
- return count
-
- def _filter_update(self, updated_worklogs):
- """Filter only the worklogs needing an update
-
- The result from Jira contains the worklog id and
- the last update on Jira. So we keep only the worklog
- ids with an sync_date before the Jira last update.
- """
- if not updated_worklogs:
- return []
- self.env.cr.execute(
- "SELECT external_id, jira_updated_at "
- "FROM jira_account_analytic_line "
- "WHERE external_id IN %s ",
- (tuple(str(r.worklog_id) for r in updated_worklogs),),
- )
- bindings = {int(row[0]): row[1] for row in self.env.cr.fetchall()}
- worklog_ids = []
- for worklog in updated_worklogs:
- worklog_id = worklog.worklog_id
- # we store the latest "updated_at" value on the binding
- # so we can check if we already know the latest value,
- # for instance because we imported the record from a
- # webhook before, we can skip the import
- binding_updated_at = bindings.get(worklog_id)
- if not binding_updated_at:
- worklog_ids.append(worklog_id)
- continue
- binding_updated_at = MilliDatetime.from_string(binding_updated_at)
- jira_updated_at = MilliDatetime.from_timestamp(worklog.updated)
- if binding_updated_at < jira_updated_at:
- worklog_ids.append(worklog_id)
- return worklog_ids
-
- def _import_record(self, issue_id, worklog_id, force=False, **kwargs):
- """Delay the import of the records"""
- self.model.with_delay(**kwargs).import_record(
- self.backend_record,
- issue_id,
- worklog_id,
- force=force,
- )
-
-
-class AnalyticLineImporter(Component):
- _name = "jira.analytic.line.importer"
- _inherit = "jira.importer"
- _apply_on = ["jira.account.analytic.line"]
-
- def __init__(self, work_context):
- super().__init__(work_context)
- self.external_issue_id = None
- self.task_binding = None
- self.project_binding = None
- self.fallback_project = None
-
- def _get_external_updated_at(self):
- assert self.external_record
- external_updated_at = self.external_record.get("updated")
- if not external_updated_at:
- return None
- return iso8601_to_utc_datetime(external_updated_at)
-
- @property
- def _issue_fields_to_read(self):
- epic_field_name = self.backend_record.epic_link_field_name
- return ["issuetype", "project", "parent", epic_field_name]
-
- def _recurse_import_task(self):
- """Import and return the task of proper type for the worklog
-
- As we decide which type of issues are imported for a project,
- a worklog could be linked to an issue that we don't import.
- In that case, we climb the parents of the issue until we find
- a issue of a type we synchronize.
-
- It ensures that the 'to-be-linked' issue is imported and return it.
-
- """
- issue_adapter = self.component(
- usage="backend.adapter", model_name="jira.project.task"
- )
- issue_binder = self.binder_for("jira.project.task")
- issue_type_binder = self.binder_for("jira.issue.type")
- jira_issue_id = self.external_record["issueId"]
- epic_field_name = self.backend_record.epic_link_field_name
- project_matcher = self.component(usage="jira.task.project.matcher")
- current_project_id = self.external_issue["fields"]["project"]["id"]
- while jira_issue_id:
- issue = issue_adapter.read(
- jira_issue_id,
- fields=self._issue_fields_to_read,
- )
- jira_project_id = issue["fields"]["project"]["id"]
- jira_issue_type_id = issue["fields"]["issuetype"]["id"]
- project_binding = project_matcher.find_project_binding(issue)
- issue_type_binding = issue_type_binder.to_internal(jira_issue_type_id)
- # JIRA allows to set an EPIC of a different project.
- # If it happens, we discard it.
- if (
- jira_project_id == current_project_id
- and issue_type_binding.is_sync_for_project(project_binding)
- ):
- break
- if issue["fields"].get("parent"):
- # 'parent' is used on sub-tasks relating to their parent task
- jira_issue_id = issue["fields"]["parent"]["id"]
- elif issue["fields"].get(epic_field_name):
- # the epic link is set on a jira custom field
- epic_key = issue["fields"][epic_field_name]
- epic = issue_adapter.read(epic_key, fields="id")
- # we got the key of the epic issue, so we translate
- # it to the ID with a call to the API
- jira_issue_id = epic["id"]
- else:
- # no parent issue of a type we are synchronizing has been
- # found, the worklog will be assigned to no task
- jira_issue_id = None
-
- if jira_issue_id:
- self._import_dependency(jira_issue_id, "jira.project.task")
- return issue_binder.to_internal(jira_issue_id)
-
- def _create_data(self, map_record, **kwargs):
- return super()._create_data(
- map_record,
- task_binding=self.task_binding,
- project_binding=self.project_binding,
- fallback_project=self.fallback_project,
- linked_issue=self.external_issue,
- )
-
- def _update_data(self, map_record, **kwargs):
- return super()._update_data(
- map_record,
- task_binding=self.task_binding,
- project_binding=self.project_binding,
- fallback_project=self.fallback_project,
- linked_issue=self.external_issue,
- )
-
- def run(self, external_id, force=False, record=None, **kwargs):
- assert "issue_id" in kwargs
- self.external_issue_id = kwargs.pop("issue_id")
- return super().run(external_id, force=force, record=record, **kwargs)
-
- def _handle_record_missing_on_jira(self):
- """Hook called when we are importing a record missing on Jira
-
- For worklogs, we drop the analytic line if we discover it doesn't exist
- on Jira, as the latter is the master.
- """
- binding = self._get_binding()
- if binding:
- record = binding.odoo_id
- binding.unlink()
- record.unlink()
- return _("Record does no longer exist in Jira")
-
- def _get_external_data(self):
- """Return the raw Jira data for ``self.external_id``"""
- issue_adapter = self.component(
- usage="backend.adapter", model_name="jira.project.task"
- )
- self.external_issue = issue_adapter.read(self.external_issue_id)
- return self.backend_adapter.read(self.external_issue_id, self.external_id)
-
- def _before_import(self):
- task_binding = self._recurse_import_task()
- if task_binding and task_binding.active:
- self.task_binding = task_binding
- if not self.task_binding:
- # when no task exists in Odoo (because we don't synchronize
- # the issue type for instance), we link the line directly
- # to the corresponding project, not linked to any task
- issue = self.external_issue
- assert issue
- matcher = self.component(usage="jira.task.project.matcher")
- project_binding = matcher.find_project_binding(issue)
- if project_binding and project_binding.active:
- self.project_binding = project_binding
- else:
- self.fallback_project = matcher.fallback_project_for_worklogs()
-
- def _import(self, binding, **kwargs):
- if not (self.task_binding or self.project_binding or self.fallback_project):
- _logger.debug(
- "No task or project synchronized for attaching worklog %s",
- self.external_record["id"],
- )
- return
- return super()._import(binding, **kwargs)
-
- def _import_dependency_assignee(self):
- jira_assignee = self.external_record["author"]
- jira_key = jira_assignee.get("accountId")
- self._import_dependency(jira_key, "jira.res.users", record=jira_assignee)
-
- def _import_dependencies(self):
- """Import the dependencies for the record"""
- self._import_dependency_assignee()
diff --git a/connector_jira/models/jira_account_analytic_line.py b/connector_jira/models/jira_account_analytic_line.py
new file mode 100644
index 00000000..2291ba0a
--- /dev/null
+++ b/connector_jira/models/jira_account_analytic_line.py
@@ -0,0 +1,89 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import api, fields, models
+
+
+class JiraAccountAnalyticLine(models.Model):
+ _name = "jira.account.analytic.line"
+ _inherit = "jira.binding"
+ _inherits = {"account.analytic.line": "odoo_id"}
+ _description = "Jira Worklog"
+
+ odoo_id = fields.Many2one(
+ comodel_name="account.analytic.line",
+ string="Timesheet Line",
+ required=True,
+ index=True,
+ ondelete="restrict",
+ )
+    # The REST API needs issue id + worklog id, so we keep the issue id
+    # around in case we need it for an eventual export
+ jira_issue_id = fields.Char()
+
+ # As we can have more than one jira binding on a project.project, we store
+ # to which one a task binding is related.
+ jira_project_bind_id = fields.Many2one(
+ comodel_name="jira.project.project",
+ ondelete="restrict",
+ )
+
+ # we have to store these fields on the analytic line because
+    # they may be different from the ones on their odoo task:
+ # for instance, we do not import "Tasks" but we import "Epics",
+ # the analytic line for a "Task" will be linked to an "Epic" on
+ # Odoo, but we still want to know the original task here
+ jira_issue_key = fields.Char(string="Original Task Key")
+ jira_issue_type_id = fields.Many2one(
+ comodel_name="jira.issue.type",
+ string="Original Issue Type",
+ )
+ jira_issue_url = fields.Char(
+ string="Original JIRA issue Link",
+ compute="_compute_jira_issue_url",
+ store=True,
+ )
+ jira_epic_issue_key = fields.Char(string="Original Epic Key")
+ jira_epic_issue_url = fields.Char(
+ string="Original JIRA Epic Link",
+ compute="_compute_jira_issue_url",
+ store=True,
+ )
+
+ _sql_constraints = [
+ (
+ "jira_binding_backend_uniq",
+ "unique(backend_id, odoo_id)",
+ "A binding already exists for this line and this backend.",
+ ),
+ ]
+
+ def _is_linked(self):
+ return self.jira_project_bind_id._is_linked()
+
+ @api.depends(
+ "backend_id", "backend_id.uri", "jira_issue_key", "jira_epic_issue_key"
+ )
+ def _compute_jira_issue_url(self):
+ """Compute the external URL to JIRA."""
+ for record in self:
+ urlmaker = record.backend_id.make_issue_url
+ record.jira_issue_url = urlmaker(record.jira_issue_key)
+ record.jira_epic_issue_url = urlmaker(record.jira_epic_issue_key)
+
+ @api.model
+ def import_record(self, backend, issue_id, worklog_id, force=False):
+ """Import a worklog from JIRA"""
+ with backend.work_on(self._name) as work:
+ importer = work.component(usage="record.importer")
+ return importer.run(worklog_id, issue_id=issue_id, force=force)
+
+ def force_reimport(self):
+ for binding in self.sudo().mapped("jira_bind_ids"):
+ binding.with_delay(priority=8).import_record(
+ binding.backend_id,
+ binding.jira_issue_id,
+ binding.external_id,
+ force=True,
+ )
diff --git a/connector_jira/models/jira_backend/common.py b/connector_jira/models/jira_backend.py
similarity index 73%
rename from connector_jira/models/jira_backend/common.py
rename to connector_jira/models/jira_backend.py
index 6bf2dd3d..6e791773 100644
--- a/connector_jira/models/jira_backend/common.py
+++ b/connector_jira/models/jira_backend.py
@@ -5,23 +5,16 @@
import logging
import urllib.parse
-from contextlib import closing, contextmanager
from datetime import datetime
import jwt
-import psycopg2
import pytz
import requests
from atlassian_jwt import url_utils
-import odoo
-from odoo import _, api, exceptions, fields, models, tools
+from odoo import _, api, exceptions, fields, models
from odoo.tools import config
-from odoo.addons.component.core import Component
-
-from ...fields import MilliDatetime
-
_logger = logging.getLogger(__name__)
JIRA_TIMEOUT = 30 # seconds
@@ -37,21 +30,6 @@
_logger.debug(err)
-@contextmanager
-def new_env(env):
- registry = odoo.registry(env.cr.dbname)
- with closing(registry.cursor()) as cr:
- new_env = api.Environment(cr, env.uid, env.context)
- try:
- yield new_env
- except Exception:
- cr.rollback()
- raise
- else:
- if not tools.config["test_enable"]:
- cr.commit() # pylint: disable=invalid-commit
-
-
class JiraBackend(models.Model):
_name = "jira.backend"
_description = "Jira Backend"
@@ -67,7 +45,6 @@ class JiraBackend(models.Model):
uri = fields.Char(
string="Jira URI",
- readonly=True,
help="the value is provided when the app is installed on Jira Cloud.",
)
name = fields.Char(
@@ -79,9 +56,7 @@ class JiraBackend(models.Model):
help="URL to use when registering the backend as an app on the marketplace",
compute="_compute_app_descriptor_url",
)
- display_url = fields.Char(
- help="Url used for the Jira app in messages", readonly=True
- )
+ display_url = fields.Char(help="Url used for the Jira app in messages")
application_key = fields.Char(
compute="_compute_application_key",
store=True,
@@ -130,7 +105,6 @@ class JiraBackend(models.Model):
],
default="setup",
required=True,
- readonly=True,
help="State of the Backend.\n"
"Setup: in this state you can register the backend on "
"https://marketplace.atlassian.com/ as an app, using the app descriptor url.\n"
@@ -138,12 +112,11 @@ class JiraBackend(models.Model):
"(transition is automatic).",
)
private_key = fields.Char(
- readonly=True,
groups="connector.group_connector_manager",
help="The shared secret for JWT, provided at app installation",
)
public_key = fields.Text(
- readonly=True, help="The Client Key for JWT, provided at app installation"
+ help="The Client Key for JWT, provided at app installation"
)
verify_ssl = fields.Boolean(default=True, string="Verify SSL?")
@@ -182,7 +155,6 @@ class JiraBackend(models.Model):
comodel_name="jira.issue.type",
inverse_name="backend_id",
string="Issue Types",
- readonly=True,
)
epic_link_field_name = fields.Char(
@@ -201,11 +173,10 @@ class JiraBackend(models.Model):
)
# TODO: use something better to show this info
- # For instance, we could use web_notify to simply show a system msg.
- report_user_sync = fields.Html(readonly=True)
+ # For instance, we could use web_notify to simply show a system msg.
+ report_user_sync = fields.Html()
@api.model_create_multi
- @api.returns("self", lambda value: value.id)
def create(self, vals_list):
records = super().create(vals_list)
records._compute_application_key()
@@ -218,7 +189,6 @@ def _compute_application_key(self):
def _compute_app_descriptor_url(self):
base_url = self._get_base_url()
-
for rec in self:
rec.app_descriptor_url = f"{base_url}/jira/{rec.id}/app-descriptor.json"
@@ -236,10 +206,8 @@ def _selection_project_template(self):
@api.constrains("project_template_shared")
def check_jira_key(self):
- for backend in self:
- if not backend.project_template_shared:
- continue
- valid = self.env["jira.project.project"]._jira_key_valid
+ valid = self.env["jira.project.project"]._jira_key_valid
+ for backend in self.filtered("project_template_shared"):
if not valid(backend.project_template_shared):
raise exceptions.ValidationError(
_("%s is not a valid JIRA Key") % backend.project_template_shared
@@ -270,8 +238,8 @@ def _compute_last_import_date(self):
)
def _inverse_date_fields(self, field_name, component_usage):
+ ts_model = self.env["jira.backend.timestamp"]
for rec in self:
- ts_model = self.env["jira.backend.timestamp"]
timestamp = ts_model._timestamp_for_field(rec, field_name, component_usage)
if not timestamp._lock():
raise exceptions.UserError(
@@ -280,7 +248,7 @@ def _inverse_date_fields(self, field_name, component_usage):
"probably due to an ongoing synchronization."
)
)
- value = getattr(rec, field_name)
+ value = rec[field_name]
# As the timestamp field is using MilliDatetime, we lose
# the milliseconds precision when a user writes a new
# date on the backend. This is not really an issue as we
@@ -318,8 +286,7 @@ def _run_background_from_date(
concurrency issue arises, it will be logged and rollbacked silently.
"""
self.ensure_one()
- ts_model = self.env["jira.backend.timestamp"]
- timestamp = ts_model._timestamp_for_field(
+ timestamp = self.env["jira.backend.timestamp"]._timestamp_for_field(
self,
from_date_field,
component_usage,
@@ -335,9 +302,7 @@ def button_setup(self):
def activate_epic_link(self):
self.ensure_one()
with self.work_on("jira.backend") as work:
- adapter = work.component(usage="backend.adapter")
- jira_fields = adapter.list_fields()
- for field in jira_fields:
+ for field in work.component(usage="backend.adapter").list_fields():
custom_ref = field.get("schema", {}).get("custom")
if custom_ref == "com.pyxis.greenhopper.jira:gh-epic-link":
self.epic_link_field_name = field["id"]
@@ -360,19 +325,18 @@ def state_running(self):
def _onchange_worklog_date_import_timezone_mode(self):
for jira_backend in self:
if jira_backend.worklog_date_timezone_mode == "specific":
- continue
- jira_backend.worklog_date_timezone = False
+ jira_backend.worklog_date_timezone = self.env.user.tz or "UTC"
+ else:
+ jira_backend.worklog_date_timezone = False
def check_connection(self):
self.ensure_one()
try:
self.get_api_client().myself()
- except (ValueError, requests.exceptions.ConnectionError) as err:
- raise exceptions.UserError(_("Failed to connect (%s)") % (err,)) from err
- except JIRAError as err:
- raise exceptions.UserError(
- _("Failed to connect (%s)") % (err.text,)
- ) from err
+ except (ValueError, requests.exceptions.ConnectionError) as e:
+ raise exceptions.UserError(_("Failed to connect (%s)", e)) from e
+ except JIRAError as e:
+ raise exceptions.UserError(_("Failed to connect (%s)", e.text)) from e
raise exceptions.UserError(_("Connection successful"))
def import_project_task(self):
@@ -404,7 +368,7 @@ def delete_analytic_line(self):
def import_res_users(self):
self.report_user_sync = None
result = self.env["res.users"].search([]).link_with_jira(backends=self)
- for __, bknd_result in result.items():
+ for bknd_result in result.values():
if bknd_result.get("error"):
self.report_user_sync = self.env["ir.ui.view"]._render_template(
"connector_jira.backend_report_user_sync",
@@ -413,9 +377,7 @@ def import_res_users(self):
return True
def get_user_resolution_order(self):
- return [
- "email",
- ]
+ return ["email"]
def import_issue_type(self):
self.env["jira.issue.type"].import_batch(self)
@@ -425,54 +387,44 @@ def get_api_client(self):
self.ensure_one()
# tokens are only readable by connector managers
backend = self.sudo()
-
- options = {
- "server": backend.uri,
- "verify": backend.verify_ssl,
- }
- jwt = {
- "secret": backend.private_key,
- "payload": {
- "iss": self.application_key, # application key in the app descriptor
- },
- }
+ # application key in the app descriptor
+ app_key = self.application_key
return JIRA(
- options=options, jwt=jwt, timeout=JIRA_TIMEOUT, get_server_info=False
+ options={"server": backend.uri, "verify": backend.verify_ssl},
+ jwt={"secret": backend.private_key, "payload": {"iss": app_key}},
+ timeout=JIRA_TIMEOUT,
+ get_server_info=False,
)
@api.model
def _scheduler_import_project_task(self):
- backends = self.search([("state", "=", "running")])
- for backend in backends:
+ for backend in self.search([("state", "=", "running")]):
backend.import_project_task()
@api.model
def _scheduler_import_res_users(self):
- backends = self.search([("state", "=", "running")])
- for backend in backends:
+ for backend in self.search([("state", "=", "running")]):
backend.import_res_users()
@api.model
def _scheduler_import_analytic_line(self):
- backends = self.search([("state", "=", "running")])
- for backend in backends:
- backend.search([]).import_analytic_line()
+ for backend in self.search([("state", "=", "running")]):
+ backend.import_analytic_line()
@api.model
def _scheduler_delete_analytic_line(self):
- backends = self.search([("state", "=", "running")])
- for backend in backends:
- backend.search([]).delete_analytic_line()
+ for backend in self.search([("state", "=", "running")]):
+ backend.delete_analytic_line()
def make_issue_url(self, jira_issue_id):
return urllib.parse.urljoin(self.uri, f"/browse/{jira_issue_id}")
+ @api.model
def _get_base_url(self):
- fqdn = self.env["ir.config_parameter"].get_param("web.base.url", "")
- if "://" in fqdn:
- fqdn = fqdn.split("://", maxsplit=1)[-1]
- base_url = "https://" + fqdn
- return base_url
+ base_url = self.env["ir.config_parameter"].get_param("web.base.url", "")
+ if "://" in base_url:
+ base_url = base_url.split("://", maxsplit=1)[-1]
+ return "https://" + base_url
def _get_app_descriptor(self):
self.ensure_one()
@@ -567,17 +519,9 @@ def _prepare_backend_values(self, payload):
def _uninstall_app(self, payload):
self.ensure_one()
# wait for disabled to complete
- self.env.cr.execute(
- "SELECT id from jira_backend WHERE id = %s FOR UPDATE",
- (self.id,),
- )
- self.write(
- {
- "public_key": False,
- "private_key": False,
- "state": "setup",
- }
- )
+ query = "SELECT id from jira_backend WHERE id = %s FOR UPDATE"
+ self.env.cr.execute(query, (self.id,))
+ self.write({"public_key": False, "private_key": False, "state": "setup"})
_logger.info("Uninstalled Jira backend for uri %s", self.uri)
return "ok"
@@ -592,10 +536,8 @@ def _enable_app(self, payload):
def _disable_app(self, payload):
self.ensure_one()
- self.env.cr.execute(
- "SELECT id from jira_backend WHERE id = %s FOR UPDATE",
- (self.id,),
- )
+ query = "SELECT id from jira_backend WHERE id = %s FOR UPDATE"
+ self.env.cr.execute(query, (self.id,))
values = self._prepare_backend_values(payload)
values["state"] = "setup"
_logger.info("disable %s -> %s", self.ids, values)
@@ -603,22 +545,19 @@ def _disable_app(self, payload):
_logger.info("Disabled Jira backend for uri %s", self.mapped("uri"))
return "ok"
- def _validate_jwt(self, authorization_header, query_url=None):
- """validation if the JSON Web Token
+ def _validate_jwt(self, auth_header, query_url=None):
+ """Validation for the JSON Web Token
- Use the algorithm provided by the atlassan module to compute the 'iss' hash
+ Use the algorithm provided by the Atlassian module to compute the 'iss' hash
from the URL and compare it to the value in the token, in addition to the
standard claims checks.
"""
self.ensure_one()
- assert authorization_header.startswith(
- "JWT "
- ), "unexpected content in Authorization header"
- jwt_token = authorization_header[4:]
+ assert auth_header.startswith("JWT "), "Unexpected content in Auth header"
# see https://developer.atlassian.com/cloud/jira/software/understanding-jwt/
# for more info
decoded = jwt.decode(
- jwt_token,
+ auth_header[4:],
self.private_key,
algorithms=["HS256"],
# audience=self._get_base_url(),
@@ -640,100 +579,3 @@ def _validate_jwt(self, authorization_header, query_url=None):
if decoded["iss"] != expected_hash:
return False
return True
-
-
-class JiraBackendTimestamp(models.Model):
- _name = "jira.backend.timestamp"
- _description = "Jira Backend Import Timestamps"
-
- backend_id = fields.Many2one(
- comodel_name="jira.backend",
- string="Jira Backend",
- required=True,
- )
- from_date_field = fields.Char(
- required=True,
- )
- # For worklogs, jira allows to work with milliseconds
- # unix timestamps, we keep this precision by using a new type
- # of field. The ORM values for this field are Unix timestamps the
- # same way Jira use them: unix timestamp as integer multiplied * 1000
- # to keep the milli precision with 3 digits (example 1554318348000).
- last_timestamp = MilliDatetime(
- string="Last Timestamp",
- required=True,
- )
-
- # The content of this field must match to the "usage" of a component.
- # The method JiraBinding.run_batch_timestamp() will find the matching
- # component for the model and call "run()" on it.
- component_usage = fields.Char(
- required=True,
- help="Used by the connector to find which component "
- "execute the batch import (technical).",
- )
-
- _sql_constraints = [
- (
- "timestamp_field_uniq",
- "unique(backend_id, from_date_field, component_usage)",
- "A timestamp already exists.",
- ),
- ]
-
- @api.model
- def _timestamp_for_field(self, backend, field_name, component_usage):
- """Return the timestamp for a field"""
- timestamp = self.search(
- [
- ("backend_id", "=", backend.id),
- ("from_date_field", "=", field_name),
- ("component_usage", "=", component_usage),
- ]
- )
- if not timestamp:
- timestamp = self.env["jira.backend.timestamp"].create(
- {
- "backend_id": backend.id,
- "from_date_field": field_name,
- "component_usage": component_usage,
- "last_timestamp": datetime.fromtimestamp(0),
- }
- )
- return timestamp
-
- def _update_timestamp(self, timestamp):
- self.ensure_one()
- self.last_timestamp = timestamp
-
- def _lock(self):
- """Update the timestamp for a synchro
-
- thus, we prevent 2 synchros to be launched at the same time.
- The lock is released at the commit of the transaction.
-
- Return True if the lock could be acquired.
- """
- self.ensure_one()
- query = """
- SELECT id FROM jira_backend_timestamp
- WHERE id = %s
- FOR UPDATE NOWAIT
- """
- try:
- self.env.cr.execute(query, (self.id,))
- except psycopg2.OperationalError:
- return False
- row = self.env.cr.fetchone()
- return bool(row)
-
-
-class BackendAdapter(Component):
- _name = "jira.backend.adapter"
- _inherit = "jira.webservice.adapter"
- _apply_on = ["jira.backend"]
-
- webhook_base_path = "{server}/rest/webhooks/1.0/{path}"
-
- def list_fields(self):
- return self.client._get_json("field")
diff --git a/connector_jira/models/jira_backend/__init__.py b/connector_jira/models/jira_backend/__init__.py
deleted file mode 100644
index 63602330..00000000
--- a/connector_jira/models/jira_backend/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import common
diff --git a/connector_jira/models/jira_backend_timestamp.py b/connector_jira/models/jira_backend_timestamp.py
new file mode 100644
index 00000000..b08c21c5
--- /dev/null
+++ b/connector_jira/models/jira_backend_timestamp.py
@@ -0,0 +1,89 @@
+# Copyright: 2015 LasLabs, Inc.
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from datetime import datetime
+
+import psycopg2
+
+from odoo import api, fields, models
+
+from ..fields import MilliDatetime
+
+
+class JiraBackendTimestamp(models.Model):
+ _name = "jira.backend.timestamp"
+ _description = "Jira Backend Import Timestamps"
+
+ backend_id = fields.Many2one(
+ comodel_name="jira.backend",
+ string="Jira Backend",
+ required=True,
+ )
+ from_date_field = fields.Char(required=True)
+
+    # For worklogs, Jira allows working with millisecond
+    # Unix timestamps; we keep this precision by using a new type
+    # of field. The ORM values for this field are Unix timestamps the
+    # same way Jira uses them: a Unix timestamp as an integer multiplied by 1000
+    # to keep the milli precision with 3 digits (example 1554318348000).
+ last_timestamp = MilliDatetime(string="Last Timestamp", required=True)
+
+    # The content of this field must match the "usage" of a component.
+ # The method JiraBinding.run_batch_timestamp() will find the matching
+ # component for the model and call "run()" on it.
+ component_usage = fields.Char(
+ required=True,
+ help="Used by the connector to find which component "
+ "execute the batch import (technical).",
+ )
+
+ _sql_constraints = [
+ (
+ "timestamp_field_uniq",
+ "unique(backend_id, from_date_field, component_usage)",
+ "A timestamp already exists.",
+ ),
+ ]
+
+ @api.model
+ def _timestamp_for_field(self, backend, field_name, component_usage):
+ """Return the timestamp for a field"""
+ timestamp = self.search(
+ [
+ ("backend_id", "=", backend.id),
+ ("from_date_field", "=", field_name),
+ ("component_usage", "=", component_usage),
+ ]
+ )
+ if not timestamp:
+ timestamp = self.env["jira.backend.timestamp"].create(
+ {
+ "backend_id": backend.id,
+ "from_date_field": field_name,
+ "component_usage": component_usage,
+ "last_timestamp": datetime.fromtimestamp(0),
+ }
+ )
+ return timestamp
+
+ def _update_timestamp(self, timestamp):
+ self.ensure_one()
+ self.last_timestamp = timestamp
+
+ def _lock(self):
+ """Update the timestamp for a synchro
+
+ thus, we prevent 2 synchros to be launched at the same time.
+ The lock is released at the commit of the transaction.
+
+ Return True if the lock could be acquired.
+ """
+ self.ensure_one()
+ query = "SELECT id FROM jira_backend_timestamp WHERE id = %s FOR UPDATE NOWAIT"
+ try:
+ self.env.cr.execute(query, (self.id,))
+ except psycopg2.OperationalError:
+ return False
+ return bool(self.env.cr.fetchone())
diff --git a/connector_jira/models/jira_binding/common.py b/connector_jira/models/jira_binding.py
similarity index 96%
rename from connector_jira/models/jira_binding/common.py
rename to connector_jira/models/jira_binding.py
index 31cc238b..84deed5b 100644
--- a/connector_jira/models/jira_binding/common.py
+++ b/connector_jira/models/jira_binding.py
@@ -3,7 +3,7 @@
from odoo import api, fields, models
-from ...fields import MilliDatetime
+from ..fields import MilliDatetime
class JiraBinding(models.AbstractModel):
@@ -26,7 +26,7 @@ class JiraBinding(models.AbstractModel):
ondelete="restrict",
)
jira_updated_at = MilliDatetime()
- external_id = fields.Char(string="ID on Jira", index=True)
+ external_id = fields.Char(string="ID on Jira", index="trigram")
_sql_constraints = [
(
diff --git a/connector_jira/models/jira_binding/__init__.py b/connector_jira/models/jira_binding/__init__.py
deleted file mode 100644
index 63602330..00000000
--- a/connector_jira/models/jira_binding/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import common
diff --git a/connector_jira/models/jira_issue_type.py b/connector_jira/models/jira_issue_type.py
new file mode 100644
index 00000000..203720c6
--- /dev/null
+++ b/connector_jira/models/jira_issue_type.py
@@ -0,0 +1,26 @@
+# Copyright 2016-2022 Camptocamp SA
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import fields, models
+
+
+class JiraIssueType(models.Model):
+ _name = "jira.issue.type"
+ _inherit = "jira.binding"
+ _description = "Jira Issue Type"
+
+ name = fields.Char(required=True)
+ description = fields.Char()
+ backend_id = fields.Many2one(ondelete="cascade")
+
+ def is_sync_for_project(self, project_binding):
+ self.ensure_one()
+ return bool(project_binding) and self in project_binding.sync_issue_type_ids
+
+ def import_batch(self, backend, from_date=None, to_date=None):
+ """Prepare a batch import of issue types from Jira
+
+ from_date and to_date are ignored for issue types
+ """
+ with backend.work_on(self._name) as work:
+ work.component(usage="batch.importer").run()
diff --git a/connector_jira/models/jira_issue_type/__init__.py b/connector_jira/models/jira_issue_type/__init__.py
deleted file mode 100644
index ea8197b1..00000000
--- a/connector_jira/models/jira_issue_type/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import common
-from . import importer
diff --git a/connector_jira/models/jira_issue_type/common.py b/connector_jira/models/jira_issue_type/common.py
deleted file mode 100644
index 6690e9f5..00000000
--- a/connector_jira/models/jira_issue_type/common.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2016-2022 Camptocamp SA
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from odoo import fields, models
-
-from odoo.addons.component.core import Component
-
-
-class JiraIssueType(models.Model):
- _name = "jira.issue.type"
- _inherit = "jira.binding"
- _description = "Jira Issue Type"
-
- name = fields.Char(required=True, readonly=True)
- description = fields.Char(readonly=True)
- backend_id = fields.Many2one(ondelete="cascade")
-
- def is_sync_for_project(self, project_binding):
- self.ensure_one()
- if not project_binding:
- return False
- return self in project_binding.sync_issue_type_ids
-
- def import_batch(self, backend, from_date=None, to_date=None):
- """Prepare a batch import of issue types from Jira
-
- from_date and to_date are ignored for issue types
- """
- with backend.work_on(self._name) as work:
- importer = work.component(usage="batch.importer")
- importer.run()
-
-
-class IssueTypeAdapter(Component):
- _name = "jira.issue.type.adapter"
- _inherit = ["jira.webservice.adapter"]
- _apply_on = ["jira.issue.type"]
-
- def read(self, id_):
- # pylint: disable=W8106
- with self.handle_404():
- return self.client.issue_type(id_).raw
-
- def search(self):
- issues = self.client.issue_types()
- return [issue.id for issue in issues]
diff --git a/connector_jira/models/jira_project_base_mixin.py b/connector_jira/models/jira_project_base_mixin.py
new file mode 100644
index 00000000..522707c7
--- /dev/null
+++ b/connector_jira/models/jira_project_base_mixin.py
@@ -0,0 +1,54 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import api, fields, models
+
+
+class JiraProjectBaseFields(models.AbstractModel):
+ """JIRA Project Base fields
+
+ Shared by the binding jira.project.project
+ and the wizard to link/create a JIRA project
+ """
+
+ _name = "jira.project.base.mixin"
+ _description = "JIRA Project Base Mixin"
+
+ jira_key = fields.Char(
+ string="JIRA Key",
+ required=True,
+ size=10, # limit on JIRA
+ )
+ sync_issue_type_ids = fields.Many2many(
+ comodel_name="jira.issue.type",
+ string="Issue Levels to Synchronize",
+ domain="[('backend_id', '=', backend_id)]",
+ help="Only issues of these levels are imported. "
+ "When a worklog is imported no a level which is "
+ "not sync'ed, it is attached to the nearest "
+ "sync'ed parent level. If no parent can be found, "
+ "it is attached to a special 'Unassigned' task.",
+ )
+ project_template = fields.Selection(
+ selection="_selection_project_template",
+ string="Default Project Template",
+ default="Scrum software development",
+ )
+ project_template_shared = fields.Char(
+ string="Default Shared Template",
+ )
+ sync_action = fields.Selection(
+ selection=[("link", "Link with JIRA"), ("export", "Export to JIRA")],
+ default="link",
+ required=True,
+ help="Defines if the information of the project (name "
+ "and key) are exported to JIRA when changed. Link means"
+ "the project already exists on JIRA, no sync of the project"
+ " details once the link is established."
+ " Tasks are always imported from JIRA, not pushed.",
+ )
+
+ @api.model
+ def _selection_project_template(self):
+ return self.env["jira.backend"]._selection_project_template()
diff --git a/connector_jira/models/jira_project_project.py b/connector_jira/models/jira_project_project.py
new file mode 100644
index 00000000..740abaf1
--- /dev/null
+++ b/connector_jira/models/jira_project_project.py
@@ -0,0 +1,142 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+import re
+
+from odoo import _, api, exceptions, fields, models, tools
+
+
+class JiraProjectProject(models.Model):
+ _name = "jira.project.project"
+ _inherit = ["jira.binding", "jira.project.base.mixin"]
+ _inherits = {"project.project": "odoo_id"}
+ _description = "Jira Projects"
+
+ odoo_id = fields.Many2one(
+ comodel_name="project.project",
+ string="Project",
+ required=True,
+ index=True,
+ ondelete="restrict",
+ )
+ project_type = fields.Selection(selection="_selection_project_type")
+
+ @api.model
+ def _selection_project_type(self):
+ return [("software", "Software"), ("business", "Business")]
+
+ # Disable and implement the constraint jira_binding_uniq as python because
+ # we need to override it in connector_jira_service_desk, and it would try
+ # to create it again at every update because of the base implementation
+ # in the binding's parent model.
+ def _add_sql_constraints(self):
+ # we replace the sql constraint by a python one
+ # to include the organizations
+ for key, definition, __ in self._sql_constraints:
+ conname = f"{self._table}_{key}"
+ if key == "jira_binding_uniq":
+ if tools.constraint_definition(self.env.cr, self._table, conname):
+ tools.drop_constraint(self.env.cr, self._table, conname)
+ else:
+ tools.add_constraint(self.env.cr, self._table, conname, definition)
+ return super()._add_sql_constraints()
+
+ def _export_binding_domain(self):
+ """Return the domain for the constraints on export bindings"""
+ self.ensure_one()
+ return [
+ ("odoo_id", "=", self.odoo_id.id),
+ ("backend_id", "=", self.backend_id.id),
+ ("sync_action", "=", "export"),
+ ]
+
+ @api.constrains("backend_id", "odoo_id", "sync_action")
+ def _constrains_odoo_jira_sync_action_export_uniq(self):
+ """Add a constraint on backend+odoo id for export action
+
+ Only one binding can have the sync_action "export", as it pushes the
+ name and key to Jira, we cannot export the same values to several
+ projects.
+ """
+ for binding in self:
+ domain = binding._export_binding_domain()
+ export_bindings = self.with_context(active_test=False).search(domain)
+ if len(export_bindings) > 1:
+ raise exceptions.ValidationError(
+ _(
+ "Only one Jira binding can be configured with the Sync. Action"
+ ' "Export" for a project. "%s" already has one.',
+ binding.display_name,
+ )
+ )
+
+ @api.constrains("backend_id", "external_id")
+ def _constrains_jira_uniq(self):
+ """Add a constraint on backend+jira id
+
+ Defined as a python method rather than a postgres constraint
+ in order to ease the override in connector_jira_servicedesk
+ """
+ for binding in self.filtered("external_id"):
+ same_link_bindings = self.with_context(active_test=False).search(
+ [
+ ("id", "!=", binding.id),
+ ("backend_id", "=", binding.backend_id.id),
+ ("external_id", "=", binding.external_id),
+ ]
+ )
+ if same_link_bindings:
+ raise exceptions.ValidationError(
+ _(
+ "The project %s is already linked with the same JIRA project.",
+ same_link_bindings.display_name,
+ )
+ )
+
+ @api.constrains("jira_key")
+ def _check_jira_key(self):
+ for key in self.filtered("jira_key").mapped("jira_key"):
+ if not self._jira_key_valid(key):
+ raise exceptions.ValidationError(_("%s is not a valid JIRA Key", key))
+
+ @api.onchange("backend_id")
+ def onchange_project_backend_id(self):
+ self.project_template = self.backend_id.project_template
+ self.project_template_shared = self.backend_id.project_template_shared
+
+ @staticmethod
+ def _jira_key_valid(key):
+ return bool(re.match(r"^[A-Z][A-Z0-9]{1,9}$", key))
+
+ @api.constrains("project_template_shared")
+ def _check_project_template_shared(self):
+ for tmpl in set(self.mapped("project_template_shared")):
+ if tmpl and not self._jira_key_valid(tmpl):
+ raise exceptions.ValidationError(_("%s is not a valid JIRA Key", tmpl))
+
+ def _is_linked(self):
+ return bool(self) and any(p.sync_action == "link" for p in self)
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ records = super().create(vals_list)
+ records._ensure_jira_key()
+ return records
+
+ def write(self, values):
+ if "project_template" in values:
+ raise exceptions.UserError(_("The project template cannot be modified."))
+ res = super().write(values)
+ self._ensure_jira_key()
+ return res
+
+ @api.ondelete(at_uninstall=False)
+ def _unlink_unless_exported(self):
+ if any(self.mapped("external_id")):
+ raise exceptions.UserError(_("Exported project cannot be deleted."))
+
+ def _ensure_jira_key(self):
+ if self.env.context.get("connector_no_export") or all(r.jira_key for r in self):
+ return
+ raise exceptions.UserError(_("JIRA Key is mandatory to link a project"))
diff --git a/connector_jira/models/jira_project_task.py b/connector_jira/models/jira_project_task.py
new file mode 100644
index 00000000..b004c44e
--- /dev/null
+++ b/connector_jira/models/jira_project_task.py
@@ -0,0 +1,70 @@
+# Copyright 2016-2019 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import _, api, exceptions, fields, models
+
+
+class JiraProjectTask(models.Model):
+ _name = "jira.project.task"
+ _inherit = "jira.binding"
+ _inherits = {"project.task": "odoo_id"}
+ _description = "Jira Tasks"
+
+ odoo_id = fields.Many2one(
+ comodel_name="project.task",
+ string="Task",
+ required=True,
+ index=True,
+ ondelete="restrict",
+ )
+    # As we can have more than one jira binding on a project.project, we store
+    # which one a task binding is related to.
+ jira_project_bind_id = fields.Many2one(
+ comodel_name="jira.project.project",
+ ondelete="restrict",
+ )
+ jira_key = fields.Char(
+ string="Key",
+ )
+ jira_issue_type_id = fields.Many2one(
+ comodel_name="jira.issue.type",
+ string="Issue Type",
+ )
+ jira_epic_link_id = fields.Many2one(
+ comodel_name="jira.project.task",
+ string="Epic",
+ )
+ jira_parent_id = fields.Many2one(
+ comodel_name="jira.project.task",
+ string="Parent Issue",
+ help="Parent issue when the issue is a subtask. "
+ "Empty if the type of parent is filtered out "
+ "of the synchronizations.",
+ )
+ jira_issue_url = fields.Char(
+ string="JIRA issue",
+ compute="_compute_jira_issue_url",
+ )
+
+ _sql_constraints = [
+ (
+ "jira_binding_backend_uniq",
+ "unique(backend_id, odoo_id)",
+ "A binding already exists for this task and this backend.",
+ ),
+ ]
+
+ def _is_linked(self):
+ return self.jira_project_bind_id._is_linked()
+
+ @api.ondelete(at_uninstall=False)
+ def _unlink_unless_is_jira_task(self):
+ if any(self.mapped("external_id")):
+ raise exceptions.UserError(_("A Jira task cannot be deleted."))
+
+ @api.depends("backend_id.uri", "jira_key")
+ def _compute_jira_issue_url(self):
+ """Compute the external URL to JIRA."""
+ for record in self:
+ record.jira_issue_url = record.backend_id.make_issue_url(record.jira_key)
diff --git a/connector_jira/models/jira_res_users.py b/connector_jira/models/jira_res_users.py
new file mode 100644
index 00000000..468e1e0f
--- /dev/null
+++ b/connector_jira/models/jira_res_users.py
@@ -0,0 +1,20 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html)
+
+from odoo import fields, models
+
+
+class JiraResUsers(models.Model):
+ _name = "jira.res.users"
+ _inherit = "jira.binding"
+ _inherits = {"res.users": "odoo_id"}
+ _description = "Jira User"
+
+ odoo_id = fields.Many2one(
+ comodel_name="res.users",
+ string="User",
+ required=True,
+ index=True,
+ ondelete="restrict",
+ )
diff --git a/connector_jira/models/project_project.py b/connector_jira/models/project_project.py
new file mode 100644
index 00000000..8044c23b
--- /dev/null
+++ b/connector_jira/models/project_project.py
@@ -0,0 +1,42 @@
+# Copyright 2016-2022 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import api, fields, models
+
+
+class ProjectProject(models.Model):
+ _inherit = "project.project"
+ _rec_names_search = ["jira_key"]
+
+ jira_bind_ids = fields.One2many(
+ comodel_name="jira.project.project",
+ inverse_name="odoo_id",
+ copy=False,
+ string="Project Bindings",
+ context={"active_test": False},
+ )
+ jira_key = fields.Char(
+ string="JIRA Key",
+ compute="_compute_jira_key",
+ store=True,
+ )
+
+ @api.depends("jira_bind_ids.jira_key")
+ def _compute_jira_key(self):
+ for project in self:
+ project.jira_key = ", ".join(project.jira_bind_ids.mapped("jira_key"))
+
+ # pylint: disable=W8110
+ @api.depends("jira_key")
+ def _compute_display_name(self):
+ super()._compute_display_name()
+ for project in self.filtered("jira_key"):
+ project.display_name = f"[{project.jira_key}] {project.display_name}"
+
+ def create_and_link_jira(self):
+ self.ensure_one()
+ xmlid = "connector_jira.open_project_link_jira"
+ action = self.env["ir.actions.act_window"]._for_xml_id(xmlid)
+ action["context"] = dict(self.env.context, default_project_id=self.id)
+ return action
diff --git a/connector_jira/models/project_project/__init__.py b/connector_jira/models/project_project/__init__.py
deleted file mode 100644
index 654c9a43..00000000
--- a/connector_jira/models/project_project/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import binder
-from . import common
-from . import project_link_jira
-from . import exporter
diff --git a/connector_jira/models/project_project/common.py b/connector_jira/models/project_project/common.py
deleted file mode 100644
index f79c5a90..00000000
--- a/connector_jira/models/project_project/common.py
+++ /dev/null
@@ -1,367 +0,0 @@
-# Copyright 2016-2022 Camptocamp SA
-# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-import json
-import logging
-import re
-import tempfile
-
-from odoo import _, api, exceptions, fields, models, tools
-from odoo.osv import expression
-
-from odoo.addons.component.core import Component
-
-_logger = logging.getLogger(__name__)
-
-try:
- from jira import JIRAError
- from jira.utils import json_loads
-except ImportError as err:
- _logger.debug(err)
-
-
-class JiraProjectBaseFields(models.AbstractModel):
- """JIRA Project Base fields
-
- Shared by the binding jira.project.project
- and the wizard to link/create a JIRA project
- """
-
- _name = "jira.project.base.mixin"
- _description = "JIRA Project Base Mixin"
-
- jira_key = fields.Char(
- string="JIRA Key",
- required=True,
- size=10,
- ) # limit on JIRA
- sync_issue_type_ids = fields.Many2many(
- comodel_name="jira.issue.type",
- string="Issue Levels to Synchronize",
- domain="[('backend_id', '=', backend_id)]",
- help="Only issues of these levels are imported. "
- "When a worklog is imported no a level which is "
- "not sync'ed, it is attached to the nearest "
- "sync'ed parent level. If no parent can be found, "
- "it is attached to a special 'Unassigned' task.",
- )
- project_template = fields.Selection(
- selection="_selection_project_template",
- string="Default Project Template",
- default="Scrum software development",
- )
- project_template_shared = fields.Char(
- string="Default Shared Template",
- )
- sync_action = fields.Selection(
- selection=[("link", "Link with JIRA"), ("export", "Export to JIRA")],
- default="link",
- required=True,
- help="Defines if the information of the project (name "
- "and key) are exported to JIRA when changed. Link means"
- "the project already exists on JIRA, no sync of the project"
- " details once the link is established."
- " Tasks are always imported from JIRA, not pushed.",
- )
-
- @api.model
- def _selection_project_template(self):
- return self.env["jira.backend"]._selection_project_template()
-
-
-class JiraProjectProject(models.Model):
- _name = "jira.project.project"
- _inherit = ["jira.binding", "jira.project.base.mixin"]
- _inherits = {"project.project": "odoo_id"}
- _description = "Jira Projects"
-
- odoo_id = fields.Many2one(
- comodel_name="project.project",
- string="Project",
- required=True,
- index=True,
- ondelete="restrict",
- )
- project_type = fields.Selection(selection="_selection_project_type")
-
- @api.model
- def _selection_project_type(self):
- return [
- ("software", "Software"),
- ("business", "Business"),
- ]
-
- # Disable and implement the constraint jira_binding_uniq as python because
- # we need to override the in connector_jira_service_desk and it would try
- # to create it again at every update because of the base implementation
- # in the binding's parent model.
- def _add_sql_constraints(self):
- # we replace the sql constraint by a python one
- # to include the organizations
- for key, definition, _msg in self._sql_constraints:
- conname = f"{self._table}_{key}"
- if key == "jira_binding_uniq":
- has_definition = tools.constraint_definition(
- self.env.cr, self._table, conname
- )
- if has_definition:
- tools.drop_constraint(self.env.cr, self._table, conname)
- else:
- tools.add_constraint(self.env.cr, self._table, conname, definition)
- return super()._add_sql_constraints()
-
- def _export_binding_domain(self):
- """Return the domain for the constraints on export bindings"""
- self.ensure_one()
- domain = [
- ("odoo_id", "=", self.odoo_id.id),
- ("backend_id", "=", self.backend_id.id),
- ("sync_action", "=", "export"),
- ]
- return domain
-
- @api.constrains("backend_id", "odoo_id", "sync_action")
- def _constrains_odoo_jira_sync_action_export_uniq(self):
- """Add a constraint on backend+odoo id for export action
-
- Only one binding can have the sync_action "export", as it pushes the
- name and key to Jira, we cannot export the same values to several
- projects.
- """
- for binding in self:
- export_bindings = self.with_context(active_test=False).search(
- self._export_binding_domain()
- )
- if len(export_bindings) > 1:
- raise exceptions.ValidationError(
- _(
- "Only one Jira binding can be configured with the Sync."
- ' Action "Export" for a project. "%s" already'
- " has one."
- )
- % (binding.display_name,)
- )
-
- @api.constrains("backend_id", "external_id")
- def _constrains_jira_uniq(self):
- """Add a constraint on backend+jira id
-
- Defined as a python method rather than a postgres constraint
- in order to ease the override in connector_jira_servicedesk
- """
- for binding in self:
- if not binding.external_id:
- continue
- same_link_bindings = self.with_context(active_test=False).search(
- [
- ("id", "!=", binding.id),
- ("backend_id", "=", binding.backend_id.id),
- ("external_id", "=", binding.external_id),
- ]
- )
- if same_link_bindings:
- raise exceptions.ValidationError(
- _("The project %s is already linked with the same" " JIRA project.")
- % (same_link_bindings.display_name)
- )
-
- @api.constrains("jira_key")
- def check_jira_key(self):
- for project in self:
- if not project.jira_key:
- continue
- if not self._jira_key_valid(project.jira_key):
- raise exceptions.ValidationError(
- _("%s is not a valid JIRA Key") % project.jira_key
- )
-
- @api.onchange("backend_id")
- def onchange_project_backend_id(self):
- self.project_template = self.backend_id.project_template
- self.project_template_shared = self.backend_id.project_template_shared
-
- @staticmethod
- def _jira_key_valid(key):
- return bool(re.match(r"^[A-Z][A-Z0-9]{1,9}$", key))
-
- @api.constrains("project_template_shared")
- def check_project_template_shared(self):
- for binding in self:
- if not binding.project_template_shared:
- continue
- if not self._jira_key_valid(binding.project_template_shared):
- raise exceptions.ValidationError(
- _("%s is not a valid JIRA Key") % binding.project_template_shared
- )
-
- def _is_linked(self):
- for project in self:
- if project.sync_action == "link":
- return True
- return False
-
- @api.model
- def create(self, values):
- record = super().create(values)
- record._ensure_jira_key()
- return record
-
- def write(self, values):
- if "project_template" in values:
- raise exceptions.UserError(_("The project template cannot be modified."))
- res = super().write(values)
- self._ensure_jira_key()
- return res
-
- def _ensure_jira_key(self):
- if self.env.context.get("connector_no_export"):
- return
- for record in self:
- if not record.jira_key:
- raise exceptions.UserError(
- _("The JIRA Key is mandatory in order to link a project")
- )
-
- def unlink(self):
- if any(self.mapped("external_id")):
- raise exceptions.UserError(_("Exported project cannot be deleted."))
- return super().unlink()
-
-
-class ProjectProject(models.Model):
- _inherit = "project.project"
-
- jira_bind_ids = fields.One2many(
- comodel_name="jira.project.project",
- inverse_name="odoo_id",
- copy=False,
- string="Project Bindings",
- context={"active_test": False},
- )
- jira_key = fields.Char(
- string="JIRA Key",
- compute="_compute_jira_key",
- store=True,
- )
-
- @api.depends("jira_bind_ids.jira_key")
- def _compute_jira_key(self):
- for project in self:
- keys = project.mapped("jira_bind_ids.jira_key")
- project.jira_key = ", ".join(keys)
-
- def name_get(self):
- names = []
- for project in self:
- project_id, name = super(ProjectProject, project).name_get()[0]
- if project.jira_key:
- name = f"[{project.jira_key}] {name}"
- names.append((project_id, name))
- return names
-
- @api.model
- def name_search(self, name="", args=None, operator="ilike", limit=100):
- res = super().name_search(name, args, operator, limit)
- if not name:
- return res
- domain = [
- "|",
- ("jira_key", "=ilike", name + "%"),
- ("id", "in", [x[0] for x in res]),
- ]
- if operator in expression.NEGATIVE_TERM_OPERATORS:
- domain = ["&", "!"] + domain[1:]
- return self.search(
- domain + (args or []),
- limit=limit,
- ).name_get()
-
- def create_and_link_jira(self):
- action_link = self.env.ref("connector_jira.open_project_link_jira")
- action = action_link.read()[0]
- action["context"] = dict(
- self.env.context,
- active_id=self.id,
- active_model=self._name,
- )
- return action
-
-
-class ProjectAdapter(Component):
- _name = "jira.project.adapter"
- _inherit = ["jira.webservice.adapter"]
- _apply_on = ["jira.project.project"]
-
- def read(self, id_):
- # pylint: disable=W8106
- with self.handle_404():
- return self.get(id_).raw
-
- def get(self, id_):
- with self.handle_404():
- return self.client.project(id_)
-
- def write(self, id_, values):
- super().write(id_, values)
- with self.handle_404():
- return self.get(id_).update(values)
-
- def create(self, key=None, name=None, template_name=None, values=None):
- super().create(key=key, name=name, template_name=template_name, values=values)
- project = self.client.create_project(
- key=key,
- name=name,
- template_name=template_name,
- )
- if values:
- project.update(values)
- return project
-
- def create_shared(self, key=None, name=None, shared_key=None, lead=None):
- assert key and name and shared_key
- # There is no public method for creating a shared project:
- # https://jira.atlassian.com/browse/JRA-45929
- # People found a private method for doing so, which is explained on:
- # https://jira.atlassian.com/browse/JRASERVER-27256
-
- try:
- project = self.read(shared_key)
- project_id = project["id"]
- except JIRAError as err:
- if err.status_code == 404:
- raise exceptions.UserError(
- _('Project template with key "%s" not found.') % shared_key
- ) from err
- else:
- raise
-
- url = (
- self.client._options["server"]
- + "/rest/project-templates/1.0/createshared/%s" % project_id
- )
- payload = {
- "name": name,
- "key": key,
- "lead": lead,
- }
-
- r = self.client._session.post(url, data=json.dumps(payload))
- if r.status_code == 200:
- r_json = json_loads(r)
- return r_json
-
- f = tempfile.NamedTemporaryFile(
- suffix=".html",
- prefix="python-jira-error-create-shared-project-",
- delete=False,
- )
- f.write(r.text)
-
- if self.logging:
- logging.error(
- "Unexpected result while running create shared project."
- f" Server response saved in {f.name} for further investigation"
- f" [HTTP response={r.status_code}]."
- )
- return False
diff --git a/connector_jira/models/project_project/exporter.py b/connector_jira/models/project_project/exporter.py
deleted file mode 100644
index e17e3a82..00000000
--- a/connector_jira/models/project_project/exporter.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright 2016-2019 Camptocamp SA
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from odoo.addons.component.core import Component
-from odoo.addons.component_event import skip_if
-
-
-class JiraProjectProjectListener(Component):
- _name = "jira.project.project.listener"
- _inherit = ["base.connector.listener"]
- _apply_on = ["jira.project.project"]
-
- @skip_if(lambda self, record, **kwargs: self.no_connector_export(record))
- def on_record_create(self, record, fields=None):
- if record.sync_action == "export":
- record.with_delay(priority=10).export_record(fields=fields)
-
- @skip_if(lambda self, record, **kwargs: self.no_connector_export(record))
- def on_record_write(self, record, fields=None):
- if record.sync_action == "export":
- record.with_delay(priority=10).export_record(fields=fields)
-
-
-class ProjectProjectListener(Component):
- _name = "project.project.listener"
- _inherit = ["base.connector.listener"]
- _apply_on = ["project.project"]
-
- @skip_if(lambda self, record, **kwargs: self.no_connector_export(record))
- def on_record_write(self, record, fields=None):
- if fields == ["jira_bind_ids"] or fields == ["message_follower_ids"]:
- # When vals is esb_bind_ids:
- # Binding edited from the record's view. When only this field has
- # been modified, an other job has already been delayed for the
- # binding record so can exit this event early.
-
- # When vals is message_follower_ids:
- # MailThread.message_subscribe() has been called, this
- # method does a write on the field message_follower_ids,
- # we never want to export that.
- return
- for binding in record.jira_bind_ids:
- if binding.sync_action == "export":
- binding.with_delay(priority=10).export_record(fields=fields)
-
-
-class JiraProjectProjectExporter(Component):
- _name = "jira.project.project.exporter"
- _inherit = ["jira.exporter"]
- _apply_on = ["jira.project.project"]
-
- def _create_project(self, adapter, key, name, template, values):
- project = adapter.create(
- key=key,
- name=name,
- template_name=template,
- values=values,
- )
- return project["projectId"]
-
- def _create_shared_project(self, adapter, key, name, shared_key, lead):
- project = adapter.create_shared(
- key=key,
- name=name,
- shared_key=shared_key,
- lead=lead,
- )
- return project["projectId"]
-
- def _update_project(self, adapter, values):
- adapter.write(self.external_id, values)
-
- def _run(self, fields=None):
- adapter = self.component(usage="backend.adapter")
-
- key = self.binding.jira_key
- name = self.binding.name[:80]
- template = self.binding.project_template
- # TODO: add lead
-
- if self.external_id:
- self._update_project(adapter, {"name": name, "key": key})
- else:
- if template == "shared":
- shared_key = self.binding.project_template_shared
- self.external_id = self._create_shared_project(
- adapter, key, name, shared_key, None
- )
- else:
- self.external_id = self._create_project(
- adapter, key, name, template, {}
- )
diff --git a/connector_jira/models/project_task.py b/connector_jira/models/project_task.py
new file mode 100644
index 00000000..427619c8
--- /dev/null
+++ b/connector_jira/models/project_task.py
@@ -0,0 +1,164 @@
+# Copyright 2016-2019 Camptocamp SA
+# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import _, api, exceptions, fields, models
+
+
+class ProjectTask(models.Model):
+ _inherit = "project.task"
+ _rec_names_search = ["jira_compound_key"]
+
+ jira_bind_ids = fields.One2many(
+ comodel_name="jira.project.task",
+ inverse_name="odoo_id",
+ copy=False,
+ string="Task Bindings",
+ context={"active_test": False},
+ )
+ jira_issue_type = fields.Char(
+ compute="_compute_jira_issue_type",
+ string="JIRA Issue Type",
+ store=True,
+ )
+ jira_compound_key = fields.Char(
+ compute="_compute_jira_compound_key",
+ string="JIRA Key",
+ store=True,
+ )
+ jira_epic_link_task_id = fields.Many2one(
+ comodel_name="project.task",
+ compute="_compute_jira_epic_link_task_id",
+ string="JIRA Epic",
+ store=True,
+ )
+ jira_parent_task_id = fields.Many2one(
+ comodel_name="project.task",
+ compute="_compute_jira_parent_task_id",
+ string="JIRA Parent",
+ store=True,
+ )
+ jira_issue_url = fields.Char(
+ string="JIRA issue",
+ compute="_compute_jira_issue_url",
+ )
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ # for vals in vals_list:
+ # self._connector_jira_create_validate(vals)
+ return super().create(vals_list)
+
+ @api.model
+ def _connector_jira_create_validate(self, vals):
+ project_id = vals.get("project_id")
+ if project_id:
+ project = self.env["project.project"].sudo().browse(project_id).exists()
+ if (
+ not self.env.context.get("connector_jira")
+ and project.jira_bind_ids._is_linked()
+ ):
+ raise exceptions.UserError(
+ _("Task can not be created in project linked to JIRA!")
+ )
+
+ def write(self, vals):
+ self._connector_jira_write_validate(vals)
+ return super().write(vals)
+
+ def _connector_jira_write_validate(self, vals):
+ if (
+ not self.env.context.get("connector_jira")
+ and self.jira_bind_ids._is_linked()
+ ):
+ new_values = self._convert_to_write(vals)
+ for old_values in self.read(list(vals.keys()), load="_classic_write"):
+ old_values.pop("id", None)
+ old_values = self._convert_to_write(old_values)
+ for field in self._get_connector_jira_fields():
+ if field in vals and new_values[field] != old_values[field]:
+ raise exceptions.UserError(
+ _("Task linked to JIRA Issue can not be modified!")
+ )
+
+ @api.ondelete(at_uninstall=False)
+ def _unlink_except_records_are_linked(self):
+ if (
+ not self.env.context.get("connector_jira")
+ and self.jira_bind_ids._is_linked()
+ ):
+ raise exceptions.UserError(
+ _("Task linked to JIRA Issue can not be deleted!")
+ )
+
+ @api.depends("jira_bind_ids.jira_issue_type_id.name")
+ def _compute_jira_issue_type(self):
+ for record in self:
+ types = record.jira_bind_ids.jira_issue_type_id.mapped("name")
+ record.jira_issue_type = ",".join([t for t in types if t])
+
+ @api.depends("jira_bind_ids.jira_key")
+ def _compute_jira_compound_key(self):
+ for record in self:
+ keys = record.jira_bind_ids.mapped("jira_key")
+ record.jira_compound_key = ",".join([k for k in keys if k])
+
+ @api.depends("jira_bind_ids.jira_epic_link_id.odoo_id")
+ def _compute_jira_epic_link_task_id(self):
+ self.jira_epic_link_task_id = False
+ for record in self:
+ tasks = record.jira_bind_ids.jira_epic_link_id.odoo_id
+ if len(tasks) == 1:
+ record.jira_epic_link_task_id = tasks
+
+ @api.depends("jira_bind_ids.jira_parent_id.odoo_id")
+ def _compute_jira_parent_task_id(self):
+ self.jira_parent_task_id = False
+ for record in self:
+ tasks = record.jira_bind_ids.jira_parent_id.odoo_id
+ if len(tasks) == 1:
+ record.jira_parent_task_id = tasks
+
+ @api.depends("jira_bind_ids.jira_issue_url")
+ def _compute_jira_issue_url(self):
+ """Compute the external URL to JIRA.
+
+ We assume that we have only one external record.
+ """
+ for record in self:
+ main_binding = record.jira_bind_ids[:1]
+ record.jira_issue_url = main_binding.jira_issue_url or ""
+
+ # pylint: disable=W8110
+ @api.depends("jira_compound_key")
+ def _compute_display_name(self):
+ super()._compute_display_name()
+ for task in self.filtered("jira_compound_key"):
+ task.display_name = f"[{task.jira_compound_key}] {task.display_name}"
+
+ @api.model
+ def _get_connector_jira_fields(self):
+ return [
+ "jira_bind_ids",
+ "name",
+ "date_deadline",
+ "user_id",
+ "description",
+ "active",
+ "project_id",
+ "allocated_hours",
+ "stage_id",
+ ]
+
+ def create_and_link_jira(self):
+ self.ensure_one()
+ backends = self.project_id.jira_bind_ids.backend_id
+ xmlid = "connector_jira.open_task_link_jira"
+ action = self.env["ir.actions.act_window"]._for_xml_id(xmlid)
+ action["context"] = dict(
+ self.env.context,
+ default_task_id=self.id,
+ default_linked_backend_ids=[fields.Command.set(backends.ids)],
+ default_backend_id=backends.id if len(backends) == 1 else False,
+ )
+ return action
diff --git a/connector_jira/models/project_task/__init__.py b/connector_jira/models/project_task/__init__.py
deleted file mode 100644
index fbb9dae9..00000000
--- a/connector_jira/models/project_task/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import common
-from . import importer
-from . import task_link_jira
diff --git a/connector_jira/models/project_task/common.py b/connector_jira/models/project_task/common.py
deleted file mode 100644
index 83b8ce11..00000000
--- a/connector_jira/models/project_task/common.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# Copyright 2016-2019 Camptocamp SA
-# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from odoo import _, api, exceptions, fields, models
-from odoo.osv import expression
-
-from odoo.addons.component.core import Component
-
-
-class JiraProjectTask(models.Model):
- _name = "jira.project.task"
- _inherit = "jira.binding"
- _inherits = {"project.task": "odoo_id"}
- _description = "Jira Tasks"
-
- odoo_id = fields.Many2one(
- comodel_name="project.task",
- string="Task",
- required=True,
- index=True,
- ondelete="restrict",
- )
- # As we can have more than one jira binding on a project.project, we store
- # to which one a task binding is related.
- jira_project_bind_id = fields.Many2one(
- comodel_name="jira.project.project",
- ondelete="restrict",
- )
- jira_key = fields.Char(
- string="Key",
- readonly=True,
- )
- jira_issue_type_id = fields.Many2one(
- comodel_name="jira.issue.type",
- string="Issue Type",
- readonly=True,
- )
- jira_epic_link_id = fields.Many2one(
- comodel_name="jira.project.task",
- string="Epic",
- readonly=True,
- )
- jira_parent_id = fields.Many2one(
- comodel_name="jira.project.task",
- string="Parent Issue",
- readonly=True,
- help="Parent issue when the issue is a subtask. "
- "Empty if the type of parent is filtered out "
- "of the synchronizations.",
- )
- jira_issue_url = fields.Char(
- string="JIRA issue",
- compute="_compute_jira_issue_url",
- )
-
- _sql_constraints = [
- (
- "jira_binding_backend_uniq",
- "unique(backend_id, odoo_id)",
- "A binding already exists for this task and this backend.",
- ),
- ]
-
- def _is_linked(self):
- return self.mapped("jira_project_bind_id")._is_linked()
-
- def unlink(self):
- if any(self.mapped("external_id")):
- raise exceptions.UserError(_("A Jira task cannot be deleted."))
- return super().unlink()
-
- @api.depends("jira_key")
- def _compute_jira_issue_url(self):
- """Compute the external URL to JIRA."""
- for record in self:
- record.jira_issue_url = record.backend_id.make_issue_url(record.jira_key)
-
-
-class ProjectTask(models.Model):
- _inherit = "project.task"
-
- jira_bind_ids = fields.One2many(
- comodel_name="jira.project.task",
- inverse_name="odoo_id",
- copy=False,
- string="Task Bindings",
- context={"active_test": False},
- )
- jira_issue_type = fields.Char(
- compute="_compute_jira_issue_type",
- string="JIRA Issue Type",
- store=True,
- )
- jira_compound_key = fields.Char(
- compute="_compute_jira_compound_key",
- string="JIRA Key",
- store=True,
- )
- jira_epic_link_task_id = fields.Many2one(
- comodel_name="project.task",
- compute="_compute_jira_epic_link_task_id",
- string="JIRA Epic",
- store=True,
- )
- jira_parent_task_id = fields.Many2one(
- comodel_name="project.task",
- compute="_compute_jira_parent_task_id",
- string="JIRA Parent",
- store=True,
- )
- jira_issue_url = fields.Char(
- string="JIRA issue",
- compute="_compute_jira_issue_url",
- )
-
- @api.depends("jira_bind_ids.jira_issue_type_id.name")
- def _compute_jira_issue_type(self):
- for record in self:
- types = record.mapped("jira_bind_ids.jira_issue_type_id.name")
- record.jira_issue_type = ",".join([t for t in types if t])
-
- @api.depends("jira_bind_ids.jira_key")
- def _compute_jira_compound_key(self):
- for record in self:
- keys = record.mapped("jira_bind_ids.jira_key")
- record.jira_compound_key = ",".join([k for k in keys if k])
-
- @api.depends("jira_bind_ids.jira_epic_link_id.odoo_id")
- def _compute_jira_epic_link_task_id(self):
- for record in self:
- tasks = record.mapped("jira_bind_ids.jira_epic_link_id.odoo_id")
- if len(tasks) == 1:
- record.jira_epic_link_task_id = tasks
-
- @api.depends("jira_bind_ids.jira_parent_id.odoo_id")
- def _compute_jira_parent_task_id(self):
- for record in self:
- tasks = record.mapped("jira_bind_ids.jira_parent_id.odoo_id")
- if len(tasks) == 1:
- record.jira_parent_task_id = tasks
-
- @api.depends("jira_bind_ids.jira_key")
- def _compute_jira_issue_url(self):
- """Compute the external URL to JIRA.
-
- We assume that we have only one external record.
- """
- for record in self:
- if not record.jira_bind_ids:
- record.jira_issue_url = False
- continue
- main_binding = record.jira_bind_ids[0]
- record.jira_issue_url = main_binding.jira_issue_url
-
- def name_get(self):
- names = []
- for task in self:
- task_id, name = super(ProjectTask, task).name_get()[0]
- if task.jira_compound_key:
- name = f"[{task.jira_compound_key}] {name}"
- names.append((task_id, name))
- return names
-
- @api.model
- def name_search(self, name="", args=None, operator="ilike", limit=100):
- res = super().name_search(name, args, operator, limit)
- if not name:
- return res
- domain = [
- "|",
- ("jira_compound_key", "=ilike", name + "%"),
- ("id", "in", [x[0] for x in res]),
- ]
- if operator in expression.NEGATIVE_TERM_OPERATORS:
- domain = ["&", "!"] + domain[1:]
- return self.search(
- domain + (args or []),
- limit=limit,
- ).name_get()
-
- @api.model
- def _get_connector_jira_fields(self):
- return [
- "jira_bind_ids",
- "name",
- "date_deadline",
- "user_id",
- "description",
- "active",
- "project_id",
- "planned_hours",
- "stage_id",
- ]
-
- @api.model
- def _connector_jira_create_validate(self, vals):
- ProjectProject = self.env["project.project"]
- project_id = vals.get("project_id")
- if project_id:
- project_id = ProjectProject.sudo().browse(project_id)
- if (
- not self.env.context.get("connector_jira")
- and project_id.mapped("jira_bind_ids")._is_linked()
- ):
- raise exceptions.UserError(
- _("Task can not be created in project linked to JIRA!")
- )
-
- def _connector_jira_write_validate(self, vals):
- if (
- not self.env.context.get("connector_jira")
- and self.mapped("jira_bind_ids")._is_linked()
- ):
- fields = list(vals.keys())
- self._update_cache(vals)
- new_values = self._convert_to_write(
- vals,
- )
- for old_values in self.read(fields, load="_classic_write"):
- old_values = self._convert_to_write(
- old_values,
- )
- for field in self._get_connector_jira_fields():
- if field not in fields:
- continue
- if new_values[field] == old_values[field]:
- continue
- raise exceptions.UserError(
- _("Task linked to JIRA Issue can not be modified!")
- )
-
- def _connector_jira_unlink_validate(self):
- if (
- not self.env.context.get("connector_jira")
- and self.mapped("jira_bind_ids")._is_linked()
- ):
- raise exceptions.UserError(
- _("Task linked to JIRA Issue can not be deleted!")
- )
-
- @api.model
- def create(self, vals):
- self._connector_jira_create_validate(vals)
- return super().create(vals)
-
- def write(self, vals):
- self._connector_jira_write_validate(vals)
- return super().write(vals)
-
- def unlink(self):
- self._connector_jira_unlink_validate()
- return super().unlink()
-
-
-class TaskAdapter(Component):
- _name = "jira.project.task.adapter"
- _inherit = ["jira.webservice.adapter"]
- _apply_on = ["jira.project.task"]
-
- def read(self, id_, fields=None):
- # pylint: disable=W8106
- return self.get(id_, fields=fields).raw
-
- def get(self, id_, fields=None):
- with self.handle_404():
- return self.client.issue(id_, fields=fields, expand=["renderedFields"])
-
- def search(self, jql):
- # we need to have at least one field which is not 'id' or 'key'
- # due to this bug: https://github.com/pycontribs/jira/pull/289
- fields = "id,updated"
- issues = self.client.search_issues(jql, fields=fields, maxResults=None)
- return [issue.id for issue in issues]
diff --git a/connector_jira/models/project_task/importer.py b/connector_jira/models/project_task/importer.py
deleted file mode 100644
index 0896aae6..00000000
--- a/connector_jira/models/project_task/importer.py
+++ /dev/null
@@ -1,262 +0,0 @@
-# Copyright 2016-2022 Camptocamp SA
-# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from odoo import _
-
-from odoo.addons.component.core import Component
-from odoo.addons.connector.components.mapper import mapping
-from odoo.addons.connector.exception import MappingError
-
-
-class ProjectTaskMapper(Component):
- _name = "jira.project.task.mapper"
- _inherit = "jira.import.mapper"
- _apply_on = ["jira.project.task"]
-
- direct = [
- ("key", "jira_key"),
- ]
-
- from_fields = [
- ("duedate", "date_deadline"),
- ]
-
- @mapping
- def from_attributes(self, record):
- return self.component(usage="map.from.attrs").values(record, self)
-
- @mapping
- def name(self, record):
- # On an Epic, you have 2 fields:
-
- # a field like 'customfield_10003' labelled "Epic Name"
- # a field 'summary' labelled "Sumarry"
-
- # The other types of tasks have only the 'summary' field, the other is
- # empty. To simplify, we always try to read the Epic Name, which
- # will always be empty for other types.
- epic_name_field = self.backend_record.epic_name_field_name
- name = False
- if epic_name_field:
- name = record["fields"].get(epic_name_field)
- if not name:
- name = record["fields"]["summary"]
- return {"name": name}
-
- @mapping
- def issue_type(self, record):
- binder = self.binder_for("jira.issue.type")
- jira_type_id = record["fields"]["issuetype"]["id"]
- binding = binder.to_internal(jira_type_id)
- return {"jira_issue_type_id": binding.id}
-
- @mapping
- def assignee(self, record):
- assignee = record["fields"].get("assignee")
- if not assignee:
- return {"user_ids": False}
- jira_key = assignee["accountId"]
- binder = self.binder_for("jira.res.users")
- user = binder.to_internal(jira_key, unwrap=True)
- if not user:
- email = assignee.get("emailAddress")
- raise MappingError(
- _(
- 'No user found with accountId "%(jira_key)s" or email "%(email)s".'
- "You must create a user or link it manually if the "
- "login/email differs.",
- jira_key=jira_key,
- email=email,
- )
- )
- return {"user_id": user.id}
-
- @mapping
- def description(self, record):
- return {"description": record["renderedFields"]["description"]}
-
- @mapping
- def project(self, record):
- binder = self.binder_for("jira.project.project")
- project = binder.unwrap_binding(self.options.project_binding)
- values = {
- "project_id": project.id,
- "company_id": project.company_id.id,
- "jira_project_bind_id": self.options.project_binding.id,
- }
- if not project.active:
- values["active"] = False
- return values
-
- @mapping
- def epic(self, record):
- if not self.options.jira_epic:
- return {}
- jira_epic_id = self.options.jira_epic["id"]
- binder = self.binder_for("jira.project.task")
- binding = binder.to_internal(jira_epic_id)
- return {"jira_epic_link_id": binding.id}
-
- @mapping
- def parent(self, record):
- jira_parent = record["fields"].get("parent")
- if not jira_parent:
- return {}
- jira_parent_id = jira_parent["id"]
- binder = self.binder_for("jira.project.task")
- binding = binder.to_internal(jira_parent_id)
- return {"jira_parent_id": binding.id}
-
- @mapping
- def backend_id(self, record):
- return {"backend_id": self.backend_record.id}
-
- @mapping
- def status(self, record):
- status = record["fields"].get("status", {})
- status_name = status.get("name")
- if not status_name:
- return {"stage_id": False}
- project_binder = self.binder_for("jira.project.project")
- project_id = project_binder.unwrap_binding(self.options.project_binding)
- stage = self.env["project.task.type"].search(
- [("name", "=", status_name), ("project_ids", "=", project_id.id)],
- limit=1,
- )
- return {"stage_id": stage.id}
-
- @mapping
- def time_estimate(self, record):
- original_estimate = record["fields"].get("timeoriginalestimate")
- if not original_estimate:
- return {"planned_hours": False}
- return {"planned_hours": float(original_estimate) / 3600.0}
-
- def finalize(self, map_record, values):
- values = values.copy()
- if values.get("odoo_id"):
- # If a mapping binds the issue to an existing odoo
- # task, we should not change the project.
- # It's not only unexpected, but would fail as soon
- # as we have invoiced timesheet lines on the task.
- values.pop("project_id")
- return values
-
-
-class ProjectTaskBatchImporter(Component):
- """Import the Jira tasks
-
- For every id in in the list of tasks, a delayed job is created.
- Import from a date
- """
-
- _name = "jira.project.task.batch.importer"
- _inherit = ["jira.timestamp.batch.importer"]
- _apply_on = ["jira.project.task"]
-
-
-class ProjectTaskProjectMatcher(Component):
- _name = "jira.task.project.matcher"
- _inherit = ["jira.base"]
- _usage = "jira.task.project.matcher"
-
- def find_project_binding(self, jira_task_data, unwrap=False):
- jira_project_id = jira_task_data["fields"]["project"]["id"]
- binder = self.binder_for("jira.project.project")
- return binder.to_internal(jira_project_id, unwrap=unwrap)
-
- def fallback_project_for_worklogs(self):
- return self.backend_record.worklog_fallback_project_id
-
-
-class ProjectTaskImporter(Component):
- _name = "jira.project.task.importer"
- _inherit = ["jira.importer"]
- _apply_on = ["jira.project.task"]
-
- def __init__(self, work_context):
- super().__init__(work_context)
- self.jira_epic = None
- self.project_binding = None
-
- def _get_external_data(self):
- """Return the raw Jira data for ``self.external_id``"""
- result = super()._get_external_data()
- epic_field_name = self.backend_record.epic_link_field_name
- if epic_field_name:
- issue_adapter = self.component(
- usage="backend.adapter", model_name="jira.project.task"
- )
- epic_key = result["fields"][epic_field_name]
- if epic_key:
- self.jira_epic = issue_adapter.read(epic_key)
- return result
-
- def _find_project_binding(self):
- matcher = self.component(usage="jira.task.project.matcher")
- self.project_binding = matcher.find_project_binding(self.external_record)
-
- def _is_issue_type_sync(self):
- project_binding = self.project_binding
- task_sync_type_id = self.external_record["fields"]["issuetype"]["id"]
- task_sync_type_binder = self.binder_for("jira.issue.type")
- task_sync_type_binding = task_sync_type_binder.to_internal(
- task_sync_type_id,
- )
- return task_sync_type_binding.is_sync_for_project(project_binding)
-
- def _create_data(self, map_record, **kwargs):
- return super()._create_data(
- map_record,
- jira_epic=self.jira_epic,
- project_binding=self.project_binding,
- **kwargs,
- )
-
- def _update_data(self, map_record, **kwargs):
- return super()._update_data(
- map_record,
- jira_epic=self.jira_epic,
- project_binding=self.project_binding,
- **kwargs,
- )
-
- def _import(self, binding, **kwargs):
- # called at the beginning of _import because we must be sure
- # that dependencies are there (project and issue type)
- self._find_project_binding()
- if not self._is_issue_type_sync():
- return _("Project or issue type is not synchronized.")
- return super()._import(binding, **kwargs)
-
- def _import_dependency_assignee(self):
- jira_assignee = self.external_record["fields"].get("assignee") or {}
- jira_key = jira_assignee.get("accountId")
- self._import_dependency(jira_key, "jira.res.users", record=jira_assignee)
-
- def _import_dependency_issue_type(self):
- jira_issue_type = self.external_record["fields"]["issuetype"]
- jira_issue_type_id = jira_issue_type["id"]
- self._import_dependency(
- jira_issue_type_id, "jira.issue.type", record=jira_issue_type
- )
-
- def _import_dependency_parent(self):
- jira_parent = self.external_record["fields"].get("parent")
- if jira_parent:
- jira_parent_id = jira_parent["id"]
- self._import_dependency(jira_parent_id, "jira.project.task")
-
- def _import_dependency_epic(self):
- if self.jira_epic:
- self._import_dependency(
- self.jira_epic["id"], "jira.project.task", record=self.jira_epic
- )
-
- def _import_dependencies(self):
- """Import the dependencies for the record"""
- self._import_dependency_assignee()
- self._import_dependency_issue_type()
- self._import_dependency_parent()
- self._import_dependency_epic()
diff --git a/connector_jira/models/queue_job/common.py b/connector_jira/models/queue_job.py
similarity index 76%
rename from connector_jira/models/queue_job/common.py
rename to connector_jira/models/queue_job.py
index 81d826c7..82a683b7 100644
--- a/connector_jira/models/queue_job/common.py
+++ b/connector_jira/models/queue_job.py
@@ -7,15 +7,13 @@
class QueueJob(models.Model):
_inherit = "queue.job"
- def related_action_jira_link(self):
+ def related_action_jira_link(self) -> dict:
"""Open a jira url for an issue"""
self.ensure_one()
- model_name = self.model_name
# only tested on issues so far
- issue_models = ("jira.project.task", "jira.account.analytic.line")
- if model_name not in issue_models:
- return
+ if self.model_name not in ("jira.project.task", "jira.account.analytic.line"):
+ return {}
backend = self.args[0]
jira_id = self.args[1]
@@ -29,10 +27,8 @@ def related_action_jira_link(self):
adapter = work.component(usage="backend.adapter")
with adapter.handle_user_api_errors():
jira_record = adapter.get(jira_id)
- jira_key = jira_record.key
-
return {
"type": "ir.actions.act_url",
"target": "new",
- "url": backend.make_issue_url(jira_key),
+ "url": backend.make_issue_url(jira_record.key),
}
diff --git a/connector_jira/models/queue_job/__init__.py b/connector_jira/models/queue_job/__init__.py
deleted file mode 100644
index 63602330..00000000
--- a/connector_jira/models/queue_job/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import common
diff --git a/connector_jira/models/res_users/common.py b/connector_jira/models/res_users.py
similarity index 58%
rename from connector_jira/models/res_users/common.py
rename to connector_jira/models/res_users.py
index 6d3bfe9a..58ef6be4 100644
--- a/connector_jira/models/res_users/common.py
+++ b/connector_jira/models/res_users.py
@@ -2,27 +2,8 @@
# Copyright 2019 Brainbean Apps (https://brainbeanapps.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html)
-from itertools import groupby
-
from odoo import _, exceptions, fields, models
-from odoo.addons.component.core import Component
-
-
-class JiraResUsers(models.Model):
- _name = "jira.res.users"
- _inherit = "jira.binding"
- _inherits = {"res.users": "odoo_id"}
- _description = "Jira User"
-
- odoo_id = fields.Many2one(
- comodel_name="res.users",
- string="User",
- required=True,
- index=True,
- ondelete="restrict",
- )
-
class ResUsers(models.Model):
_inherit = "res.users"
@@ -42,30 +23,39 @@ def button_link_with_jira(self):
raise exceptions.UserError(_("No JIRA user could be found"))
def link_with_jira(self, backends=None, raise_if_mismatch=False):
+ jira_user_model = self.env["jira.res.users"]
if backends is None:
backends = self.env["jira.backend"].search([])
+
+ # TODO: try to split this method; it is hard to decompose because its
+ # local variables are used throughout the whole loop body
result = {}
for backend in backends:
- bknd_result = {
- "success": [],
- "error": [],
- }
+ bknd_result = {"success": [], "error": []}
+ result[backend] = bknd_result
with backend.work_on("jira.res.users") as work:
binder = work.component(usage="binder")
adapter = work.component(usage="backend.adapter")
for user in self:
+ # Already linked to the current user
if binder.to_external(user, wrap=True):
continue
- jira_user = None
+
+ # Retrieve users in Jira
+ jira_users = []
for resolve_by in backend.get_user_resolution_order():
resolve_by_key = resolve_by
resolve_by_value = user[resolve_by]
- jira_user = adapter.search(fragment=resolve_by_value)
- if jira_user:
+ jira_users = adapter.search(fragment=resolve_by_value)
+ if jira_users:
break
- if not jira_user:
+
+ # No user => nothing to do
+ if not jira_users:
continue
- elif len(jira_user) > 1:
+
+ # Multiple users => raise an error or log the info
+ elif len(jira_users) > 1:
if raise_if_mismatch:
raise exceptions.UserError(
_(
@@ -81,25 +71,21 @@ def link_with_jira(self, backends=None, raise_if_mismatch=False):
"key": resolve_by_key,
"value": resolve_by_value,
"error": "multiple_found",
- "detail": [x.accountId for x in jira_user],
+ "detail": [x.accountId for x in jira_users],
}
)
continue
- jira_user = jira_user[0]
- existing = (
- self.env["jira.res.users"]
- .with_context(
- active_test=False,
- )
- .search(
- [
- ("backend_id", "=", backend.id),
- ("external_id", "=", jira_user.accountId),
- ("odoo_id", "!=", user.id),
- ]
- )
- )
+ # Exactly 1 user in Jira => extract it, bind it to the current user
+ external_id = jira_users[0].accountId
+ domain = [
+ ("backend_id", "=", backend.id),
+ ("external_id", "=", external_id),
+ ("odoo_id", "!=", user.id),
+ ]
+ existing = jira_user_model.with_context(active_test=False).search(domain)
+
+ # Jira user is already linked to an Odoo user => log the info
if existing:
bknd_result["error"].append(
{
@@ -110,58 +96,29 @@ def link_with_jira(self, backends=None, raise_if_mismatch=False):
}
)
continue
+
+ # Create binding
+ vals = {"backend_id": backend.id, "odoo_id": user.id}
try:
- binding = self.env["jira.res.users"].create(
- {"backend_id": backend.id, "odoo_id": user.id}
- )
- binder.bind(jira_user.accountId, binding)
- bknd_result["success"].append(
+ binding = jira_user_model.create(vals)
+ binder.bind(external_id, binding)
+ except Exception as err:
+ # Log errors
+ bknd_result["error"].append(
{
"key": "login",
"value": user.login,
- "detail": jira_user.accountId,
+ "error": "binding_error",
+ "detail": str(err),
}
)
-
- except Exception as err:
- bknd_result["error"].append(
+ else:
+ # Log success
+ bknd_result["success"].append(
{
"key": "login",
"value": user.login,
- "error": "binding_error",
- "detail": str(err),
+ "detail": external_id,
}
)
- result[backend] = bknd_result
return result
-
-
-class UserAdapter(Component):
- _name = "jira.res.users.adapter"
- _inherit = ["jira.webservice.adapter"]
- _apply_on = ["jira.res.users"]
-
- def read(self, id_):
- # pylint: disable=W8106
- with self.handle_404():
- return self.client.user(id_).raw
-
- def search(self, fragment=None):
- """Search users
-
- :param fragment: a string to match usernames, name or email against.
- """
- users = self.client.search_users(
- query=fragment, maxResults=None, includeActive=True, includeInactive=True
- )
-
- # User 'accountId' is unique and if same key appears several times, it means
- # that same user is present in multiple User Directories
- users = list(
- map(
- lambda group: list(group[1])[0],
- groupby(users, key=lambda user: user.accountId),
- )
- )
-
- return users
diff --git a/connector_jira/models/res_users/__init__.py b/connector_jira/models/res_users/__init__.py
deleted file mode 100644
index ea8197b1..00000000
--- a/connector_jira/models/res_users/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
-
-from . import common
-from . import importer
diff --git a/connector_jira/reports/__init__.py b/connector_jira/reports/__init__.py
new file mode 100644
index 00000000..2e30f148
--- /dev/null
+++ b/connector_jira/reports/__init__.py
@@ -0,0 +1 @@
+from . import timesheet_analysis_report
diff --git a/connector_jira/reports/timesheet_analysis_report.py b/connector_jira/reports/timesheet_analysis_report.py
new file mode 100644
index 00000000..4db66062
--- /dev/null
+++ b/connector_jira/reports/timesheet_analysis_report.py
@@ -0,0 +1,22 @@
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
+
+from odoo import api, fields, models
+
+
+class TimesheetsAnalysisReport(models.Model):
+ _inherit = "timesheets.analysis.report"
+
+ jira_issue_key = fields.Char(readonly=True)
+ jira_epic_issue_key = fields.Char(readonly=True)
+ jira_issue_type_id = fields.Many2one("jira.issue.type", readonly=True)
+
+ @api.model
+ def _select(self):
+ return (
+ super()._select()
+ + """,
+ A.jira_issue_key AS jira_issue_key,
+ A.jira_epic_issue_key AS jira_epic_issue_key,
+ A.jira_issue_type_id AS jira_issue_type_id
+ """
+ )
diff --git a/connector_jira/tests/test_backend.py b/connector_jira/tests/test_backend.py
index 831b60ea..6fedcede 100644
--- a/connector_jira/tests/test_backend.py
+++ b/connector_jira/tests/test_backend.py
@@ -41,7 +41,7 @@ def test_from_to_string(self):
"2019-04-08 10:30:59.375000",
)
self.assertEqual(
- MilliDatetime.from_string("2019-04-08 10:30:59.375000"),
+ MilliDatetime.to_datetime("2019-04-08 10:30:59.375000"),
datetime(2019, 4, 8, 10, 30, 59, 375000),
)
@@ -66,7 +66,7 @@ def _test_import_date_computed_field(self, timestamp_field_name, component_usage
# The field on jira.backend is a standard odoo Datetime field so works
# with strings (in 11.0). But the field on jira.backend.timestamp is a
# "custom" MilliDatetime field which works with datetime instances.
- self.assertEqual(jira_ts.last_timestamp, fields.Datetime.from_string(test_date))
+ self.assertEqual(jira_ts.last_timestamp, fields.Datetime.to_datetime(test_date))
def test_import_project_task_from_date(self):
self._test_import_date_computed_field(
diff --git a/connector_jira/tests/test_import_task.py b/connector_jira/tests/test_import_task.py
index f646c925..b4bc5ab6 100644
--- a/connector_jira/tests/test_import_task.py
+++ b/connector_jira/tests/test_import_task.py
@@ -120,7 +120,7 @@ def test_import_task_parents(self):
self.assertEqual(task_binding.name, "Task1")
self.assertEqual(task_binding.jira_issue_type_id, self.task_issue_type)
self.assertTrue(task_binding.jira_epic_link_id)
- self.assertAlmostEqual(task_binding.odoo_id.planned_hours, 4.5)
+ self.assertAlmostEqual(task_binding.odoo_id.allocated_hours, 4.5)
epic_binding = task_binding.jira_epic_link_id
self.assertEqual(epic_binding.jira_key, "TEST-1")
diff --git a/connector_jira/views/timesheet_account_analytic_line.xml b/connector_jira/views/account_analytic_line.xml
similarity index 86%
rename from connector_jira/views/timesheet_account_analytic_line.xml
rename to connector_jira/views/account_analytic_line.xml
index 84190364..f795595d 100644
--- a/connector_jira/views/timesheet_account_analytic_line.xml
+++ b/connector_jira/views/account_analytic_line.xml
@@ -11,14 +11,14 @@
name="jira_issue_url"
widget="url"
options='{"text_field": "jira_issue_key"}'
- attrs="{'invisible': [('jira_issue_key', '=', False)]}"
+ invisible="not jira_issue_key"
/>
@@ -30,19 +30,19 @@
-
+
-
+
@@ -57,7 +57,7 @@
-
+
+
+
+
+
+ jira.backend.form
+ jira.backend
+
+
+
+
+
+ jira.backend.tree
+ jira.backend
+
+
+
+
+
+
+
+
+ Jira Backends
+ jira.backend
+ tree,form
+ {"search_default_all": 1}
+
+
+
diff --git a/connector_jira/views/jira_backend_views.xml b/connector_jira/views/jira_backend_views.xml
deleted file mode 100644
index 8d48f8cb..00000000
--- a/connector_jira/views/jira_backend_views.xml
+++ /dev/null
@@ -1,295 +0,0 @@
-
-
-
-
-
- jira.backend.form
- jira.backend
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- You can use the App Descriptor URL to register a new App on Atlassian Marketplace. Check the module README for the detailed process.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- By clicking on the buttons,
- you will initiate the synchronizations
- with Jira.
- Note that the import or exports
- won't be done directly,
- they will create 'Jobs'
- executed as soon as possible.
-
-
- Once imported,
- some types of records,
- like the products or categories,
- need a manual review.
- You will find the list
- of the new records to review
- in the menu 'Connectors > Checkpoint'.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- (
-
-
- )
-
-
-
-
-
-
-
-
-
- (
-
-
- )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Activate the synchronization of the Epic Link field.
- Only on JIRA Software. The field contains the name of
- the JIRA custom field that contains the Epic Link.
-
- Note that if a project does not synchronize the Epics,
- the field will be empty.
-
+ The checkboxes define which types of JIRA issues will be
+ imported
+ into Odoo. For instance, if you check 'Story', only issues of type
+ Story will be imported. Several choices possible.
+
+
+ There is a direct implication on the Worklogs.
+ When a worklog is done on a JIRA Sub-Task and this type is not
+ sync'ed, the worklog will be attached to the parent Task of the
+ Sub-Task. If the Task is not sync'ed, it will be attached to the
+ Epic. Finally, if there is no Epic, the worklog will not be
+ attached to any task.
+
- The checkboxes define which types of JIRA issues will be
- imported
- into Odoo. For instance, if you check 'Story', only issues of type
- Story will be imported. Several choices possible.
-
-
- There is a direct implication on the Worklogs.
- When a worklog is done on a JIRA Sub-Task and this type is not
- sync'ed, the worklog will be attached to the parent Task of the
- Sub-Task. If the Task is not sync'ed, it will be attached to the
- Epic. Finally, if there is no Epic, the worklog will not be
- attached to any task.
-