-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmodels.py
6820 lines (5862 loc) · 301 KB
/
models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# -*- coding: utf-8 -*-
# Part of modoo. See LICENSE file for full copyright and licensing details.
"""
Object Relational Mapping module:
* Hierarchical structure
* Constraints consistency and validation
* Object metadata depends on its status
* Optimised processing by complex query (multiple actions at once)
* Default field values
* Permissions optimisation
* Persistent object: DB postgresql
* Data conversion
* Multi-level caching system
* Two different inheritance mechanisms
* Rich set of field types:
- classical (varchar, integer, boolean, ...)
- relational (one2many, many2one, many2many)
- functional
"""
import collections
import contextlib
import datetime
import dateutil
import fnmatch
import functools
import itertools
import io
import logging
import operator
import pytz
import re
import uuid
from collections import defaultdict, OrderedDict
from collections.abc import MutableMapping
from contextlib import closing
from inspect import getmembers, currentframe
from operator import attrgetter, itemgetter
import babel.dates
import dateutil.relativedelta
import psycopg2, psycopg2.extensions
from lxml import etree
from lxml.builder import E
from psycopg2.extensions import AsIs
import modoo
from . import SUPERUSER_ID
from . import api
from . import tools
from .exceptions import AccessError, MissingError, ValidationError, UserError
from .osv.query import Query
from .tools import frozendict, lazy_classproperty, ormcache, \
Collector, LastOrderedSet, OrderedSet, IterableGenerator, \
groupby, discardattr, partition
from .tools.config import config
from .tools.func import frame_codeinfo
from .tools.misc import CountingStream, clean_context, DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT, get_lang
from .tools.translate import _
from .tools import date_utils
from .tools import populate
from .tools import unique
from .tools.lru import LRU
_logger = logging.getLogger(__name__)
_schema = logging.getLogger(__name__ + '.schema')
_unlink = logging.getLogger(__name__ + '.unlink')

# Validates 'order' specifications such as "name desc, id": one or more
# comma-separated terms, each an (optionally quoted) identifier with an
# optional asc/desc modifier; the lookbehind forbids a trailing comma.
# NOTE: made a raw string — '\s' in a plain literal is an invalid escape
# sequence (DeprecationWarning since Python 3.6, a SyntaxError in future
# versions); the compiled pattern is unchanged.
regex_order = re.compile(r'^(\s*([a-z0-9:_]+|"[a-z0-9:_]+")(\s+(desc|asc))?\s*(,|$))+(?<!,)$', re.I)
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
regex_pg_name = re.compile(r'^[a-z_][a-z0-9_$]*$', re.I)
# parses aggregate specs of the form "field", "field:agg" or "field:agg(name)"
regex_field_agg = re.compile(r'(\w+)(?::(\w+)(?:\((\w+)\))?)?')

# threshold above which stored fields are recomputed in batches during auto-init
AUTOINIT_RECALCULATE_STORED_FIELDS = 1000
def check_object_name(name):
    """ Return whether ``name`` is a valid model name.

    The _name attribute in osv and osv_memory objects is subject to some
    restrictions (lowercase letters, digits, underscores and dots only).

    TODO: this is an approximation. Its goal is to disallow uppercase
    characters, because table/column names are quoted in some places and
    not in others, which leads to errors such as:
    psycopg2.ProgrammingError: relation "xxx" does not exist.
    The same restriction should apply to both osv and osv_memory objects
    for consistency.
    """
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    """ Raise a ``ValueError`` if ``name`` is not a valid model name. """
    if check_object_name(name):
        return
    raise ValueError("The _name attribute %s is not valid." % name)
def check_pg_name(name):
    """ Check whether the given name is a valid PostgreSQL identifier name. """
    if regex_pg_name.match(name) is None:
        raise ValidationError("Invalid characters in table name %r" % name)
    # PostgreSQL truncates identifiers longer than 63 characters
    if len(name) > 63:
        raise ValidationError("Table name %r is too long" % name)
# match private methods, to prevent their remote invocation
regex_private = re.compile(r'^(_.*|init)$')


def check_method_name(name):
    """ Raise an ``AccessError`` if ``name`` is a private method name. """
    if regex_private.match(name) is not None:
        raise AccessError(_('Private methods (such as %s) cannot be called remotely.') % (name,))
def same_name(f, g):
    """ Test whether functions ``f`` and ``g`` are identical or have the same name """
    if f == g:
        return True
    # distinct sentinel defaults (0 vs 1) ensure two objects without a
    # __name__ attribute never compare as having "the same name"
    return getattr(f, '__name__', 0) == getattr(g, '__name__', 1)
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    # rewrite "field.id" as "field/.id" and "field:id" as "field/id",
    # so both forms split cleanly on '/'
    with_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    with_external_id = re.sub(r'([^/]):id', r'\1/id', with_db_id)
    return with_external_id.split('/')
def trigger_tree_merge(node1, node2):
    """ Merge two trigger trees: fold every entry of ``node2`` into ``node1``. """
    for key, subtree in node2.items():
        if key is None:
            # leaf entry: a collection of fields, merged into node1's leaf set
            node1.setdefault(None, OrderedSet()).update(subtree)
        else:
            # inner entry: recurse into the corresponding subtree of node1
            trigger_tree_merge(node1.setdefault(key, {}), subtree)
class MetaModel(api.Meta):
    """ The metaclass of all model classes.
        Its main purpose is to register the models per module.
    """
    # mapping: module name -> list of model classes defined by that module;
    # shared across all model classes (class attribute on the metaclass)
    module_to_models = defaultdict(list)

    def __new__(meta, name, bases, attrs):
        # this prevents assignment of non-fields on recordsets
        attrs.setdefault('__slots__', ())
        # this collects the fields defined on the class (via Field.__set_name__())
        attrs.setdefault('_field_definitions', [])
        if attrs.get('_register', True):
            # determine '_module': third component of the dotted module path,
            # i.e. the addon name under 'modoo.addons.<addon>....'
            if '_module' not in attrs:
                module = attrs['__module__']
                assert module.startswith('modoo.addons.'), \
                    f"Invalid import of {module}.{name}, it should start with 'modoo.addons'."
                attrs['_module'] = module.split('.')[2]
            # determine model '_name' and normalize '_inherit' to a list
            inherit = attrs.get('_inherit', ())
            if isinstance(inherit, str):
                inherit = attrs['_inherit'] = [inherit]
            if '_name' not in attrs:
                # extending a single model in place reuses that model's name;
                # otherwise the Python class name is used as the model name
                attrs['_name'] = inherit[0] if len(inherit) == 1 else name
        return super().__new__(meta, name, bases, attrs)

    def __init__(self, name, bases, attrs):
        super().__init__(name, bases, attrs)
        if not attrs.get('_register', True):
            return
        # Remember which models to instantiate for this module.
        if self._module:
            self.module_to_models[self._module].append(self)
        if not self._abstract and self._name not in self._inherit:
            # this class defines a model: add magic fields

            def add(name, field):
                # unconditionally install the field on the class
                setattr(self, name, field)
                field.__set_name__(self, name)

            def add_default(name, field):
                # install the field only if the class does not define it itself
                if name not in attrs:
                    setattr(self, name, field)
                    field.__set_name__(self, name)

            add('id', fields.Id(automatic=True))
            add(self.CONCURRENCY_CHECK_FIELD, fields.Datetime(
                string='Last Modified on', automatic=True,
                compute='_compute_concurrency_field', compute_sudo=False))
            add_default('display_name', fields.Char(
                string='Display Name', automatic=True, compute='_compute_display_name'))
            if attrs.get('_log_access', self._auto):
                add_default('create_uid', fields.Many2one(
                    'res.users', string='Created by', automatic=True, readonly=True))
                add_default('create_date', fields.Datetime(
                    string='Created on', automatic=True, readonly=True))
                add_default('write_uid', fields.Many2one(
                    'res.users', string='Last Updated by', automatic=True, readonly=True))
                add_default('write_date', fields.Datetime(
                    string='Last Updated on', automatic=True, readonly=True))
class NewId(object):
    """ Pseudo-ids for new records, encapsulating an optional origin id (actual
        record id) and an optional reference (any value).
    """
    __slots__ = ['origin', 'ref']

    def __init__(self, origin=None, ref=None):
        self.origin = origin
        self.ref = ref

    def __bool__(self):
        # a NewId is always falsy, like an unsaved record
        return False

    def __eq__(self, other):
        # two NewIds match when their origins agree, or failing that, when
        # their refs agree; falsy origins/refs never participate in a match
        if not isinstance(other, NewId):
            return False
        return (
            (self.origin and other.origin and self.origin == other.origin)
            or (self.ref and other.ref and self.ref == other.ref)
        )

    def __hash__(self):
        # consistent with __eq__: hash on origin, then ref, then identity
        return hash(self.origin or self.ref or id(self))

    def __repr__(self):
        if self.origin:
            return "<NewId origin=%r>" % self.origin
        if self.ref:
            return "<NewId ref=%r>" % self.ref
        return "<NewId 0x%x>" % id(self)

    def __str__(self):
        has_identity = self.origin or self.ref
        id_part = repr(self.origin or self.ref) if has_identity else hex(id(self))
        return "NewId_%s" % id_part
def origin_ids(ids):
    """ Return an iterator over the origin ids corresponding to ``ids``.
    Actual ids are returned as is, and ids without origin are not returned.
    """
    for id_ in ids:
        # real ids are truthy; NewIds are falsy and contribute their origin
        if id_ or getattr(id_, "origin", None):
            yield id_ or id_.origin
def expand_ids(id0, ids):
    """ Return an iterator of unique ids from the concatenation of ``[id0]`` and
    ``ids``, and of the same kind (all real or all new).
    """
    yield id0
    emitted = {id0}
    # real ids are truthy, new ids falsy: only keep ids of id0's kind
    is_real = bool(id0)
    for other in ids:
        if bool(other) == is_real and other not in emitted:
            emitted.add(other)
            yield other
# accepted Python types for record ids: real database ids (int), xml-id
# strings, and pseudo-ids of new records
IdType = (int, str, NewId)

# maximum number of prefetched records
PREFETCH_MAX = 1000

# special columns automatically created by the ORM
LOG_ACCESS_COLUMNS = ['create_uid', 'create_date', 'write_uid', 'write_date']
MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS

# valid SQL aggregation functions
VALID_AGGREGATE_FUNCTIONS = {
    'array_agg', 'count', 'count_distinct',
    'bool_and', 'bool_or', 'max', 'min', 'avg', 'sum',
}
# THE DEFINITION AND REGISTRY CLASSES
#
# The framework deals with two kinds of classes for models: the "definition"
# classes and the "registry" classes.
#
# The "definition" classes are the ones defined in modules source code: they
# define models and extend them. Those classes are essentially "static", for
# whatever that means in Python. The only exception is custom models: their
# definition class is created dynamically.
#
# The "registry" classes are the ones you find in the registry. They are the
# actual classes of the recordsets of their model. The "registry" class of a
# model is created dynamically when the registry is built. It inherits (in the
# Python sense) from all the definition classes of the model, and possibly other
# registry classes (when the model inherits from another model). It also
# carries model metadata inferred from its parent classes.
#
#
# THE REGISTRY CLASS OF A MODEL
#
# In the simplest case, a model's registry class inherits from all the classes
# that define the model in a flat hierarchy. Consider the model definition
# below. The registry class of model 'a' inherits from the definition classes
# A1, A2, A3, in reverse order, to match the expected overriding order. The
# registry class carries inferred metadata that is shared between all the
# model's instances for a given registry.
#
# class A1(Model): Model
# _name = 'a' / | \
# A3 A2 A1 <- definition classes
# class A2(Model): \ | /
# _inherit = 'a' a <- registry class: registry['a']
# |
# class A3(Model): records <- model instances, like env['a']
# _inherit = 'a'
#
# Note that when the model inherits from another model, we actually make the
# registry classes inherit from each other, so that extensions to an inherited
# model are visible in the registry class of the child model, like in the
# following example.
#
# class A1(Model):
# _name = 'a' Model
# / / \ \
# class B1(Model): / / \ \
# _name = 'b' / A2 A1 \
# B2 \ / B1
# class B2(Model): \ \ / /
# _name = 'b' \ a /
# _inherit = ['a', 'b'] \ | /
# \ | /
# class A2(Model): b
# _inherit = 'a'
#
#
# THE FIELDS OF A MODEL
#
# The fields of a model are given by the model's definition classes, inherited
# models ('_inherit' and '_inherits') and other parties, like custom fields.
# Note that a field can be partially overridden when it appears on several
# definition classes of its model. In that case, the field's final definition
# depends on the presence or absence of each definition class, which itself
# depends on the modules loaded in the registry.
#
# By design, the registry class has access to all the fields on the model's
# definition classes. When possible, the field is used directly from the
# model's registry class. There are a number of cases where the field cannot be
# used directly:
# - the field is related (and bits may not be shared);
# - the field is overridden on definition classes;
# - the field is defined for another model (and accessible by mixin).
#
# The last case prevents sharing the field, because the field object is specific
# to a model, and is used as a key in several key dictionaries, like the record
# cache and pending computations.
#
# Setting up a field on its definition class helps save memory and time.
# Indeed, when sharing is possible, the field's setup is almost entirely done
# where the field was defined. It is thus done when the definition class was
# created, and it may be reused across registries.
#
# In the example below, the field 'foo' appears once on its model's definition
# classes. Assuming that it is not related, that field can be set up directly
# on its definition class. If the model appears in several registries, the
# field 'foo' is effectively shared across registries.
#
# class A1(Model): Model
# _name = 'a' / \
# foo = ... / \
# bar = ... A2 A1
# bar foo, bar
# class A2(Model): \ /
# _inherit = 'a' \ /
# bar = ... a
# bar
#
# On the other hand, the field 'bar' is overridden in its model's definition
# classes. In that case, the framework recreates the field on the model's
# registry class. The field's setup will be based on its definitions, and will
# not be shared across registries.
#
# The so-called magic fields ('id', 'display_name', ...) used to be added on
# registry classes. But doing so prevents them from being shared. So instead,
# we add them on definition classes that define a model without extending it.
# This increases the number of fields that are shared across registries.
def is_definition_class(cls):
    """ Return whether ``cls`` is a model definition class. """
    # definition classes are MetaModel instances not yet linked to a registry
    if not isinstance(cls, MetaModel):
        return False
    return getattr(cls, 'pool', None) is None
def is_registry_class(cls):
    """ Return whether ``cls`` is a model registry class. """
    # registry classes are the only ones linked to a registry via 'pool'
    pool = getattr(cls, 'pool', None)
    return pool is not None
class BaseModel(metaclass=MetaModel):
    """Base class for modoo models.

    modoo models are created by inheriting one of the following:

    * :class:`Model` for regular database-persisted models

    * :class:`TransientModel` for temporary data, stored in the database but
      automatically vacuumed every so often

    * :class:`AbstractModel` for abstract super classes meant to be shared by
      multiple inheriting models

    The system automatically instantiates every model once per database. Those
    instances represent the available models on each database, and depend on
    which modules are installed on that database. The actual class of each
    instance is built from the Python classes that create and inherit from the
    corresponding model.

    Every model instance is a "recordset", i.e., an ordered collection of
    records of the model. Recordsets are returned by methods like
    :meth:`~.browse`, :meth:`~.search`, or field accesses. Records have no
    explicit representation: a record is represented as a recordset of one
    record.

    To create a class that should not be instantiated,
    the :attr:`~modoo.models.BaseModel._register` attribute may be set to False.
    """
    # restricts per-instance attributes to these three slots
    __slots__ = ['env', '_ids', '_prefetch_ids']

    _auto = False
    """Whether a database table should be created.
    If set to ``False``, override :meth:`~modoo.models.BaseModel.init`
    to create the database table.

    Automatically defaults to `True` for :class:`Model` and
    :class:`TransientModel`, `False` for :class:`AbstractModel`.

    .. tip:: To create a model without any table, inherit
            from :class:`~modoo.models.AbstractModel`.
    """
    _register = False           #: registry visibility
    _abstract = True
    """ Whether the model is *abstract*.

    .. seealso:: :class:`AbstractModel`
    """
    _transient = False
    """ Whether the model is *transient*.

    .. seealso:: :class:`TransientModel`
    """

    _name = None                #: the model name (in dot-notation, module namespace)
    _description = None         #: the model's informal name
    _module = None              #: the model's module (in the modoo sense)
    _custom = False             #: should be True for custom models only

    _inherit = ()
    """Python-inherited models:

    :type: str or list(str)

    .. note::

        * If :attr:`._name` is set, name(s) of parent models to inherit from
        * If :attr:`._name` is unset, name of a single model to extend in-place
    """
    _inherits = frozendict()
    """dictionary {'parent_model': 'm2o_field'} mapping the _name of the parent business
    objects to the names of the corresponding foreign key fields to use::

        _inherits = {
            'a.model': 'a_field_id',
            'b.model': 'b_field_id'
        }

    implements composition-based inheritance: the new model exposes all
    the fields of the inherited models but stores none of them:
    the values themselves remain stored on the linked record.

    .. warning::

      if multiple fields with the same name are defined in the
      :attr:`~modoo.models.Model._inherits`-ed models, the inherited field will
      correspond to the last one (in the inherits list order).
    """
    _table = None               #: SQL table name used by model if :attr:`_auto`
    _table_query = None         #: SQL expression of the table's content (optional)
    _sequence = None            #: SQL sequence to use for ID field
    _sql_constraints = []       #: SQL constraints [(name, sql_def, message)]

    _rec_name = None            #: field to use for labeling records, default: ``name``
    _order = 'id'               #: default order field for searching results
    _parent_name = 'parent_id'  #: the many2one field used as parent field
    _parent_store = False
    """set to True to compute parent_path field.

    Alongside a :attr:`~.parent_path` field, sets up an indexed storage
    of the tree structure of records, to enable faster hierarchical queries
    on the records of the current model using the ``child_of`` and
    ``parent_of`` domain operators.
    """
    _active_name = None         #: field to use for active records
    _date_name = 'date'         #: field to use for default calendar view
    _fold_name = 'fold'         #: field to determine folded groups in kanban views

    _needaction = False         # whether the model supports "need actions" (Old API)
    _translate = True           # False disables translations export for this model (Old API)
    _check_company_auto = False
    """On write and create, call ``_check_company`` to ensure companies
    consistency on the relational fields having ``check_company=True``
    as attribute.
    """

    _depends = frozendict()
    """dependencies of models backed up by SQL views
    ``{model_name: field_names}``, where ``field_names`` is an iterable.
    This is only used to determine the changes to flush to database before
    executing ``search()`` or ``read_group()``. It won't be used for cache
    invalidation or recomputing fields.
    """

    # default values for _transient_vacuum()
    _transient_max_count = lazy_classproperty(lambda _: config.get('osv_memory_count_limit'))
    _transient_max_hours = lazy_classproperty(lambda _: config.get('transient_age_limit'))

    # name of the pseudo-field used for optimistic concurrency checking
    CONCURRENCY_CHECK_FIELD = '__last_update'
@api.model
def view_init(self, fields_list):
""" Override this method to do specific things when a form view is
opened. This method is invoked by :meth:`~default_get`.
"""
pass
def _valid_field_parameter(self, field, name):
""" Return whether the given parameter name is valid for the field. """
return name == 'related_sudo'
@api.model
def _add_field(self, name, field):
""" Add the given ``field`` under the given ``name`` in the class """
cls = type(self)
# add field as an attribute and in cls._fields (for reflection)
if not isinstance(getattr(cls, name, field), Field):
_logger.warning("In model %r, field %r overriding existing value", cls._name, name)
setattr(cls, name, field)
field._toplevel = True
field.__set_name__(cls, name)
cls._fields[name] = field
@api.model
def _pop_field(self, name):
""" Remove the field with the given ``name`` from the model.
This method should only be used for manual fields.
"""
cls = type(self)
field = cls._fields.pop(name, None)
discardattr(cls, name)
if cls._rec_name == name:
# fixup _rec_name and display_name's dependencies
cls._rec_name = None
if cls.display_name in cls.pool.field_depends:
cls.pool.field_depends[cls.display_name] = tuple(
dep for dep in cls.pool.field_depends[cls.display_name] if dep != name
)
return field
@api.depends(lambda model: ('create_date', 'write_date') if model._log_access else ())
def _compute_concurrency_field(self):
fname = self.CONCURRENCY_CHECK_FIELD
if self._log_access:
for record in self:
record[fname] = record.write_date or record.create_date or Datetime.now()
else:
self[fname] = modoo.fields.Datetime.now()
    #
    # Goal: try to apply inheritance at the instantiation level and
    # put objects in the pool var
    #
    @classmethod
    def _build_model(cls, pool, cr):
        """ Instantiate a given model in the registry.

        This method creates or extends a "registry" class for the given model.
        This "registry" class carries inferred model metadata, and inherits (in
        the Python sense) from all classes that define the model, and possibly
        other registry classes.

        :param pool: the registry (mapping of model name -> registry class)
        :param cr: database cursor, passed on to the deprecated ``__init__``
        :return: the registry class for the model
        """
        if getattr(cls, '_constraints', None):
            _logger.warning("Model attribute '_constraints' is no longer supported, "
                            "please use @api.constrains on methods instead.")

        # Keep links to non-inherited constraints in cls; this is useful for
        # instance when exporting translations
        cls._local_sql_constraints = cls.__dict__.get('_sql_constraints', [])

        # all models except 'base' implicitly inherit from 'base'
        name = cls._name
        parents = list(cls._inherit)
        if name != 'base':
            parents.append('base')

        # create or retrieve the model's class: when the model's own name is
        # among its parents, cls extends an existing model in place
        if name in parents:
            if name not in pool:
                raise TypeError("Model %r does not exist in registry." % name)
            ModelClass = pool[name]
            ModelClass._build_model_check_base(cls)
            check_parent = ModelClass._build_model_check_parent
        else:
            ModelClass = type(name, (cls,), {
                '_name': name,
                '_register': False,
                '_original_module': cls._module,
                '_inherit_module': {},                  # map parent to introducing module
                '_inherit_children': OrderedSet(),      # names of children models
                '_inherits_children': set(),            # names of children models
                '_fields': {},                          # populated in _setup_base()
            })
            check_parent = cls._build_model_check_parent

        # determine all the classes the model should inherit from
        bases = LastOrderedSet([cls])
        for parent in parents:
            if parent not in pool:
                raise TypeError("Model %r inherits from non-existing model %r." % (name, parent))
            parent_class = pool[parent]
            if parent == name:
                # self-inheritance: reuse the previous registry class's bases
                for base in parent_class.__base_classes:
                    bases.add(base)
            else:
                check_parent(cls, parent_class)
                bases.add(parent_class)
                ModelClass._inherit_module[parent] = cls._module
                parent_class._inherit_children.add(name)

        # ModelClass.__bases__ must be assigned those classes; however, this
        # operation is quite slow, so we do it once in method _prepare_setup()
        ModelClass.__base_classes = tuple(bases)

        # determine the attributes of the model's class
        ModelClass._build_model_attributes(pool)

        check_pg_name(ModelClass._table)

        # Transience
        if ModelClass._transient:
            assert ModelClass._log_access, \
                "TransientModels must have log_access turned on, " \
                "in order to implement their vacuum policy"

        # link the class to the registry, and update the registry
        ModelClass.pool = pool
        pool[name] = ModelClass

        # backward compatibility: instantiate the model, and initialize it
        model = object.__new__(ModelClass)
        model.__init__(pool, cr)

        return ModelClass
@classmethod
def _build_model_check_base(model_class, cls):
""" Check whether ``model_class`` can be extended with ``cls``. """
if model_class._abstract and not cls._abstract:
msg = ("%s transforms the abstract model %r into a non-abstract model. "
"That class should either inherit from AbstractModel, or set a different '_name'.")
raise TypeError(msg % (cls, model_class._name))
if model_class._transient != cls._transient:
if model_class._transient:
msg = ("%s transforms the transient model %r into a non-transient model. "
"That class should either inherit from TransientModel, or set a different '_name'.")
else:
msg = ("%s transforms the model %r into a transient model. "
"That class should either inherit from Model, or set a different '_name'.")
raise TypeError(msg % (cls, model_class._name))
@classmethod
def _build_model_check_parent(model_class, cls, parent_class):
""" Check whether ``model_class`` can inherit from ``parent_class``. """
if model_class._abstract and not parent_class._abstract:
msg = ("In %s, the abstract model %r cannot inherit from the non-abstract model %r.")
raise TypeError(msg % (cls, model_class._name, parent_class._name))
    @classmethod
    def _build_model_attributes(cls, pool):
        """ Initialize base model attributes (description, table name,
        sequence, log access, inherits, depends, SQL constraints) by
        accumulating them over the model's definition classes, then
        propagate the recomputation to inheriting models.
        """
        cls._description = cls._name
        cls._table = cls._name.replace('.', '_')
        cls._sequence = None
        cls._log_access = cls._auto
        inherits = {}
        depends = {}
        cls._sql_constraints = {}

        # iterate in reverse MRO order so that later (higher-priority)
        # definition classes override earlier ones
        for base in reversed(cls.__base_classes):
            if is_definition_class(base):
                # the following attributes are not taken from registry classes
                if cls._name not in base._inherit and not base._description:
                    _logger.warning("The model %s has no _description", cls._name)
                cls._description = base._description or cls._description
                cls._table = base._table or cls._table
                cls._sequence = base._sequence or cls._sequence
                cls._log_access = getattr(base, '_log_access', cls._log_access)

            inherits.update(base._inherits)

            for mname, fnames in base._depends.items():
                depends.setdefault(mname, []).extend(fnames)

            # keyed by constraint name: a later base overrides a same-named one
            for cons in base._sql_constraints:
                cls._sql_constraints[cons[0]] = cons

        cls._sequence = cls._sequence or (cls._table + '_id_seq')
        cls._sql_constraints = list(cls._sql_constraints.values())

        # avoid assigning an empty dict to save memory
        if inherits:
            cls._inherits = inherits
        if depends:
            cls._depends = depends

        # update _inherits_children of parent models
        for parent_name in cls._inherits:
            pool[parent_name]._inherits_children.add(cls._name)

        # recompute attributes of _inherit_children models
        for child_name in cls._inherit_children:
            child_class = pool[child_name]
            child_class._build_model_attributes(pool)
@classmethod
def _init_constraints_onchanges(cls):
# store list of sql constraint qualified names
for (key, _, _) in cls._sql_constraints:
cls.pool._sql_constraints.add(cls._table + '_' + key)
# reset properties memoized on cls
cls._constraint_methods = BaseModel._constraint_methods
cls._ondelete_methods = BaseModel._ondelete_methods
cls._onchange_methods = BaseModel._onchange_methods
@property
def _constraint_methods(self):
""" Return a list of methods implementing Python constraints. """
def is_constraint(func):
return callable(func) and hasattr(func, '_constrains')
def wrap(func, names):
# wrap func into a proxy function with explicit '_constrains'
@api.constrains(*names)
def wrapper(self):
return func(self)
return wrapper
cls = type(self)
methods = []
for attr, func in getmembers(cls, is_constraint):
if callable(func._constrains):
func = wrap(func, func._constrains(self))
for name in func._constrains:
field = cls._fields.get(name)
if not field:
_logger.warning("method %s.%s: @constrains parameter %r is not a field name", cls._name, attr, name)
elif not (field.store or field.inverse or field.inherited):
_logger.warning("method %s.%s: @constrains parameter %r is not writeable", cls._name, attr, name)
methods.append(func)
# optimization: memoize result on cls, it will not be recomputed
cls._constraint_methods = methods
return methods
@property
def _ondelete_methods(self):
""" Return a list of methods implementing checks before unlinking. """
def is_ondelete(func):
return callable(func) and hasattr(func, '_ondelete')
cls = type(self)
methods = [func for _, func in getmembers(cls, is_ondelete)]
# optimization: memoize results on cls, it will not be recomputed
cls._ondelete_methods = methods
return methods
@property
def _onchange_methods(self):
""" Return a dictionary mapping field names to onchange methods. """
def is_onchange(func):
return callable(func) and hasattr(func, '_onchange')
# collect onchange methods on the model's class
cls = type(self)
methods = defaultdict(list)
for attr, func in getmembers(cls, is_onchange):
missing = []
for name in func._onchange:
if name not in cls._fields:
missing.append(name)
methods[name].append(func)
if missing:
_logger.warning(
"@api.onchange%r parameters must be field names -> not valid: %s",
func._onchange, missing
)
# add onchange methods to implement "change_default" on fields
def onchange_default(field, self):
value = field.convert_to_write(self[field.name], self)
condition = "%s=%s" % (field.name, value)
defaults = self.env['ir.default'].get_model_defaults(self._name, condition)
self.update(defaults)
for name, field in cls._fields.items():
if field.change_default:
methods[name].append(functools.partial(onchange_default, field))
# optimization: memoize result on cls, it will not be recomputed
cls._onchange_methods = methods
return methods
def __new__(cls):
# In the past, this method was registering the model class in the server.
# This job is now done entirely by the metaclass MetaModel.
return None
def __init__(self, pool, cr):
""" Deprecated method to initialize the model. """
pass
def _is_an_ordinary_table(self):
return self.pool.is_an_ordinary_table(self)
def __ensure_xml_id(self, skip=False):
    """ Create missing external ids for records in ``self``, and return an
    iterator of pairs ``(record, xmlid)`` for the records in ``self``.

    :param bool skip: if true, look up and create nothing and yield
        ``(record, None)`` for every record instead
    :rtype: Iterable[Model, str | None]
    """
    if skip:
        return ((record, None) for record in self)

    if not self:
        return iter([])

    # exporting the ID column only makes sense for a real SQL table
    if not self._is_an_ordinary_table():
        raise Exception(
            "You can not export the column ID of model %s, because the "
            "table %s is not an ordinary table."
            % (self._name, self._table))

    # module under which the generated external ids are filed
    modname = '__export__'

    # fetch the already-existing xmlids of the records, keyed by record id
    cr = self.env.cr
    cr.execute("""
        SELECT res_id, module, name
        FROM ir_model_data
        WHERE model = %s AND res_id in %s
    """, (self._name, tuple(self.ids)))
    xids = {
        res_id: (module, name)
        for res_id, module, name in cr.fetchall()
    }

    def to_xid(record_id):
        # render the (module, name) pair of ``record_id`` as an xmlid string
        (module, name) = xids[record_id]
        return ('%s.%s' % (module, name)) if module else name

    # create missing xml ids
    missing = self.filtered(lambda r: r.id not in xids)
    if not missing:
        return (
            (record, to_xid(record.id))
            for record in self
        )

    # generate a name for each missing record from its table, id and a
    # random suffix (to avoid collisions)
    xids.update(
        (r.id, (modname, '%s_%s_%s' % (
            r._table,
            r.id,
            uuid.uuid4().hex[:8],
        )))
        for r in missing
    )
    fields = ['module', 'model', 'name', 'res_id']

    # disable eventual async callback / support for the extent of
    # the COPY FROM, as these are apparently incompatible
    callback = psycopg2.extensions.get_wait_callback()
    psycopg2.extensions.set_wait_callback(None)
    try:
        # bulk-insert the new xmlid rows with COPY FROM (tab-separated)
        cr.copy_from(io.StringIO(
            u'\n'.join(
                u"%s\t%s\t%s\t%d" % (
                    modname,
                    record._name,
                    xids[record.id][1],
                    record.id,
                )
                for record in missing
            )),
            table='ir_model_data',
            columns=fields,
        )
    finally:
        psycopg2.extensions.set_wait_callback(callback)
    # rows were inserted behind the ORM's back: drop the stale cache
    self.env['ir.model.data'].invalidate_cache(fnames=fields)

    return (
        (record, to_xid(record.id))
        for record in self
    )
def _export_rows(self, fields, *, _is_toplevel_call=True):
""" Export fields of the records in ``self``.
:param fields: list of lists of fields to traverse
:param bool _is_toplevel_call:
used when recursing, avoid using when calling from outside
:return: list of lists of corresponding values
"""
import_compatible = self.env.context.get('import_compat', True)
lines = []
def splittor(rs):
""" Splits the self recordset in batches of 1000 (to avoid
entire-recordset-prefetch-effects) & removes the previous batch
from the cache after it's been iterated in full
"""
for idx in range(0, len(rs), 1000):
sub = rs[idx:idx+1000]
for rec in sub:
yield rec
rs.invalidate_cache(ids=sub.ids)
if not _is_toplevel_call:
splittor = lambda rs: rs
# memory stable but ends up prefetching 275 fields (???)
for record in splittor(self):
# main line of record, initially empty
current = [''] * len(fields)
lines.append(current)
# list of primary fields followed by secondary field(s)
primary_done = []
# process column by column
for i, path in enumerate(fields):
if not path:
continue
name = path[0]
if name in primary_done:
continue
if name == '.id':
current[i] = str(record.id)
elif name == 'id':
current[i] = (record._name, record.id)
else:
field = record._fields[name]
value = record[name]
# this part could be simpler, but it has to be done this way
# in order to reproduce the former behavior
if not isinstance(value, BaseModel):
current[i] = field.convert_to_export(value, record)
else:
primary_done.append(name)
# recursively export the fields that follow name; use
# 'display_name' where no subfield is exported
fields2 = [(p[1:] or ['display_name'] if p and p[0] == name else [])
for p in fields]
# in import_compat mode, m2m should always be exported as
# a comma-separated list of xids or names in a single cell
if import_compatible and field.type == 'many2many':
index = None
# find out which subfield the user wants & its
# location as we might not get it as the first
# column we encounter
for name in ['id', 'name', 'display_name']:
with contextlib.suppress(ValueError):
index = fields2.index([name])
break
if index is None:
# not found anything, assume we just want the
# name_get in the first column
name = None
index = i
if name == 'id':