mirror of
https://github.com/bringout/oca-ocb-core.git
synced 2026-04-21 10:32:09 +02:00
Initial commit: Core packages
This commit is contained in:
commit
12c29a983b
9512 changed files with 8379910 additions and 0 deletions
25
odoo-bringout-oca-ocb-base/odoo/modules/__init__.py
Normal file
25
odoo-bringout-oca-ocb-base/odoo/modules/__init__.py
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
""" Modules (also called addons) management.
|
||||
|
||||
"""
|
||||
|
||||
from . import db, graph, loading, migration, module, registry, neutralize
|
||||
|
||||
from odoo.modules.loading import load_modules, reset_modules_state
|
||||
|
||||
from odoo.modules.module import (
|
||||
adapt_version,
|
||||
check_manifest_dependencies,
|
||||
get_module_path,
|
||||
get_module_resource,
|
||||
get_modules,
|
||||
get_modules_with_version,
|
||||
get_resource_from_path,
|
||||
get_resource_path,
|
||||
check_resource_path,
|
||||
initialize_sys_path,
|
||||
get_manifest,
|
||||
load_openerp_module,
|
||||
)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
180
odoo-bringout-oca-ocb-base/odoo/modules/db.py
Normal file
180
odoo-bringout-oca-ocb-base/odoo/modules/db.py
Normal file
|
|
@ -0,0 +1,180 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from psycopg2.extras import Json
|
||||
import logging
|
||||
from enum import IntEnum
|
||||
|
||||
import odoo.modules
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
def is_initialized(cr):
    """ Tell whether the given database has been set up for the ORM.

    A database is initialized by the 'initialize' function below; the
    presence of the ``ir_module_module`` table is used as the marker.
    """
    # table_exists is the cheapest reliable probe for a bootstrapped schema
    table_present = odoo.tools.table_exists(cr, 'ir_module_module')
    return table_present
|
||||
|
||||
def initialize(cr):
    """ Initialize a database for the ORM.

    This executes base/data/base_data.sql, creates the ir_module_categories
    (taken from each module descriptor file), and creates the ir_module_module
    and ir_model_data entries.

    :param cr: database cursor
    :raises IOError: if ``base_data.sql`` cannot be located
    """
    f = odoo.modules.get_module_resource('base', 'data', 'base_data.sql')
    if not f:
        # fixed message: the file looked up above is base_data.sql, not base.sql
        m = "File not found: 'base_data.sql' (provided by module 'base')."
        _logger.critical(m)
        raise IOError(m)

    with odoo.tools.misc.file_open(f) as base_sql_file:
        cr.execute(base_sql_file.read())  # pylint: disable=sql-injection

    for i in odoo.modules.get_modules():
        mod_path = odoo.modules.get_module_path(i)
        if not mod_path:
            continue

        # This will raise an exception if no/unreadable descriptor file.
        info = odoo.modules.get_manifest(i)

        if not info:
            continue
        categories = info['category'].split('/')
        category_id = create_categories(cr, categories)

        state = 'uninstalled' if info['installable'] else 'uninstallable'

        cr.execute('INSERT INTO ir_module_module \
                (author, website, name, shortdesc, description, \
                category_id, auto_install, state, web, license, application, icon, sequence, summary) \
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id', (
            info['author'],
            info['website'], i, Json({'en_US': info['name']}),
            Json({'en_US': info['description']}), category_id,
            info['auto_install'] is not False, state,
            info['web'],
            info['license'],
            info['application'], info['icon'],
            info['sequence'], Json({'en_US': info['summary']})))
        # renamed from `id` to avoid shadowing the builtin
        module_id = cr.fetchone()[0]
        cr.execute('INSERT INTO ir_model_data \
            (name,model,module, res_id, noupdate) VALUES (%s,%s,%s,%s,%s)', (
                'module_'+i, 'ir.module.module', 'base', module_id, True))
        dependencies = info['depends']
        for d in dependencies:
            cr.execute(
                'INSERT INTO ir_module_module_dependency (module_id, name, auto_install_required)'
                ' VALUES (%s, %s, %s)',
                # a dependency is auto_install_required when listed in the
                # manifest's auto_install collection (True means "all deps")
                (module_id, d, d in (info['auto_install'] or ()))
            )

    # Install recursively all auto-installing modules
    while True:
        # this selects all the auto_install modules whose auto_install_required
        # deps are marked as to install
        cr.execute("""
            SELECT m.name FROM ir_module_module m
            WHERE m.auto_install
            AND state != 'to install'
            AND NOT EXISTS (
                SELECT 1 FROM ir_module_module_dependency d
                JOIN ir_module_module mdep ON (d.name = mdep.name)
                WHERE d.module_id = m.id
                AND d.auto_install_required
                AND mdep.state != 'to install'
            )""")
        to_auto_install = [x[0] for x in cr.fetchall()]

        # however if the module has non-required deps we need to install
        # those, so merge-in the modules which have a dependen*t* which is
        # *either* to_install or in to_auto_install and merge it in?
        cr.execute("""
            SELECT d.name FROM ir_module_module_dependency d
            JOIN ir_module_module m ON (d.module_id = m.id)
            JOIN ir_module_module mdep ON (d.name = mdep.name)
            WHERE (m.state = 'to install' OR m.name = any(%s))
                -- don't re-mark marked modules
            AND NOT (mdep.state = 'to install' OR mdep.name = any(%s))
        """, [to_auto_install, to_auto_install])
        to_auto_install.extend(x[0] for x in cr.fetchall())

        # fixed point reached: nothing new to mark
        if not to_auto_install:
            break
        cr.execute("""UPDATE ir_module_module SET state='to install' WHERE name in %s""", (tuple(to_auto_install),))
|
||||
|
||||
def create_categories(cr, categories):
    """ Create the ir_module_category rows for a category path.

    ``categories`` lists a single category together with its ancestors,
    e.g. ['Grand Parent', 'Parent', 'Child'].

    Return the database id of the last (deepest) category.
    """
    parent_id = None
    path_so_far = []
    for label in categories:
        path_so_far.append(label)
        # the xml_id encodes the whole ancestor path, lowercased/sanitized
        xml_id = 'module_category_' + ('_'.join(part.lower() for part in path_so_far)).replace('&', 'and').replace(' ', '_')
        # search via xml_id (because some categories are renamed)
        cr.execute("SELECT res_id FROM ir_model_data WHERE name=%s AND module=%s AND model=%s",
                   (xml_id, "base", "ir.module.category"))

        row = cr.fetchone()
        if row:
            category_id = row[0]
        else:
            cr.execute('INSERT INTO ir_module_category \
                    (name, parent_id) \
                    VALUES (%s, %s) RETURNING id', (Json({'en_US': label}), parent_id))
            category_id = cr.fetchone()[0]
            cr.execute('INSERT INTO ir_model_data (module, name, res_id, model, noupdate) \
                       VALUES (%s, %s, %s, %s, %s)', ('base', xml_id, category_id, 'ir.module.category', True))
        # the category just resolved becomes the parent of the next level
        parent_id = category_id
    return parent_id
|
||||
|
||||
class FunctionStatus(IntEnum):
    """Availability of a SQL function, as reported by :func:`has_unaccent`."""
    MISSING = 0  # function is not present (falsy)
    PRESENT = 1  # function is present but not indexable (not immutable)
    INDEXABLE = 2  # function is present and indexable (immutable)
|
||||
|
||||
def has_unaccent(cr):
    """ Test whether the database has function 'unaccent' and return its status.

    The unaccent is supposed to be provided by the PostgreSQL unaccent contrib
    module but any similar function will be picked by OpenERP.

    :rtype: FunctionStatus
    """
    cr.execute("""
        SELECT p.provolatile
        FROM pg_proc p
        LEFT JOIN pg_catalog.pg_namespace ns ON p.pronamespace = ns.oid
        WHERE p.proname = 'unaccent'
              AND p.pronargs = 1
              AND ns.nspname = 'public'
    """)
    row = cr.fetchone()
    if not row:
        return FunctionStatus.MISSING
    # The `provolatile` of unaccent tells whether the function may be used to
    # create an index: only 'i' (immutable) qualifies, see
    # https://www.postgresql.org/docs/current/catalog-pg-proc.html.
    volatility = row[0]
    if volatility == 'i':
        return FunctionStatus.INDEXABLE
    return FunctionStatus.PRESENT
|
||||
|
||||
def has_trigram(cr):
    """ Test whether the database has a word_similarity function.

    The word_similarity is supposed to be provided by the PostgreSQL built-in
    pg_trgm module but any similar function will be picked by Odoo.
    """
    cr.execute("SELECT proname FROM pg_proc WHERE proname='word_similarity'")
    # truthy iff at least one matching function exists
    return bool(cr.fetchall())
|
||||
189
odoo-bringout-oca-ocb-base/odoo/modules/graph.py
Normal file
189
odoo-bringout-oca-ocb-base/odoo/modules/graph.py
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
""" Modules dependency graph. """
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
|
||||
import odoo
|
||||
import odoo.tools as tools
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
class Graph(dict):
    """ Modules dependency graph.

    The graph is a mapping from module name to Nodes.

    """

    def add_node(self, name, info):
        """Insert module ``name`` below its deepest already-known dependency.

        Returns the (possibly pre-existing) :class:`Node` for ``name``.
        """
        max_depth, father = 0, None
        for d in info['depends']:
            n = self.get(d) or Node(d, self, None)  # lazy creation, do not use default value for get()
            # keep the deepest dependency as the parent
            if n.depth >= max_depth:
                father = n
                max_depth = n.depth
        if father:
            return father.add_child(name, info)
        else:
            return Node(name, self, info)

    def update_from_db(self, cr):
        """Refresh every node's id/state/dbdemo/installed_version from
        ``ir_module_module`` (defaults are used for rows not in db)."""
        if not len(self):
            return
        # update the graph with values from the database (if exist)
        ## First, we set the default values for each package in graph
        additional_data = {key: {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'installed_version': None} for key in self.keys()}
        ## Then we get the values from the database
        cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version'
                   ' FROM ir_module_module'
                   ' WHERE name IN %s',(tuple(additional_data),)
                   )

        ## and we update the default values with values from the database
        additional_data.update((x['name'], x) for x in cr.dictfetchall())

        for package in self.values():
            for k, v in additional_data[package.name].items():
                setattr(package, k, v)

    def add_module(self, cr, module, force=None):
        """Convenience wrapper around :meth:`add_modules` for one module."""
        self.add_modules(cr, [module], force)

    def add_modules(self, cr, module_list, force=None):
        """Add ``module_list`` (installable modules only) to the graph.

        :param force: list of kinds ('init'/'demo'/'update') forced on the
            new nodes regardless of the command-line configuration
        :return: the number of modules actually added to the graph
        """
        if force is None:
            force = []
        packages = []
        len_graph = len(self)
        for module in module_list:
            info = odoo.modules.module.get_manifest(module)
            if info and info['installable']:
                packages.append((module, info))  # TODO directly a dict, like in get_modules_with_version
            elif module != 'studio_customization':
                _logger.warning('module %s: not installable, skipped', module)

        dependencies = dict([(p, info['depends']) for p, info in packages])
        current, later = set([p for p, info in packages]), set()

        # fixed-point loop: keep cycling the queue until either everything
        # is placed or a full pass adds nothing (current > later fails)
        while packages and current > later:
            package, info = packages[0]
            deps = info['depends']

            # if all dependencies of 'package' are already in the graph, add 'package' in the graph
            if all(dep in self for dep in deps):
                if not package in current:
                    packages.pop(0)
                    continue
                later.clear()
                current.remove(package)
                node = self.add_node(package, info)
                for kind in ('init', 'demo', 'update'):
                    if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force:
                        setattr(node, kind, True)
            else:
                # postponed: requeue at the back and try again later
                later.add(package)
                packages.append((package, info))
            packages.pop(0)

        self.update_from_db(cr)

        for package in later:
            unmet_deps = [p for p in dependencies[package] if p not in self]
            _logger.info('module %s: Unmet dependencies: %s', package, ', '.join(unmet_deps))

        return len(self) - len_graph

    def __iter__(self):
        # yield nodes level by level (dependencies before dependents),
        # sorted by name within a level
        level = 0
        done = set(self.keys())
        while done:
            level_modules = sorted((name, module) for name, module in self.items() if module.depth==level)
            for name, module in level_modules:
                done.remove(name)
                yield module
            level += 1

    def __str__(self):
        return '\n'.join(str(n) for n in self if n.depth == 0)
|
||||
|
||||
class Node(object):
    """ One module in the modules dependency graph.

    Node acts as a per-module singleton. A node is constructed via
    Graph.add_module() or Graph.add_modules(). Some of its fields are from
    ir_module_module (set by Graph.update_from_db()).

    """
    def __new__(cls, name, graph, info):
        # per-name singleton: reuse the node already registered in the graph
        if name in graph:
            inst = graph[name]
        else:
            inst = object.__new__(cls)
            graph[name] = inst
        return inst

    def __init__(self, name, graph, info):
        # NOTE: because of the singleton __new__, __init__ re-runs on the
        # shared instance; existing info/children/depth are preserved.
        self.name = name
        self.graph = graph
        self.info = info or getattr(self, 'info', {})
        if not hasattr(self, 'children'):
            self.children = []
        if not hasattr(self, 'depth'):
            self.depth = 0

    @property
    def data(self):
        # alias kept for callers that expect the manifest under `.data`
        return self.info

    def add_child(self, name, info):
        """Attach (or re-attach) module ``name`` one level below this node,
        propagating any init/update/demo flags set on the parent."""
        node = Node(name, self.graph, info)
        node.depth = self.depth + 1
        if node not in self.children:
            self.children.append(node)
        for attr in ('init', 'update', 'demo'):
            if hasattr(self, attr):
                setattr(node, attr, True)
        self.children.sort(key=lambda x: x.name)
        return node

    def __setattr__(self, name, value):
        # flags and depth cascade recursively to all children; init/update/
        # demo flags are also mirrored into tools.config
        super(Node, self).__setattr__(name, value)
        if name in ('init', 'update', 'demo'):
            tools.config[name][self.name] = 1
            for child in self.children:
                setattr(child, name, value)
        if name == 'depth':
            for child in self.children:
                setattr(child, name, value + 1)

    def __iter__(self):
        # breadth-ish traversal: direct children first, then their contents
        return itertools.chain(
            self.children,
            itertools.chain.from_iterable(self.children)
        )

    def __str__(self):
        return self._pprint()

    def _pprint(self, depth=0):
        # indented tree rendering of this node and its descendants
        s = '%s\n' % self.name
        for c in self.children:
            s += '%s`-> %s' % (' ' * depth, c._pprint(depth+1))
        return s

    def should_have_demo(self):
        # demo flag forced explicitly, or carried over from the db for a
        # not-yet-installed module — and every parent must have demo data too
        return (hasattr(self, 'demo') or (self.dbdemo and self.state != 'installed')) and all(p.dbdemo for p in self.parents)

    @property
    def parents(self):
        # nodes higher in the graph that list this node among their children
        if self.depth == 0:
            return []

        return (
            node for node in self.graph.values()
            if node.depth < self.depth
            if self in node.children
        )
|
||||
652
odoo-bringout-oca-ocb-base/odoo/modules/loading.py
Normal file
652
odoo-bringout-oca-ocb-base/odoo/modules/loading.py
Normal file
|
|
@ -0,0 +1,652 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
""" Modules (also called addons) management.
|
||||
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import itertools
|
||||
import logging
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
|
||||
import odoo
|
||||
import odoo.modules.db
|
||||
import odoo.modules.graph
|
||||
import odoo.modules.migration
|
||||
import odoo.modules.registry
|
||||
from .. import SUPERUSER_ID, api, tools
|
||||
from .module import adapt_version, initialize_sys_path, load_openerp_module
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
_test_logger = logging.getLogger('odoo.tests')
|
||||
|
||||
|
||||
def load_data(cr, idref, mode, kind, package):
    """
    Load the data files of the given ``kind`` declared by ``package``.

    kind: data, demo, test, init_xml, update_xml, demo_xml.

    noupdate is False, unless it is demo data or it is csv data in
    init mode.

    :returns: Whether a file was loaded
    :rtype: bool
    """

    def _get_files_of_kind(kind):
        # BUGFIX: the previous code computed the alias lists below and then
        # unconditionally overwrote them with `keys = [kind]` (guarded by
        # `isinstance(kind, str)`, which is always true here), so the legacy
        # demo_xml/init_xml/update_xml manifest keys were never collected.
        # The branches must be exclusive.
        if kind == 'demo':
            keys = ['demo_xml', 'demo']
        elif kind == 'data':
            keys = ['init_xml', 'update_xml', 'data']
        else:
            keys = [kind]
        files = []
        for k in keys:
            for f in package.data[k]:
                if f in files:
                    _logger.warning("File %s is imported twice in module %s %s", f, package.name, kind)
                files.append(f)
                if k.endswith('_xml') and not (k == 'init_xml' and not f.endswith('.xml')):
                    # init_xml, update_xml and demo_xml are deprecated except
                    # for the case of init_xml with csv and sql files as
                    # we can't specify noupdate for those file.
                    correct_key = 'demo' if k.count('demo') else 'data'
                    _logger.warning(
                        "module %s: key '%s' is deprecated in favor of '%s' for file '%s'.",
                        package.name, k, correct_key, f
                    )
        return files

    filename = None
    try:
        # mark the thread as running tests so demo/test data loads behave
        # like the test runner expects
        if kind in ('demo', 'test'):
            threading.current_thread().testing = True
        for filename in _get_files_of_kind(kind):
            _logger.info("loading %s/%s", package.name, filename)
            noupdate = False
            if kind in ('demo', 'demo_xml') or (filename.endswith('.csv') and kind in ('init', 'init_xml')):
                noupdate = True
            tools.convert_file(cr, package.name, filename, idref, mode, noupdate, kind)
    finally:
        if kind in ('demo', 'test'):
            threading.current_thread().testing = False

    # `filename` is still None when no file of this kind was declared
    return bool(filename)
|
||||
|
||||
def load_demo(cr, package, idref, mode):
    """
    Loads demo data for the specified package.

    :returns: True when demo loading was attempted and committed,
        False when skipped or when it failed (the failure is logged and
        recorded, never propagated)
    :rtype: bool
    """
    if not package.should_have_demo():
        return False

    try:
        if package.data.get('demo') or package.data.get('demo_xml'):
            _logger.info("Module %s: loading demo", package.name)
            # savepoint so a failure only rolls back the demo data,
            # not the module installation itself
            with cr.savepoint(flush=False):
                load_data(cr, idref, mode, kind='demo', package=package)
        return True
    except Exception:  # noqa: BLE001
        # If we could not install demo data for this module
        _logger.warning(
            "Module %s demo data failed to install, installed without demo data",
            package.name, exc_info=True)

        # record the failure so it can be surfaced in the backend
        env = api.Environment(cr, SUPERUSER_ID, {})
        todo = env.ref('base.demo_failure_todo', raise_if_not_found=False)
        Failure = env.get('ir.demo_failure')
        if todo and Failure is not None:
            todo.state = 'open'
            Failure.create({'module_id': package.id, 'error': traceback.format_exc()})
        return False
|
||||
|
||||
|
||||
def force_demo(cr):
    """
    Forces the `demo` flag on all modules, and installs demo data for all installed modules.
    """
    dep_graph = odoo.modules.graph.Graph()
    cr.execute('UPDATE ir_module_module SET demo=True')
    cr.execute(
        "SELECT name FROM ir_module_module WHERE state IN ('installed', 'to upgrade', 'to remove')"
    )
    installed = [row[0] for row in cr.fetchall()]
    dep_graph.add_modules(cr, installed, ['demo'])

    # graph iteration yields dependencies before dependents
    for package in dep_graph:
        load_demo(cr, package, {}, 'init')

    env = api.Environment(cr, SUPERUSER_ID, {})
    env['ir.module.module'].invalidate_model(['demo'])
    env['res.groups']._update_user_groups_view()
|
||||
|
||||
|
||||
def load_module_graph(cr, graph, status=None, perform_checks=True,
                      skip_modules=None, report=None, models_to_check=None):
    """Migrates+Updates or Installs all module nodes from ``graph``

    :param cr: database cursor
    :param graph: graph of module nodes to load
    :param status: deprecated parameter, unused, left to avoid changing signature in 8.0
    :param perform_checks: whether module descriptors should be checked for validity (prints warnings
                           for same cases)
    :param skip_modules: optional list of module names (packages) which have previously been loaded and can be skipped
    :param report: test report, updated in place with the results of each module's test suite
    :param set models_to_check: model names whose schema must be re-checked; mutated in place
    :return: list of modules that were installed or updated
    """
    if models_to_check is None:
        models_to_check = set()

    processed_modules = []
    loaded_modules = []
    registry = odoo.registry(cr.dbname)
    migrations = odoo.modules.migration.MigrationManager(cr, graph)
    module_count = len(graph)
    _logger.info('loading %d modules...', module_count)

    # register, instantiate and initialize models for each modules
    t0 = time.time()
    # counters for the summary log at the end (cursor-local vs global)
    loading_extra_query_count = odoo.sql_db.sql_counter
    loading_cursor_query_count = cr.sql_log_count

    models_updated = set()

    for index, package in enumerate(graph, 1):
        module_name = package.name
        module_id = package.id

        if skip_modules and module_name in skip_modules:
            continue

        module_t0 = time.time()
        module_cursor_query_count = cr.sql_log_count
        module_extra_query_count = odoo.sql_db.sql_counter

        # a module needs update work when flagged init/update or when its
        # db state says so
        needs_update = (
            hasattr(package, "init")
            or hasattr(package, "update")
            or package.state in ("to install", "to upgrade")
        )
        module_log_level = logging.DEBUG
        if needs_update:
            module_log_level = logging.INFO
        _logger.log(module_log_level, 'Loading module %s (%d/%d)', module_name, index, module_count)

        new_install = package.state == 'to install'
        if needs_update:
            if not new_install:
                # run 'pre' migration scripts before reloading the module
                if package.name != 'base':
                    registry.setup_models(cr)
                migrations.migrate_module(package, 'pre')
            if package.name != 'base':
                env = api.Environment(cr, SUPERUSER_ID, {})
                env.flush_all()

        # import the module's python package
        load_openerp_module(package.name)

        if new_install:
            py_module = sys.modules['odoo.addons.%s' % (module_name,)]
            pre_init = package.info.get('pre_init_hook')
            if pre_init:
                registry.setup_models(cr)
                getattr(py_module, pre_init)(cr)

        model_names = registry.load(cr, package)

        mode = 'update'
        if hasattr(package, 'init') or package.state == 'to install':
            mode = 'init'

        loaded_modules.append(package.name)
        if needs_update:
            models_updated |= set(model_names)
            models_to_check -= set(model_names)
            registry.setup_models(cr)
            registry.init_models(cr, model_names, {'module': package.name}, new_install)
        elif package.state != 'to remove':
            # The current module has simply been loaded. The models extended by this module
            # and for which we updated the schema, must have their schema checked again.
            # This is because the extension may have changed the model,
            # e.g. adding required=True to an existing field, but the schema has not been
            # updated by this module because it's not marked as 'to upgrade/to install'.
            models_to_check |= set(model_names) & models_updated

        idref = {}

        if needs_update:
            env = api.Environment(cr, SUPERUSER_ID, {})
            # Can't put this line out of the loop: ir.module.module will be
            # registered by init_models() above.
            module = env['ir.module.module'].browse(module_id)

            if perform_checks:
                module._check()

            if package.state == 'to upgrade':
                # upgrading the module information
                module.write(module.get_values_from_terp(package.data))
            load_data(cr, idref, mode, kind='data', package=package)
            demo_loaded = package.dbdemo = load_demo(cr, package, idref, mode)
            cr.execute('update ir_module_module set demo=%s where id=%s', (demo_loaded, module_id))
            module.invalidate_model(['demo'])

            migrations.migrate_module(package, 'post')

            # Update translations for all installed languages
            overwrite = odoo.tools.config["overwrite_existing_translations"]
            module._update_translations(overwrite=overwrite)

        if package.name is not None:
            registry._init_modules.add(package.name)

        if needs_update:
            if new_install:
                post_init = package.info.get('post_init_hook')
                if post_init:
                    getattr(py_module, post_init)(cr, registry)

            if mode == 'update':
                # validate the views that have not been checked yet
                env['ir.ui.view']._validate_module_views(module_name)

            # need to commit any modification the module's installation or
            # update made to the schema or data so the tests can run
            # (separately in their own transaction)
            cr.commit()
            # warn about concrete models that have no ir.model.access rule
            concrete_models = [model for model in model_names if not registry[model]._abstract]
            if concrete_models:
                cr.execute("""
                    SELECT model FROM ir_model
                    WHERE id NOT IN (SELECT DISTINCT model_id FROM ir_model_access) AND model IN %s
                    """, [tuple(concrete_models)])
                models = [model for [model] in cr.fetchall()]
                if models:
                    lines = [
                        f"The models {models} have no access rules in module {module_name}, consider adding some, like:",
                        "id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink"
                    ]
                    for model in models:
                        xmlid = model.replace('.', '_')
                        lines.append(f"{module_name}.access_{xmlid},access_{xmlid},{module_name}.model_{xmlid},base.group_user,1,0,0,0")
                    _logger.warning('\n'.join(lines))

        updating = tools.config.options['init'] or tools.config.options['update']
        test_time = test_queries = 0
        test_results = None
        # run this module's at_install tests when enabled
        if tools.config.options['test_enable'] and (needs_update or not updating):
            env = api.Environment(cr, SUPERUSER_ID, {})
            loader = odoo.tests.loader
            suite = loader.make_suite([module_name], 'at_install')
            if suite.countTestCases():
                if not needs_update:
                    registry.setup_models(cr)
                # Python tests
                env['ir.http']._clear_routing_map()  # force routing map to be rebuilt

                tests_t0, tests_q0 = time.time(), odoo.sql_db.sql_counter
                test_results = loader.run_suite(suite, module_name, global_report=report)
                report.update(test_results)
                test_time = time.time() - tests_t0
                test_queries = odoo.sql_db.sql_counter - tests_q0

                # tests may have reset the environment
                env = api.Environment(cr, SUPERUSER_ID, {})
                module = env['ir.module.module'].browse(module_id)

        if needs_update:
            processed_modules.append(package.name)

            ver = adapt_version(package.data['version'])
            # Set new modules and dependencies
            module.write({'state': 'installed', 'latest_version': ver})

            # remember pre-update state, then mark the package installed and
            # drop the one-shot init/demo/update flags
            package.load_state = package.state
            package.load_version = package.installed_version
            package.state = 'installed'
            for kind in ('init', 'demo', 'update'):
                if hasattr(package, kind):
                    delattr(package, kind)
            module.env.flush_all()

        extra_queries = odoo.sql_db.sql_counter - module_extra_query_count - test_queries
        extras = []
        if test_queries:
            extras.append(f'+{test_queries} test')
        if extra_queries:
            extras.append(f'+{extra_queries} other')
        _logger.log(
            module_log_level, "Module %s loaded in %.2fs%s, %s queries%s",
            module_name, time.time() - module_t0,
            f' (incl. {test_time:.2f}s test)' if test_time else '',
            cr.sql_log_count - module_cursor_query_count,
            f' ({", ".join(extras)})' if extras else ''
        )
        if test_results and not test_results.wasSuccessful():
            _logger.error(
                "Module %s: %d failures, %d errors of %d tests",
                module_name, test_results.failures_count, test_results.errors_count,
                test_results.testsRun
            )

    _logger.runbot("%s modules loaded in %.2fs, %s queries (+%s extra)",
                   len(graph),
                   time.time() - t0,
                   cr.sql_log_count - loading_cursor_query_count,
                   odoo.sql_db.sql_counter - loading_extra_query_count)  # extra queries: testes, notify, any other closed cursor

    return loaded_modules, processed_modules
|
||||
|
||||
def _check_module_names(cr, module_names):
    """Log a warning for requested module names that are unknown in the db."""
    requested = set(module_names)
    # the pseudo-module 'all' is only meaningful alongside 'base'; drop it
    if 'base' in requested and 'all' in requested:
        requested.discard('all')
    if not requested:
        return
    cr.execute("SELECT count(id) AS count FROM ir_module_module WHERE name in %s", (tuple(requested),))
    if cr.dictfetchone()['count'] != len(requested):
        # find out what module name(s) are incorrect:
        cr.execute("SELECT name FROM ir_module_module")
        known = {row['name'] for row in cr.dictfetchall()}
        unknown = requested - known
        _logger.warning('invalid module names, ignored: %s', ", ".join(unknown))
|
||||
|
||||
def load_marked_modules(cr, graph, states, force, progressdict, report,
                        loaded_modules, perform_checks, models_to_check=None):
    """Load every module whose db state is one of ``states``.

    New modules are appended to ``graph`` and ``loaded_modules`` in place;
    the list of modules actually installed/upgraded is returned.
    """
    if models_to_check is None:
        models_to_check = set()

    processed_modules = []
    while True:
        cr.execute("SELECT name from ir_module_module WHERE state IN %s", (tuple(states),))
        pending = [name for (name,) in cr.fetchall() if name not in graph]
        if not pending:
            break
        graph.add_modules(cr, pending, force)
        _logger.debug('Updating graph with %d more modules', len(pending))
        loaded, processed = load_module_graph(
            cr, graph, progressdict, report=report, skip_modules=loaded_modules,
            perform_checks=perform_checks, models_to_check=models_to_check
        )
        processed_modules.extend(processed)
        loaded_modules.extend(loaded)
        # a pass that made no progress means nothing more can be loaded
        if not processed:
            break
    return processed_modules
|
||||
|
||||
def load_modules(registry, force_demo=False, status=None, update_module=False):
    """ Load the modules for a registry object that has just been created. This
        function is part of Registry.new() and should not be used anywhere else.

    :param registry: the registry being populated
    :param force_demo: when True, force loading of demo data
    :param status: progress placeholder forwarded to the module graph loaders
    :param update_module: when True, also process install/upgrade/uninstall
        requests (from the configuration and from the database states)
    :return: None normally; a *new* registry when modules were uninstalled,
        in which case the whole loading process was restarted
    """
    initialize_sys_path()

    # 'force' lists the kinds of data to load even when not requested ('demo')
    force = []
    if force_demo:
        force.append('demo')

    # models whose database schema may need a fix-up pass at STEP 5.5
    models_to_check = set()

    with registry.cursor() as cr:
        # prevent endless wait for locks on schema changes (during online
        # installs) if a concurrent transaction has accessed the table;
        # connection settings are automatically reset when the connection is
        # borrowed from the pool
        cr.execute("SET SESSION lock_timeout = '15s'")
        if not odoo.modules.db.is_initialized(cr):
            if not update_module:
                _logger.error("Database %s not initialized, you can force it with `-i base`", cr.dbname)
                return
            _logger.info("init db")
            odoo.modules.db.initialize(cr)
            update_module = True # process auto-installed modules
            tools.config["init"]["all"] = 1
            if not tools.config['without_demo']:
                tools.config["demo"]['all'] = 1

        # flag 'base' for upgrade directly in SQL: the ORM is not usable yet
        if 'base' in tools.config['update'] or 'all' in tools.config['update']:
            cr.execute("update ir_module_module set state=%s where name=%s and state=%s", ('to upgrade', 'base', 'installed'))

        # STEP 1: LOAD BASE (must be done before module dependencies can be computed for later steps)
        graph = odoo.modules.graph.Graph()
        graph.add_module(cr, 'base', force)
        if not graph:
            _logger.critical('module base cannot be loaded! (hint: verify addons-path)')
            raise ImportError('Module `base` cannot be loaded! (hint: verify addons-path)')
        if update_module and tools.config['update']:
            # user-provided scripts to run before any module gets upgraded
            for pyfile in tools.config['pre_upgrade_scripts'].split(','):
                odoo.modules.migration.exec_script(cr, graph['base'].installed_version, pyfile, 'base', 'pre')

        if update_module and odoo.tools.table_exists(cr, 'ir_model_fields'):
            # determine the fields which are currently translated in the database
            cr.execute("SELECT model || '.' || name FROM ir_model_fields WHERE translate IS TRUE")
            registry._database_translated_fields = {row[0] for row in cr.fetchall()}

        # processed_modules: for cleanup step after install
        # loaded_modules: to avoid double loading
        report = registry._assertion_report
        loaded_modules, processed_modules = load_module_graph(
            cr, graph, status, perform_checks=update_module,
            report=report, models_to_check=models_to_check)

        load_lang = tools.config.pop('load_language')
        if load_lang or update_module:
            # some base models are used below, so make sure they are set up
            registry.setup_models(cr)

        if load_lang:
            for lang in load_lang.split(','):
                tools.load_language(cr, lang)

        # STEP 2: Mark other modules to be loaded/updated
        if update_module:
            env = api.Environment(cr, SUPERUSER_ID, {})
            Module = env['ir.module.module']
            _logger.info('updating modules list')
            Module.update_list()

            _check_module_names(cr, itertools.chain(tools.config['init'], tools.config['update']))

            # modules explicitly requested for installation (-i)
            module_names = [k for k, v in tools.config['init'].items() if v]
            if module_names:
                modules = Module.search([('state', '=', 'uninstalled'), ('name', 'in', module_names)])
                if modules:
                    modules.button_install()

            # modules explicitly requested for upgrade (-u)
            module_names = [k for k, v in tools.config['update'].items() if v]
            if module_names:
                modules = Module.search([('state', 'in', ('installed', 'to upgrade')), ('name', 'in', module_names)])
                if modules:
                    modules.button_upgrade()

            env.flush_all()
            cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base'))
            Module.invalidate_model(['state'])

        # STEP 3: Load marked modules (skipping base which was done in STEP 1)
        # IMPORTANT: this is done in two parts, first loading all installed or
        #            partially installed modules (i.e. installed/to upgrade), to
        #            offer a consistent system to the second part: installing
        #            newly selected modules.
        #            We include the modules 'to remove' in the first step, because
        #            they are part of the "currently installed" modules. They will
        #            be dropped in STEP 6 later, before restarting the loading
        #            process.
        # IMPORTANT 2: We have to loop here until all relevant modules have been
        #              processed, because in some rare cases the dependencies have
        #              changed, and modules that depend on an uninstalled module
        #              will not be processed on the first pass.
        #              It's especially useful for migrations.
        previously_processed = -1
        while previously_processed < len(processed_modules):
            previously_processed = len(processed_modules)
            processed_modules += load_marked_modules(cr, graph,
                ['installed', 'to upgrade', 'to remove'],
                force, status, report, loaded_modules, update_module, models_to_check)
            if update_module:
                processed_modules += load_marked_modules(cr, graph,
                    ['to install'], force, status, report,
                    loaded_modules, update_module, models_to_check)

        if update_module:
            # set up the registry without the patch for translated fields
            database_translated_fields = registry._database_translated_fields
            registry._database_translated_fields = ()
            registry.setup_models(cr)
            # determine which translated fields should no longer be translated,
            # and make their model fix the database schema
            models_to_untranslate = set()
            for full_name in database_translated_fields:
                model_name, field_name = full_name.rsplit('.', 1)
                if model_name in registry:
                    field = registry[model_name]._fields.get(field_name)
                    if field and not field.translate:
                        _logger.debug("Making field %s non-translated", field)
                        models_to_untranslate.add(model_name)
            registry.init_models(cr, list(models_to_untranslate), {'models_to_check': True})

        registry.loaded = True
        registry.setup_models(cr)

        # check that all installed modules have been loaded by the registry
        env = api.Environment(cr, SUPERUSER_ID, {})
        Module = env['ir.module.module']
        modules = Module.search(Module._get_modules_to_load_domain(), order='name')
        missing = [name for name in modules.mapped('name') if name not in graph]
        if missing:
            _logger.error("Some modules are not loaded, some dependencies or manifest may be missing: %s", missing)

        # STEP 3.5: execute migration end-scripts
        migrations = odoo.modules.migration.MigrationManager(cr, graph)
        for package in graph:
            migrations.migrate_module(package, 'end')

        # check that new module dependencies have been properly installed after a migration/upgrade
        cr.execute("SELECT name from ir_module_module WHERE state IN ('to install', 'to upgrade')")
        module_list = [name for (name,) in cr.fetchall()]
        if module_list:
            _logger.error("Some modules have inconsistent states, some dependencies may be missing: %s", sorted(module_list))

        # STEP 3.6: apply remaining constraints in case of an upgrade
        registry.finalize_constraints()

        # STEP 4: Finish and cleanup installations
        if processed_modules:
            env = api.Environment(cr, SUPERUSER_ID, {})

            # report ir_model rows whose model no longer exists in the registry
            cr.execute("SELECT model from ir_model")
            for (model,) in cr.fetchall():
                if model in registry:
                    env[model]._check_removed_columns(log=True)
                elif _logger.isEnabledFor(logging.INFO): # more an info that a warning...
                    _logger.runbot("Model %s is declared but cannot be loaded! (Perhaps a module was partially removed or renamed)", model)

            # Cleanup orphan records
            env['ir.model.data']._process_end(processed_modules)
            # Cleanup cron
            vacuum_cron = env.ref('base.autovacuum_job', raise_if_not_found=False)
            if vacuum_cron:
                # trigger after a small delay to give time for assets to regenerate
                vacuum_cron._trigger(at=datetime.datetime.now() + datetime.timedelta(minutes=1))

            env.flush_all()

        # install/update/demo requests have been honoured; reset them
        for kind in ('init', 'demo', 'update'):
            tools.config[kind] = {}

        # STEP 5: Uninstall modules to remove
        if update_module:
            # Remove records referenced from ir_model_data for modules to be
            # removed (and removed the references from ir_model_data).
            cr.execute("SELECT name, id FROM ir_module_module WHERE state=%s", ('to remove',))
            modules_to_remove = dict(cr.fetchall())
            if modules_to_remove:
                env = api.Environment(cr, SUPERUSER_ID, {})
                # run uninstall hooks in reverse graph order
                pkgs = reversed([p for p in graph if p.name in modules_to_remove])
                for pkg in pkgs:
                    uninstall_hook = pkg.info.get('uninstall_hook')
                    if uninstall_hook:
                        py_module = sys.modules['odoo.addons.%s' % (pkg.name,)]
                        getattr(py_module, uninstall_hook)(cr, registry)
                        env.flush_all()

                Module = env['ir.module.module']
                Module.browse(modules_to_remove.values()).module_uninstall()
                # Recursive reload, should only happen once, because there should be no
                # modules to remove next time
                cr.commit()
                _logger.info('Reloading registry once more after uninstalling modules')
                registry = odoo.modules.registry.Registry.new(
                    cr.dbname, force_demo, status, update_module
                )
                cr.reset()
                registry.check_tables_exist(cr)
                cr.commit()
                return registry

        # STEP 5.5: Verify extended fields on every model
        # This will fix the schema of all models in a situation such as:
        #   - module A is loaded and defines model M;
        #   - module B is installed/upgraded and extends model M;
        #   - module C is loaded and extends model M;
        #   - module B and C depend on A but not on each other;
        # The changes introduced by module C are not taken into account by the upgrade of B.
        if models_to_check:
            registry.init_models(cr, list(models_to_check), {'models_to_check': True})

        # STEP 6: verify custom views on every model
        if update_module:
            env = api.Environment(cr, SUPERUSER_ID, {})
            env['res.groups']._update_user_groups_view()
            View = env['ir.ui.view']
            for model in registry:
                try:
                    View._validate_custom_views(model)
                except Exception as e:
                    _logger.warning('invalid custom view(s) for model %s: %s', model, tools.ustr(e))

        if report.wasSuccessful():
            _logger.info('Modules loaded.')
        else:
            _logger.error('At least one test failed when loading the modules.')

        # STEP 8: save installed/updated modules for post-install tests and _register_hook
        registry.updated_modules += processed_modules

        # STEP 9: call _register_hook on every model
        # This is done *exactly once* when the registry is being loaded. See the
        # management of those hooks in `Registry.setup_models`: all the calls to
        # setup_models() done here do not mess up with hooks, as registry.ready
        # is False.
        env = api.Environment(cr, SUPERUSER_ID, {})
        for model in env.values():
            model._register_hook()
        env.flush_all()
|
||||
|
||||
|
||||
def reset_modules_state(db_name):
    """Roll transient module states ("to install/upgrade/remove") back to stable ones.

    Warning: this was introduced in response to commit 763d714, which locks
    cron jobs for databases having modules marked as 'to %'. It must be
    called ONLY when a module installation/upgrade/uninstallation fails —
    the only known case leaving modules stuck in a 'to %' state indefinitely.
    """
    resets = (
        "UPDATE ir_module_module SET state='installed' WHERE state IN ('to remove', 'to upgrade')",
        "UPDATE ir_module_module SET state='uninstalled' WHERE state='to install'",
    )
    connection = odoo.sql_db.db_connect(db_name)
    with connection.cursor() as cr:
        for statement in resets:
            cr.execute(statement)
        _logger.warning("Transient module states were reset")
|
||||
191
odoo-bringout-oca-ocb-base/odoo/modules/migration.py
Normal file
191
odoo-bringout-oca-ocb-base/odoo/modules/migration.py
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
""" Modules migration handling. """
|
||||
|
||||
from collections import defaultdict
|
||||
import glob
|
||||
import importlib.util
|
||||
import logging
|
||||
import os
|
||||
from os.path import join as opj
|
||||
|
||||
from odoo.modules.module import get_resource_path
|
||||
import odoo.release as release
|
||||
import odoo.upgrade
|
||||
from odoo.tools.parse_version import parse_version
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def load_script(path, module_name):
    """Import the python file at *path* as a fresh module named *module_name*.

    An absolute *path* is used verbatim; a relative one is resolved through
    :func:`get_resource_path` against the addons paths.
    """
    if os.path.isabs(path):
        full_path = path
    else:
        full_path = get_resource_path(*path.split(os.path.sep))
    spec = importlib.util.spec_from_file_location(module_name, full_path)
    script_module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(script_module)
    return script_module
|
||||
|
||||
|
||||
class MigrationManager(object):
    """ Manages the migration of modules.

    Migrations files must be python files containing a ``migrate(cr, installed_version)``
    function. These files must respect a directory tree structure: A 'migrations' folder
    which contains a folder by version. Version can be 'module' version or 'server.module'
    version (in this case, the files will only be processed by this version of the server).
    Python file names must start by ``pre-`` or ``post-`` and will be executed, respectively,
    before and after the module initialisation. ``end-`` scripts are run after all modules have
    been updated.

    A special folder named ``0.0.0`` can contain scripts that will be run on any version change.
    In `pre` stage, ``0.0.0`` scripts are run first, while in ``post`` and ``end``, they are run last.

    Example::

        <moduledir>
        `-- migrations
            |-- 1.0
            |   |-- pre-update_table_x.py
            |   |-- pre-update_table_y.py
            |   |-- post-create_plop_records.py
            |   |-- end-cleanup.py
            |   `-- README.txt              # not processed
            |-- 9.0.1.1                     # processed only on a 9.0 server
            |   |-- pre-delete_table_z.py
            |   `-- post-clean-data.py
            |-- 0.0.0
            |   `-- end-invariants.py       # processed on all version update
            `-- foo.py                      # not processed
    """

    def __init__(self, cr, graph):
        # cr: cursor handed to every executed script
        # graph: module dependency graph; only packages being upgraded get scripts
        self.cr = cr
        self.graph = graph
        # {module_name: {source_kind: {version: [script paths]}}}
        self.migrations = defaultdict(dict)
        self._get_files()

    def _get_files(self):
        """Collect migration scripts for every package of the graph being upgraded."""
        def _get_upgrade_path(pkg):
            # yield each <upgrade_path>/<pkg> directory that actually exists
            for path in odoo.upgrade.__path__:
                upgrade_path = opj(path, pkg)
                if os.path.exists(upgrade_path):
                    yield upgrade_path

        def get_scripts(path):
            # map each version sub-folder to the .py files it contains
            if not path:
                return {}
            return {
                version: glob.glob(opj(path, version, '*.py'))
                for version in os.listdir(path)
                if os.path.isdir(opj(path, version))
            }

        for pkg in self.graph:
            # only packages flagged for update/upgrade carry migration scripts
            if not (hasattr(pkg, 'update') or pkg.state == 'to upgrade' or
                    getattr(pkg, 'load_state', None) == 'to upgrade'):
                continue

            # scripts shipped inside the module itself ('migrations' or 'upgrades')
            self.migrations[pkg.name] = {
                'module': get_scripts(get_resource_path(pkg.name, 'migrations')),
                'module_upgrades': get_scripts(get_resource_path(pkg.name, 'upgrades')),
            }

            # scripts provided externally through the configured upgrade paths
            scripts = defaultdict(list)
            for p in _get_upgrade_path(pkg.name):
                for v, s in get_scripts(p).items():
                    scripts[v].extend(s)
            self.migrations[pkg.name]["upgrade"] = scripts

    def migrate_module(self, pkg, stage):
        """Run all applicable *stage* ('pre'/'post'/'end') scripts for *pkg*."""
        assert stage in ('pre', 'post', 'end')
        # log label per stage: [>v] before, [v>] after, [$v] at the end
        stageformat = {
            'pre': '[>%s]',
            'post': '[%s>]',
            'end': '[$%s]',
        }
        state = pkg.state if stage in ('pre', 'post') else getattr(pkg, 'load_state', None)

        # nothing to migrate for fresh installs or packages not being upgraded
        if not (hasattr(pkg, 'update') or state == 'to upgrade') or state == 'to install':
            return

        def convert_version(version):
            # normalize a module-only version by prefixing the server version
            if version.count('.') >= 2:
                return version  # the version number already contains the server version
            return "%s.%s" % (release.major_version, version)

        def _get_migration_versions(pkg, stage):
            # all versions that have at least one script, ordered by version
            versions = sorted({
                ver
                for lv in self.migrations[pkg.name].values()
                for ver, lf in lv.items()
                if lf
            }, key=lambda k: parse_version(convert_version(k)))
            if "0.0.0" in versions:
                # reorder versions: 0.0.0 runs first in 'pre', last otherwise
                versions.remove("0.0.0")
                if stage == "pre":
                    versions.insert(0, "0.0.0")
                else:
                    versions.append("0.0.0")
            return versions

        def _get_migration_files(pkg, version, stage):
            """ return a list of migration script files
            """
            m = self.migrations[pkg.name]

            # merge the three sources, keep only this stage, sort by file name
            return sorted(
                (
                    f
                    for k in m
                    for f in m[k].get(version, [])
                    if os.path.basename(f).startswith(f"{stage}-")
                ),
                key=os.path.basename,
            )

        installed_version = getattr(pkg, 'load_version', pkg.installed_version) or ''
        parsed_installed_version = parse_version(installed_version)
        current_version = parse_version(convert_version(pkg.data['version']))

        def compare(version):
            # decide whether the scripts of `version` must run for this upgrade
            if version == "0.0.0" and parsed_installed_version < current_version:
                return True

            full_version = convert_version(version)
            majorless_version = (version != full_version)

            if majorless_version:
                # We should not re-execute major-less scripts when upgrading to new Odoo version
                # a module in `9.0.2.0` should not re-execute a `2.0` script when upgrading to `10.0.2.0`.
                # In which case we must compare just the module version
                return parsed_installed_version[2:] < parse_version(full_version)[2:] <= current_version[2:]

            return parsed_installed_version < parse_version(full_version) <= current_version

        versions = _get_migration_versions(pkg, stage)
        for version in versions:
            if compare(version):
                for pyfile in _get_migration_files(pkg, version, stage):
                    exec_script(self.cr, installed_version, pyfile, pkg.name, stage, stageformat[stage] % version)
|
||||
|
||||
def exec_script(cr, installed_version, pyfile, addon, stage, version=None):
    """Execute a single migration script file.

    :param cr: database cursor passed to the script's ``migrate()`` function
    :param installed_version: version string passed to ``migrate()``
    :param pyfile: path of the script; silently skipped unless it ends in .py
    :param addon: module name, used in log messages
    :param stage: 'pre', 'post' or 'end', used in log messages
    :param version: label for log messages; defaults to *installed_version*
    :raises ImportError: when the script itself fails to load
    """
    version = version or installed_version
    name, ext = os.path.splitext(os.path.basename(pyfile))
    # ignore non-python files (e.g. README.txt) sitting in migration folders
    if ext.lower() != '.py':
        return
    mod = None
    try:
        mod = load_script(pyfile, name)
        _logger.info('module %(addon)s: Running migration %(version)s %(name)s' % dict(locals(), name=mod.__name__))
        migrate = mod.migrate
    except ImportError:
        # an unloadable script aborts the whole upgrade
        _logger.exception('module %(addon)s: Unable to load %(stage)s-migration file %(file)s' % dict(locals(), file=pyfile))
        raise
    except AttributeError:
        # a script without a migrate() function is reported but not fatal
        _logger.error('module %(addon)s: Each %(stage)s-migration file must have a "migrate(cr, installed_version)" function' % locals())
    else:
        migrate(cr, installed_version)
    finally:
        # drop the local reference to the loaded script module
        if mod:
            del mod
|
||||
566
odoo-bringout-oca-ocb-base/odoo/modules/module.py
Normal file
566
odoo-bringout-oca-ocb-base/odoo/modules/module.py
Normal file
|
|
@ -0,0 +1,566 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import ast
|
||||
import collections.abc
|
||||
import copy
|
||||
import functools
|
||||
import importlib
|
||||
import logging
|
||||
import os
|
||||
import pkg_resources
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from os.path import join as opj, normpath
|
||||
|
||||
import odoo
|
||||
import odoo.tools as tools
|
||||
import odoo.release as release
|
||||
from odoo.tools import pycompat
|
||||
from odoo.tools.misc import file_path
|
||||
|
||||
|
||||
# Recognized manifest file names, in lookup order (the historical
# '__openerp__.py' spelling is still accepted).
MANIFEST_NAMES = ('__manifest__.py', '__openerp__.py')
# Candidate README file names for a module.
README = ['README.rst', 'README.md', 'README.txt']

# Default values for manifest keys a module author may omit.
# NOTE(review): presumably merged into each parsed manifest by load_manifest —
# confirm against that function.
_DEFAULT_MANIFEST = {
    #addons_path: f'/path/to/the/addons/path/of/{module}', # automatic
    'application': False,
    'bootstrap': False, # web
    'assets': {},
    'author': 'Odoo S.A.',
    'auto_install': False,
    'category': 'Uncategorized',
    'data': [],
    'demo': [],
    'demo_xml': [],
    'depends': [],
    'description': '',
    'external_dependencies': [],
    #icon: f'/{module}/static/description/icon.png', # automatic
    'init_xml': [],
    'installable': True,
    'images': [], # website
    'images_preview_theme': {}, # website themes
    #license, mandatory
    'live_test_url': '', # website themes
    #name, mandatory
    'post_init_hook': '',
    'post_load': '',
    'pre_init_hook': '',
    'sequence': 100,
    'snippet_lists': {}, # website themes
    'summary': '',
    'test': [],
    'update_xml': [],
    'uninstall_hook': '',
    'version': '1.0',
    'web': False,
    'website': '',
}
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# addons path as a list
# ad_paths is a deprecated alias, please use odoo.addons.__path__
@tools.lazy
def ad_paths():
    """Deprecated lazy proxy for ``odoo.addons.__path__``; warns on access."""
    warnings.warn(
        '"odoo.modules.module.ad_paths" is a deprecated proxy to '
        '"odoo.addons.__path__".', DeprecationWarning, stacklevel=2)
    return odoo.addons.__path__
|
||||
|
||||
# Modules already loaded
# NOTE(review): mutated elsewhere in this file (outside this excerpt); appears
# to hold the names of addon packages already imported — confirm with callers.
loaded = []
|
||||
|
||||
class AddonsHook(object):
    """ Makes modules accessible through openerp.addons.* """
    # sys.meta_path hook aliasing the legacy ``openerp.addons.<mod>`` names to
    # the canonical ``odoo.addons.<mod>`` packages, with a DeprecationWarning.

    def find_module(self, name, path=None):
        # legacy (pre-PEP 451) finder API; only claims direct children of
        # openerp.addons (exactly two dots in the dotted name)
        if name.startswith('openerp.addons.') and name.count('.') == 2:
            warnings.warn(
                '"openerp.addons" is a deprecated alias to "odoo.addons".',
                DeprecationWarning, stacklevel=2)
            return self

    def find_spec(self, fullname, path=None, target=None):
        # PEP 451 finder API, same matching rule as find_module above
        if fullname.startswith('openerp.addons.') and fullname.count('.') == 2:
            warnings.warn(
                '"openerp.addons" is a deprecated alias to "odoo.addons".',
                DeprecationWarning, stacklevel=2)
            return importlib.util.spec_from_loader(fullname, self)

    def load_module(self, name):
        assert name not in sys.modules

        # rewrite openerp.addons.<mod> into odoo.addons.<mod>
        odoo_name = re.sub(r'^openerp.addons.(\w+)$', r'odoo.addons.\g<1>', name)

        odoo_module = sys.modules.get(odoo_name)
        if not odoo_module:
            odoo_module = importlib.import_module(odoo_name)

        # register the legacy name as an alias of the very same module object
        sys.modules[name] = odoo_module

        return odoo_module
|
||||
|
||||
class OdooHook(object):
    """ Makes odoo package also available as openerp """
    # sys.meta_path hook aliasing any remaining ``openerp`` dotted name
    # (framework modules and module sub-packages) to its ``odoo`` equivalent.

    def find_module(self, name, path=None):
        # openerp.addons.<identifier> should already be matched by AddonsHook,
        # only framework and subdirectories of modules should match
        if re.match(r'^openerp\b', name):
            warnings.warn(
                'openerp is a deprecated alias to odoo.',
                DeprecationWarning, stacklevel=2)
            return self

    def find_spec(self, fullname, path=None, target=None):
        # openerp.addons.<identifier> should already be matched by AddonsHook,
        # only framework and subdirectories of modules should match
        if re.match(r'^openerp\b', fullname):
            warnings.warn(
                'openerp is a deprecated alias to odoo.',
                DeprecationWarning, stacklevel=2)
            return importlib.util.spec_from_loader(fullname, self)

    def load_module(self, name):
        assert name not in sys.modules

        # rewrite the openerp... dotted name into its odoo... counterpart
        canonical = re.sub(r'^openerp(.*)', r'odoo\g<1>', name)

        if canonical in sys.modules:
            mod = sys.modules[canonical]
        else:
            # probable failure: canonical execution calling old naming -> corecursion
            mod = importlib.import_module(canonical)

        # just set the original module at the new location. Don't proxy,
        # it breaks *-import (unless you can find how `from a import *` lists
        # what's supposed to be imported by `*`, and manage to override it)
        sys.modules[name] = mod

        return sys.modules[name]
|
||||
|
||||
|
||||
class UpgradeHook(object):
    """Makes the legacy `migrations` package being `odoo.upgrade`"""
    # sys.meta_path hook aliasing ``odoo.addons.base.maintenance.migrations...``
    # imports to the ``odoo.upgrade...`` package.

    def find_module(self, name, path=None):
        # legacy (pre-PEP 451) finder API
        if re.match(r"^odoo\.addons\.base\.maintenance\.migrations\b", name):
            # We can't trigger a DeprecationWarning in this case.
            # In order to be cross-versions, the multi-versions upgrade scripts (0.0.0 scripts),
            # the tests, and the common files (utility functions) still needs to import from the
            # legacy name.
            return self

    def find_spec(self, fullname, path=None, target=None):
        # PEP 451 finder API, same matching rule as find_module above
        if re.match(r"^odoo\.addons\.base\.maintenance\.migrations\b", fullname):
            # We can't trigger a DeprecationWarning in this case.
            # In order to be cross-versions, the multi-versions upgrade scripts (0.0.0 scripts),
            # the tests, and the common files (utility functions) still needs to import from the
            # legacy name.
            return importlib.util.spec_from_loader(fullname, self)

    def load_module(self, name):
        assert name not in sys.modules

        # rewrite the legacy prefix into the canonical odoo.upgrade one
        canonical_upgrade = name.replace("odoo.addons.base.maintenance.migrations", "odoo.upgrade")

        if canonical_upgrade in sys.modules:
            mod = sys.modules[canonical_upgrade]
        else:
            mod = importlib.import_module(canonical_upgrade)

        # register the legacy name as an alias of the same module object
        sys.modules[name] = mod

        return sys.modules[name]
|
||||
|
||||
|
||||
def initialize_sys_path():
    """
    Setup the addons path ``odoo.addons.__path__`` with various defaults
    and explicit directories.

    Also extends ``odoo.upgrade.__path__`` with the configured upgrade paths,
    registers the legacy module aliases, and installs the deprecation import
    hooks on ``sys.meta_path`` (the hooks part runs only once per process).
    """
    # hook odoo.addons on data dir
    dd = os.path.normcase(tools.config.addons_data_dir)
    if os.access(dd, os.R_OK) and dd not in odoo.addons.__path__:
        odoo.addons.__path__.append(dd)

    # hook odoo.addons on addons paths
    for ad in tools.config['addons_path'].split(','):
        ad = os.path.normcase(os.path.abspath(tools.ustr(ad.strip())))
        if ad not in odoo.addons.__path__:
            odoo.addons.__path__.append(ad)

    # hook odoo.addons on base module path
    base_path = os.path.normcase(os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'addons')))
    if base_path not in odoo.addons.__path__ and os.path.isdir(base_path):
        odoo.addons.__path__.append(base_path)

    # hook odoo.upgrade on upgrade-path
    from odoo import upgrade
    legacy_upgrade_path = os.path.join(base_path, 'base', 'maintenance', 'migrations')
    for up in (tools.config['upgrade_path'] or legacy_upgrade_path).split(','):
        up = os.path.normcase(os.path.abspath(tools.ustr(up.strip())))
        if os.path.isdir(up) and up not in upgrade.__path__:
            upgrade.__path__.append(up)

    # create deprecated module alias from odoo.addons.base.maintenance.migrations to odoo.upgrade
    spec = importlib.machinery.ModuleSpec("odoo.addons.base.maintenance", None, is_package=True)
    maintenance_pkg = importlib.util.module_from_spec(spec)
    maintenance_pkg.migrations = upgrade
    sys.modules["odoo.addons.base.maintenance"] = maintenance_pkg
    sys.modules["odoo.addons.base.maintenance.migrations"] = upgrade

    # hook deprecated module alias from openerp to odoo and "crm"-like to odoo.addons
    if not getattr(initialize_sys_path, 'called', False): # only initialize once
        # inserted in reverse priority order: AddonsHook ends up first so it
        # claims openerp.addons.* before OdooHook sees the name
        sys.meta_path.insert(0, UpgradeHook())
        sys.meta_path.insert(0, OdooHook())
        sys.meta_path.insert(0, AddonsHook())
        initialize_sys_path.called = True
|
||||
|
||||
|
||||
def get_module_path(module, downloaded=False, display_warning=True):
    """Return the path of the given module.

    The addons paths are scanned in order; the first directory that contains
    a module manifest (or a ``<module>.zip`` archive) wins. If ``downloaded``
    is true, the default downloaded-addons directory is returned as a last
    resort instead of ``False``.
    """
    # a module name must never contain a path separator
    if re.search(r"[\/\\]", module):
        return False

    for addons_dir in odoo.addons.__path__:
        candidates = [opj(addons_dir, module, manifest) for manifest in MANIFEST_NAMES]
        candidates.append(opj(addons_dir, module + '.zip'))
        for candidate in candidates:
            if os.path.exists(candidate):
                return opj(addons_dir, module)

    if downloaded:
        return opj(tools.config.addons_data_dir, module)
    if display_warning:
        _logger.warning('module %s: module not found', module)
    return False
|
||||
|
||||
def get_module_filetree(module, dir='.'):
    """Return a nested dict mirroring the file tree of *module* under *dir*.

    Leaves are ``None``, directories are nested dicts. Returns ``False`` when
    the module cannot be located.

    .. deprecated:: 16.0 use :func:`os.walk` or a recursive glob instead.
    """
    warnings.warn(
        "Since 16.0: use os.walk or a recursive glob or something",
        DeprecationWarning,
        stacklevel=2
    )
    path = get_module_path(module)
    if not path:
        return False

    dir = os.path.normpath(dir)
    if dir == '.':
        dir = ''
    # refuse any path escaping the module directory
    if dir.startswith('..') or (dir and dir[0] == '/'):
        raise Exception('Cannot access file outside the module')

    files = odoo.tools.osutil.listdir(path, True)

    tree = {}
    for f in files:
        if not f.startswith(dir):
            continue

        if dir:
            # strip the 'dir' prefix plus its separator (unless already included)
            f = f[len(dir)+int(not dir.endswith('/')):]
        lst = f.split(os.sep)
        current = tree
        # descend/create one nested dict per path component, leaf maps to None
        while len(lst) != 1:
            current = current.setdefault(lst.pop(0), {})
        current[lst.pop(0)] = None

    return tree
|
||||
|
||||
def get_resource_path(module, *args):
    """Return the full path of a resource of the given module.

    :param module: module name
    :param list(str) args: resource path components within module
    :rtype: str
    :return: absolute path to the resource, or ``False`` when it cannot be
        resolved
    """
    try:
        return file_path(opj(module, *args))
    except (FileNotFoundError, ValueError):
        return False
|
||||
|
||||
def check_resource_path(mod_path, *args):
    """Resolve *args* under *mod_path* via file_path; ``False`` when invalid."""
    try:
        return file_path(opj(mod_path, *args))
    except (FileNotFoundError, ValueError):
        return False
|
||||
|
||||
# backwards compatibility: legacy name for get_resource_path, kept for callers
get_module_resource = get_resource_path
|
||||
|
||||
def get_resource_from_path(path):
    """Tries to extract the module name and the resource's relative path
    out of an absolute resource path.

    If operation is successful, returns a tuple containing the module name, the relative path
    to the resource using '/' as filesystem separator[1] and the same relative path using
    os.path.sep separators.

    [1] same convention as the resource path declaration in manifests

    :param path: absolute resource path
    :rtype: tuple
    :return: tuple(module_name, relative_path, os_relative_path) if possible, else None
    """
    resource = False
    # longest addons path first, so a nested addons directory wins over its parent
    for candidate in sorted(odoo.addons.__path__, key=len, reverse=True):
        # force trailing separator
        prefix = os.path.join(candidate, "")
        if os.path.commonprefix([prefix, path]) == prefix:
            resource = path.replace(prefix, "", 1)
            break

    if not resource:
        return None
    relative = resource.split(os.path.sep)
    if not relative[0]:
        relative.pop(0)
    module = relative.pop(0)
    return (module, '/'.join(relative), os.path.sep.join(relative))
|
||||
|
||||
def get_module_icon(module):
    """Return the URL path of ``module``'s icon, or base's icon when the
    module does not ship one."""
    icon_parts = ('static', 'description', 'icon.png')
    owner = module if get_module_resource(module, *icon_parts) else 'base'
    return '/%s/%s' % (owner, '/'.join(icon_parts))
|
||||
|
||||
def get_module_icon_path(module):
    """Return the filesystem path of ``module``'s icon, falling back on
    base's icon.

    :param module: an object exposing a ``name`` attribute (module record)
    """
    icon_parts = ('static', 'description', 'icon.png')
    return (get_module_resource(module.name, *icon_parts)
            or get_module_resource('base', *icon_parts))
|
||||
|
||||
def module_manifest(path):
    """Returns path to module manifest if one can be found under `path`, else `None`."""
    if not path:
        return None
    for manifest_name in MANIFEST_NAMES:
        candidate = opj(path, manifest_name)
        if os.path.isfile(candidate):
            return candidate
    # falls through: implicit None when no manifest file exists
|
||||
|
||||
def get_module_root(path):
    """
    Get closest module's root beginning from path

    # Given:
    # /foo/bar/module_dir/static/src/...

    get_module_root('/foo/bar/module_dir/static/')
    # returns '/foo/bar/module_dir'

    get_module_root('/foo/bar/module_dir/')
    # returns '/foo/bar/module_dir'

    get_module_root('/foo/bar')
    # returns None

    @param path: Path from which the lookup should start

    @return: Module root path or None if not found
    """
    current = path
    while not module_manifest(current):
        parent = os.path.abspath(opj(current, os.pardir))
        if parent == current:
            # reached the filesystem root without finding a manifest
            return None
        current = parent
    return current
|
||||
|
||||
def load_manifest(module, mod_path=None):
    """ Load the module manifest from the file system.

    :param str module: technical module name
    :param Optional[str] mod_path: path of the module on disk; looked up in
        the addons paths when not given
    :returns: the manifest merged over the default manifest, or ``{}`` when
        no manifest file is found
    :rtype: dict
    """
    if not mod_path:
        mod_path = get_module_path(module, downloaded=True)
    manifest_file = module_manifest(mod_path)

    if not manifest_file:
        _logger.debug('module %s: no manifest file found %s', module, MANIFEST_NAMES)
        return {}

    # start from a copy of the default manifest so missing keys get defaults
    manifest = copy.deepcopy(_DEFAULT_MANIFEST)
    manifest['icon'] = get_module_icon(module)

    # the manifest is a Python literal: parse without executing arbitrary code
    with tools.file_open(manifest_file, mode='r') as f:
        manifest.update(ast.literal_eval(f.read()))

    if not manifest['description']:
        # fall back on the first README-like file shipped with the module
        readme_path = [opj(mod_path, x) for x in README
                       if os.path.isfile(opj(mod_path, x))]
        if readme_path:
            with tools.file_open(readme_path[0]) as fd:
                manifest['description'] = fd.read()

    if not manifest.get('license'):
        manifest['license'] = 'LGPL-3'
        _logger.warning("Missing `license` key in manifest for %r, defaulting to LGPL-3", module)

    # auto_install is either `False` (by default) in which case the module
    # is opt-in, either a list of dependencies in which case the module is
    # automatically installed if all dependencies are (special case: [] to
    # always install the module), either `True` to auto-install the module
    # in case all dependencies declared in `depends` are installed.
    if isinstance(manifest['auto_install'], collections.abc.Iterable):
        manifest['auto_install'] = set(manifest['auto_install'])
        non_dependencies = manifest['auto_install'].difference(manifest['depends'])
        assert not non_dependencies,\
            "auto_install triggers must be dependencies, found " \
            "non-dependencies [%s] for module %s" % (
                ', '.join(non_dependencies), module
            )
    elif manifest['auto_install']:
        manifest['auto_install'] = set(manifest['depends'])

    # normalize the version with the current series prefix
    manifest['version'] = adapt_version(manifest['version'])
    manifest['addons_path'] = normpath(opj(mod_path, os.pardir))

    return manifest
|
||||
|
||||
def get_manifest(module, mod_path=None):
    """
    Get the module manifest.

    :param str module: The name of the module (sale, purchase, ...).
    :param Optional[str] mod_path: The optional path to the module on
        the file-system. If not set, it is determined by scanning the
        addons-paths.
    :returns: The module manifest as a dict or an empty dict
        when the manifest was not found.
    :rtype: dict
    """
    cached = _get_manifest_cached(module, mod_path)
    # deep-copy so callers can mutate the result without corrupting the cache
    return copy.deepcopy(cached)
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def _get_manifest_cached(module, mod_path=None):
    # Unbounded cache keyed on (module, mod_path). Callers must never mutate
    # the returned dict: get_manifest() deep-copies it before handing it out.
    return load_manifest(module, mod_path)
|
||||
|
||||
def load_information_from_description_file(module, mod_path=None):
    """Deprecated alias of :func:`get_manifest`; emits a DeprecationWarning."""
    warnings.warn(
        'load_information_from_description_file() is a deprecated '
        'alias to get_manifest()', DeprecationWarning, stacklevel=2)
    return get_manifest(module, mod_path)
|
||||
|
||||
def load_openerp_module(module_name):
    """ Load an OpenERP module, if not already loaded.

    This loads the module and register all of its models, thanks to either
    the MetaModel metaclass, or the explicit instantiation of the model.
    This is also used to load server-wide module (i.e. it is also used
    when there is no model to register).

    :param str module_name: technical name of the module to import
    :raises Exception: re-raises whatever the module import or its
        post-load hook raised, after logging it
    """
    global loaded
    # `loaded` is a module-level list of already-imported module names
    if module_name in loaded:
        return

    try:
        # importing the package registers its models as a side effect
        __import__('odoo.addons.' + module_name)

        # Call the module's post-load hook. This can done before any model or
        # data has been initialized. This is ok as the post-load hook is for
        # server-wide (instead of registry-specific) functionalities.
        info = get_manifest(module_name)
        if info['post_load']:
            getattr(sys.modules['odoo.addons.' + module_name], info['post_load'])()

    except Exception as e:
        msg = "Couldn't load module %s" % (module_name)
        _logger.critical(msg)
        _logger.critical(e)
        raise
    else:
        # only mark as loaded when import and post-load both succeeded
        loaded.append(module_name)
|
||||
|
||||
def get_modules():
    """Return the sorted list of module names found in the addons paths."""
    def _module_names_in(addons_dir):
        """One-level scan of ``addons_dir`` for directories (or zips) that
        contain a manifest file."""
        def _strip_zip(entry):
            entry = os.path.basename(entry)
            return entry[:-4] if entry[-4:] == '.zip' else entry

        def _has_manifest(entry):
            return any(
                os.path.isfile(opj(addons_dir, entry, manifest))
                for manifest in MANIFEST_NAMES
            )

        return [_strip_zip(entry)
                for entry in os.listdir(addons_dir)
                if _has_manifest(entry)]

    names = []
    for addons_dir in odoo.addons.__path__:
        if os.path.exists(addons_dir):
            names.extend(_module_names_in(addons_dir))
        else:
            _logger.warning("addons path does not exist: %s", addons_dir)
    return sorted(set(names))
|
||||
|
||||
def get_modules_with_version():
    """Return a ``{module_name: version}`` mapping for every available module.

    Modules whose manifest cannot be read get the default '1.0'-based version.
    """
    default_version = adapt_version('1.0')
    versions = {}
    for module in get_modules():
        try:
            versions[module] = get_manifest(module)['version']
        except Exception:
            # unreadable manifest or missing key: keep the default version
            versions[module] = default_version
    return versions
|
||||
|
||||
def adapt_version(version):
    """Prefix ``version`` with the current Odoo series when it lacks it."""
    series = release.major_version
    if version != series and version.startswith(series + '.'):
        # already carries the series prefix: return unchanged
        return version
    return '%s.%s' % (series, version)
|
||||
|
||||
# NOTE(review): module-level flag, False outside of test mode; presumably set
# by the test framework while tests run — confirm against odoo.tests usage.
current_test = False
|
||||
|
||||
|
||||
def check_python_external_dependency(pydep):
    """ Check that the python dependency ``pydep`` is satisfied.

    ``pydep`` is first looked up as an installed distribution (PyPI name);
    for backward compatibility an importable module of that name is also
    accepted (with an informational log).

    :param str pydep: distribution (preferred) or module name
    :raises Exception: when the dependency is missing or has a version conflict
    """
    try:
        # NOTE(review): pkg_resources is deprecated upstream; kept here for
        # compatibility with existing manifests.
        pkg_resources.get_distribution(pydep)
    except pkg_resources.DistributionNotFound as e:
        try:
            importlib.import_module(pydep)
            _logger.info("python external dependency on '%s' does not appear to be a valid PyPI package. Using a PyPI package name is recommended.", pydep)
        except ImportError:
            # backward compatibility attempt failed
            _logger.warning("DistributionNotFound: %s", e)
            raise Exception('Python library not installed: %s' % (pydep,))
    except pkg_resources.VersionConflict as e:
        _logger.warning("VersionConflict: %s", e)
        raise Exception('Python library version conflict: %s' % (pydep,))
    except Exception as e:
        # any other get_distribution() failure (malformed metadata, ...)
        _logger.warning("get_distribution(%s) failed: %s", pydep, e)
        raise Exception('Error finding python library %s' % (pydep,))
|
||||
|
||||
|
||||
def check_manifest_dependencies(manifest):
    """Verify the manifest's declared external dependencies.

    :param dict manifest: a module manifest
    :raises Exception: when a python package or a binary is missing
    """
    deps = manifest.get('external_dependencies')
    if not deps:
        return

    for python_dep in deps.get('python', []):
        check_python_external_dependency(python_dep)

    for binary_dep in deps.get('bin', []):
        try:
            tools.find_in_path(binary_dep)
        except IOError:
            raise Exception('Unable to find %r in path' % (binary_dep,))
|
||||
30
odoo-bringout-oca-ocb-base/odoo/modules/neutralize.py
Normal file
30
odoo-bringout-oca-ocb-base/odoo/modules/neutralize.py
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import odoo
|
||||
import logging
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
def get_installed_modules(cursor):
    """Return the names of the modules installed (or being upgraded/removed)
    in the database behind ``cursor``."""
    cursor.execute('''
        SELECT name
        FROM ir_module_module
        WHERE state IN ('installed', 'to upgrade', 'to remove');
    ''')
    return [name for (name,) in cursor.fetchall()]
|
||||
|
||||
def get_neutralization_queries(modules):
    """Yield the content of each module's ``data/neutralize.sql``, stripped,
    skipping modules that do not ship one."""
    # neutralization for each module
    for module in modules:
        filename = odoo.modules.get_module_resource(module, 'data/neutralize.sql')
        if not filename:
            continue
        with odoo.tools.misc.file_open(filename) as sql_file:
            yield sql_file.read().strip()
|
||||
|
||||
def neutralize_database(cursor):
    """Run every installed module's neutralization SQL on the database."""
    modules = get_installed_modules(cursor)
    for query in get_neutralization_queries(modules):
        cursor.execute(query)
    _logger.info("Neutralization finished")
|
||||
931
odoo-bringout-oca-ocb-base/odoo/modules/registry.py
Normal file
931
odoo-bringout-oca-ocb-base/odoo/modules/registry.py
Normal file
|
|
@ -0,0 +1,931 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
""" Models registries.
|
||||
|
||||
"""
|
||||
from collections import defaultdict, deque
|
||||
from collections.abc import Mapping
|
||||
from contextlib import closing, contextmanager
|
||||
from functools import partial
|
||||
from operator import attrgetter
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import warnings
|
||||
|
||||
import psycopg2
|
||||
|
||||
import odoo
|
||||
from odoo.modules.db import FunctionStatus
|
||||
from odoo.osv.expression import get_unaccent_wrapper
|
||||
from .. import SUPERUSER_ID
|
||||
from odoo.sql_db import TestCursor
|
||||
from odoo.tools import (config, existing_tables, lazy_classproperty,
|
||||
lazy_property, sql, Collector, OrderedSet)
|
||||
from odoo.tools.func import locked
|
||||
from odoo.tools.lru import LRU
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
_schema = logging.getLogger('odoo.schema')
|
||||
|
||||
|
||||
class Registry(Mapping):
|
||||
""" Model registry for a particular database.
|
||||
|
||||
The registry is essentially a mapping between model names and model classes.
|
||||
There is one registry instance per database.
|
||||
|
||||
"""
|
||||
_lock = threading.RLock()
|
||||
_saved_lock = None
|
||||
|
||||
    @lazy_classproperty
    def registries(cls):
        """ A mapping from database names to registries (computed once,
        lazily, for the whole class). """
        size = config.get('registry_lru_size', None)
        if not size:
            # Size the LRU depending of the memory limits
            if os.name != 'posix':
                # cannot specify the memory limit soft on windows...
                size = 42
            else:
                # A registry takes 10MB of memory on average, so we reserve
                # 10Mb (registry) + 5Mb (working memory) per registry
                avgsz = 15 * 1024 * 1024
                size = int(config['limit_memory_soft'] / avgsz)
        return LRU(size)
|
||||
|
||||
    def __new__(cls, db_name):
        """ Return the registry for the given database name."""
        with cls._lock:
            try:
                # EAFP: the common case is a registry that is already loaded
                return cls.registries[db_name]
            except KeyError:
                # first access for this database: build and cache the registry
                return cls.new(db_name)
|
||||
|
||||
    @classmethod
    @locked
    def new(cls, db_name, force_demo=False, status=None, update_module=False):
        """ Create and return a new registry for the given database name.

        :param str db_name: database name
        :param force_demo: whether demo data must be loaded
        :param status: passed through to module loading
        :param update_module: whether modules must be updated
        """
        t0 = time.time()
        registry = object.__new__(cls)
        registry.init(db_name)
        # shadow the class-level attributes on the instance so they cannot
        # be misused through a registry object
        registry.new = registry.init = registry.registries = None

        # Initializing a registry will call general code which will in
        # turn call Registry() to obtain the registry being initialized.
        # Make it available in the registries dictionary then remove it
        # if an exception is raised.
        cls.delete(db_name)
        cls.registries[db_name] = registry  # pylint: disable=unsupported-assignment-operation
        try:
            registry.setup_signaling()
            # This should be a method on Registry
            try:
                odoo.modules.load_modules(registry, force_demo, status, update_module)
            except Exception:
                odoo.modules.reset_modules_state(db_name)
                raise
        except Exception:
            _logger.exception('Failed to load registry')
            del cls.registries[db_name]     # pylint: disable=unsupported-delete-operation
            raise

        # load_modules() above can replace the registry by calling
        # indirectly new() again (when modules have to be uninstalled).
        # Yeah, crazy.
        registry = cls.registries[db_name]  # pylint: disable=unsubscriptable-object

        registry._init = False
        registry.ready = True
        registry.registry_invalidated = bool(update_module)

        _logger.info("Registry loaded in %.3fs", time.time() - t0)
        return registry
|
||||
|
||||
    def init(self, db_name):
        """ Initialize the registry's attributes for database ``db_name``.

        Called once from :meth:`new`; does not load any module.
        """
        self.models = {}    # model name/model instance mapping
        self._sql_constraints = set()
        self._init = True
        self._database_translated_fields = ()  # names of translated fields in database
        self._assertion_report = odoo.tests.result.OdooTestResult()
        self._fields_by_model = None
        self._ordinary_tables = None
        self._constraint_queue = deque()
        self.__cache = LRU(8192)

        # modules fully loaded (maintained during init phase by `loading` module)
        self._init_modules = set()
        self.updated_modules = []       # installed/updated modules
        self.loaded_xmlids = set()

        self.db_name = db_name
        self._db = odoo.sql_db.db_connect(db_name)

        # cursor for test mode; None means "normal" mode
        self.test_cr = None
        self.test_lock = None

        # Indicates the loading progress of the registry
        self.loaded = False             # whether all modules are loaded
        self.ready = False              # whether everything is set up

        # field dependencies
        self.field_depends = Collector()
        self.field_depends_context = Collector()
        self.field_inverses = Collector()

        # cache of methods get_field_trigger_tree() and is_modifying_relations()
        self._field_trigger_trees = {}
        self._is_modifying_relations = {}

        # Inter-process signaling:
        # The `base_registry_signaling` sequence indicates the whole registry
        # must be reloaded.
        # The `base_cache_signaling sequence` indicates all caches must be
        # invalidated (i.e. cleared).
        self.registry_sequence = None
        self.cache_sequence = None

        # Flags indicating invalidation of the registry or the cache.
        self._invalidation_flags = threading.local()

        # probe database capabilities once at registry creation
        with closing(self.cursor()) as cr:
            self.has_unaccent = odoo.modules.db.has_unaccent(cr)
            self.has_trigram = odoo.modules.db.has_trigram(cr)
|
||||
|
||||
    @classmethod
    @locked
    def delete(cls, db_name):
        """ Delete the registry linked to a given database (no-op when the
        database has no registry). """
        if db_name in cls.registries:  # pylint: disable=unsupported-membership-test
            del cls.registries[db_name]  # pylint: disable=unsupported-delete-operation
|
||||
|
||||
    @classmethod
    @locked
    def delete_all(cls):
        """ Delete all the registries. """
        cls.registries.clear()
|
||||
|
||||
#
|
||||
# Mapping abstract methods implementation
|
||||
# => mixin provides methods keys, items, values, get, __eq__, and __ne__
|
||||
#
|
||||
    def __len__(self):
        """ Return the size of the registry (number of models). """
        return len(self.models)
|
||||
|
||||
    def __iter__(self):
        """ Return an iterator over all model names. """
        return iter(self.models)
|
||||
|
||||
    def __getitem__(self, model_name):
        """ Return the model with the given name or raise KeyError if it doesn't exist."""
        return self.models[model_name]
|
||||
|
||||
    def __call__(self, model_name):
        """ Same as ``self[model_name]``. """
        return self.models[model_name]
|
||||
|
||||
    def __setitem__(self, model_name, model):
        """ Add or replace a model in the registry."""
        self.models[model_name] = model
|
||||
|
||||
    def __delitem__(self, model_name):
        """ Remove a (custom) model from the registry. """
        del self.models[model_name]
        # the custom model can inherit from mixins ('mail.thread', ...):
        # drop it from every remaining model's inheritance children
        for Model in self.models.values():
            Model._inherit_children.discard(model_name)
|
||||
|
||||
def descendants(self, model_names, *kinds):
|
||||
""" Return the models corresponding to ``model_names`` and all those
|
||||
that inherit/inherits from them.
|
||||
"""
|
||||
assert all(kind in ('_inherit', '_inherits') for kind in kinds)
|
||||
funcs = [attrgetter(kind + '_children') for kind in kinds]
|
||||
|
||||
models = OrderedSet()
|
||||
queue = deque(model_names)
|
||||
while queue:
|
||||
model = self[queue.popleft()]
|
||||
models.add(model._name)
|
||||
for func in funcs:
|
||||
queue.extend(func(model))
|
||||
return models
|
||||
|
||||
    def load(self, cr, module):
        """ Load a given module in the registry, and return the names of the
        modified models.

        At the Python level, the modules are already loaded, but not yet on a
        per-registry level. This method populates a registry with the given
        modules, i.e. it instantiates all the classes of a the given module
        and registers them in the registry.

        :param cr: database cursor used to build the models
        :param module: a graph node exposing a ``name`` attribute
        :return: names of the module's models and all their descendants
        """
        from .. import models

        # clear cache to ensure consistency, but do not signal it
        self.__cache.clear()

        lazy_property.reset_all(self)
        self._field_trigger_trees.clear()
        self._is_modifying_relations.clear()

        # Instantiate registered classes (via the MetaModel automatic discovery
        # or via explicit constructor call), and add them to the pool.
        model_names = []
        for cls in models.MetaModel.module_to_models.get(module.name, []):
            # models register themselves in self.models
            model = cls._build_model(self, cr)
            model_names.append(model._name)

        return self.descendants(model_names, '_inherit', '_inherits')
|
||||
|
||||
    @locked
    def setup_models(self, cr):
        """ Complete the setup of models.
        This must be called after loading modules and before using the ORM.

        The setup phases below are order-critical: hooks are unregistered,
        caches cleared, models prepared, then set up base/fields/complete,
        and finally hooks re-registered on a ready registry.
        """
        env = odoo.api.Environment(cr, SUPERUSER_ID, {})
        env.invalidate_all()

        # Uninstall registry hooks. Because of the condition, this only happens
        # on a fully loaded registry, and not on a registry being loaded.
        if self.ready:
            for model in env.values():
                model._unregister_hook()

        # clear cache to ensure consistency, but do not signal it
        self.__cache.clear()

        lazy_property.reset_all(self)
        self._field_trigger_trees.clear()
        self._is_modifying_relations.clear()
        self.registry_invalidated = True

        # we must setup ir.model before adding manual fields because _add_manual_models may
        # depend on behavior that is implemented through overrides, such as is_mail_thread which
        # is implemented through an override to env['ir.model']._instanciate
        env['ir.model']._prepare_setup()

        # add manual models
        if self._init_modules:
            env['ir.model']._add_manual_models()

        # prepare the setup on all models
        models = list(env.values())
        for model in models:
            model._prepare_setup()

        self.field_depends.clear()
        self.field_depends_context.clear()
        self.field_inverses.clear()

        # do the actual setup
        for model in models:
            model._setup_base()

        # temporary mapping used while setting up many2many fields
        self._m2m = defaultdict(list)
        for model in models:
            model._setup_fields()
        del self._m2m

        for model in models:
            model._setup_complete()

        # determine field_depends and field_depends_context
        for model in models:
            for field in model._fields.values():
                depends, depends_context = field.get_depends(model)
                self.field_depends[field] = tuple(depends)
                self.field_depends_context[field] = tuple(depends_context)

        # clean the lazy_property again in case they are cached by another ongoing registry readonly request
        lazy_property.reset_all(self)

        # Reinstall registry hooks. Because of the condition, this only happens
        # on a fully loaded registry, and not on a registry being loaded.
        if self.ready:
            for model in env.values():
                model._register_hook()
            env.flush_all()
|
||||
|
||||
    @lazy_property
    def field_computed(self):
        """ Return a dict mapping each field to the fields computed by the same method.

        Fields sharing a ``compute`` method are grouped; inconsistent
        ``compute_sudo``/``precompute`` flags within a group are logged.
        """
        computed = {}
        for model_name, Model in self.models.items():
            # group the model's computed fields by their compute method
            groups = defaultdict(list)
            for field in Model._fields.values():
                if field.compute:
                    computed[field] = group = groups[field.compute]
                    group.append(field)
            for fields in groups.values():
                if len({field.compute_sudo for field in fields}) > 1:
                    _logger.warning("%s: inconsistent 'compute_sudo' for computed fields: %s",
                                    model_name, ", ".join(field.name for field in fields))
                if len({field.precompute for field in fields}) > 1:
                    _logger.warning("%s: inconsistent 'precompute' for computed fields: %s",
                                    model_name, ", ".join(field.name for field in fields))
        return computed
|
||||
|
||||
def get_trigger_tree(self, fields: list, select=bool) -> "TriggerTree":
|
||||
""" Return the trigger tree to traverse when ``fields`` have been modified.
|
||||
The function ``select`` is called on every field to determine which fields
|
||||
should be kept in the tree nodes. This enables to discard some unnecessary
|
||||
fields from the tree nodes.
|
||||
"""
|
||||
trees = [
|
||||
self.get_field_trigger_tree(field)
|
||||
for field in fields
|
||||
if field in self._field_triggers
|
||||
]
|
||||
return TriggerTree.merge(trees, select)
|
||||
|
||||
def get_dependent_fields(self, field):
|
||||
""" Return an iterable on the fields that depend on ``field``. """
|
||||
if field not in self._field_triggers:
|
||||
return ()
|
||||
|
||||
return (
|
||||
dependent
|
||||
for tree in self.get_field_trigger_tree(field).depth_first()
|
||||
for dependent in tree.root
|
||||
)
|
||||
|
||||
    def _discard_fields(self, fields: list):
        """ Discard the given fields from the registry's internal data structures. """
        for f in fields:
            # tests usually don't reload the registry, so when they create
            # custom fields those may not have the entire dependency setup, and
            # may be missing from these maps
            self.field_depends.pop(f, None)

        # discard fields from field triggers: drop the lazy_property value and
        # the derived per-field caches so they get recomputed on next access
        self.__dict__.pop('_field_triggers', None)
        self._field_trigger_trees.clear()
        self._is_modifying_relations.clear()

        # discard fields from field inverses
        self.field_inverses.discard_keys_and_values(fields)
|
||||
|
||||
    def get_field_trigger_tree(self, field) -> "TriggerTree":
        """ Return the trigger tree of a field by computing it from the transitive
        closure of field triggers.

        The result is cached in ``self._field_trigger_trees``.
        """
        try:
            return self._field_trigger_trees[field]
        except KeyError:
            pass

        triggers = self._field_triggers

        if field not in triggers:
            return TriggerTree()

        def transitive_triggers(field, prefix=(), seen=()):
            # `seen` guards against cycles in the dependency graph
            if field in seen or field not in triggers:
                return
            for path, targets in triggers[field].items():
                full_path = concat(prefix, path)
                yield full_path, targets
                for target in targets:
                    yield from transitive_triggers(target, full_path, seen + (field,))

        def concat(seq1, seq2):
            # collapse a many2one immediately followed by its inverse one2many:
            # following both hops returns to the same records
            if seq1 and seq2:
                f1, f2 = seq1[-1], seq2[0]
                if (
                    f1.type == 'many2one' and f2.type == 'one2many'
                    and f1.name == f2.inverse_name
                    and f1.model_name == f2.comodel_name
                    and f1.comodel_name == f2.model_name
                ):
                    return concat(seq1[:-1], seq2[1:])
            return seq1 + seq2

        # build the tree node by node, merging targets that share a path
        tree = TriggerTree()
        for path, targets in transitive_triggers(field):
            current = tree
            for label in path:
                current = current.increase(label)
            if current.root:
                current.root.update(targets)
            else:
                current.root = OrderedSet(targets)

        self._field_trigger_trees[field] = tree

        return tree
|
||||
|
||||
    @lazy_property
    def _field_triggers(self):
        """ Return the field triggers, i.e., the inverse of field dependencies,
        as a dictionary like ``{field: {path: fields}}``, where ``field`` is a
        dependency, ``path`` is a sequence of fields to inverse and ``fields``
        is a collection of fields that depend on ``field``.
        """
        triggers = defaultdict(lambda: defaultdict(OrderedSet))

        for Model in self.models.values():
            if Model._abstract:
                continue
            for field in Model._fields.values():
                try:
                    dependencies = list(field.resolve_depends(self))
                except Exception:
                    # dependencies of custom fields may not exist; ignore that case
                    if not field.base_field.manual:
                        raise
                else:
                    for dependency in dependencies:
                        # the last element is the dependency itself; the prefix
                        # (reversed) is the path to inverse to reach `field`
                        *path, dep_field = dependency
                        triggers[dep_field][tuple(reversed(path))].add(field)

        return triggers
|
||||
|
||||
    def is_modifying_relations(self, field):
        """ Return whether ``field`` has dependent fields on some records, and
        that modifying ``field`` might change the dependent records.

        The result is memoized in ``self._is_modifying_relations``.
        """
        try:
            return self._is_modifying_relations[field]
        except KeyError:
            # True when the field triggers something and either the field
            # itself or one of its dependents is relational or has inverses
            result = field in self._field_triggers and (
                field.relational or self.field_inverses[field] or any(
                    dep.relational or self.field_inverses[dep]
                    for dep in self.get_dependent_fields(field)
                )
            )
            self._is_modifying_relations[field] = result
            return result
|
||||
|
||||
def post_init(self, func, *args, **kwargs):
|
||||
""" Register a function to call at the end of :meth:`~.init_models`. """
|
||||
self._post_init_queue.append(partial(func, *args, **kwargs))
|
||||
|
||||
    def post_constraint(self, func, *args, **kwargs):
        """ Call the given function, and delay it if it fails during an upgrade.

        Failed constraints are queued in ``self._constraint_queue`` and retried
        by :meth:`finalize_constraints`.
        """
        try:
            if (func, args, kwargs) not in self._constraint_queue:
                # Module A may try to apply a constraint and fail but another module B inheriting
                # from Module A may try to reapply the same constraint and succeed, however the
                # constraint would already be in the _constraint_queue and would be executed again
                # at the end of the registry cycle, this would fail (already-existing constraint)
                # and generate an error, therefore a constraint should only be applied if it's
                # not already marked as "to be applied".
                func(*args, **kwargs)
        except Exception as e:
            # log at error level only during install; during upgrade the
            # failure may be transient, so queue it for a later retry
            if self._is_install:
                _schema.error(*e.args)
            else:
                _schema.info(*e.args)
                self._constraint_queue.append((func, args, kwargs))
|
||||
|
||||
    def finalize_constraints(self):
        """ Call the delayed functions from above (queued by
        :meth:`post_constraint`). """
        while self._constraint_queue:
            func, args, kwargs = self._constraint_queue.popleft()
            try:
                func(*args, **kwargs)
            except Exception as e:
                # warn only, this is not a deployment showstopper, and
                # can sometimes be a transient error
                _schema.warning(*e.args)
|
||||
|
||||
    def init_models(self, cr, model_names, context, install=True):
        """ Initialize a list of models (given by their name). Call methods
        ``_auto_init`` and ``init`` on each model to create or update the
        database tables supporting the models.

        The ``context`` may contain the following items:
         - ``module``: the name of the module being installed/updated, if any;
         - ``update_custom_fields``: whether custom fields should be updated.

        :param cr: database cursor
        :param model_names: names of the models to initialize
        :param dict context: environment context (see above)
        :param install: whether this runs as part of a module install
        """
        if not model_names:
            return

        if 'module' in context:
            _logger.info('module %s: creating or updating database tables', context['module'])
        elif context.get('models_to_check', False):
            _logger.info("verifying fields for every extended model")

        env = odoo.api.Environment(cr, SUPERUSER_ID, context)
        models = [env[model_name] for model_name in model_names]

        try:
            # transient state used by post_init()/add_foreign_key() during init
            self._post_init_queue = deque()
            self._foreign_keys = {}
            self._is_install = install

            for model in models:
                model._auto_init()
                model.init()

            # reflect model metadata into the ir.model* tables
            env['ir.model']._reflect_models(model_names)
            env['ir.model.fields']._reflect_fields(model_names)
            env['ir.model.fields.selection']._reflect_selections(model_names)
            env['ir.model.constraint']._reflect_constraints(model_names)

            self._ordinary_tables = None

            while self._post_init_queue:
                func = self._post_init_queue.popleft()
                func()

            self.check_indexes(cr, model_names)
            self.check_foreign_keys(cr)

            env.flush_all()

            # make sure all tables are present
            self.check_tables_exist(cr)

        finally:
            # always drop the transient init state, even on failure
            del self._post_init_queue
            del self._foreign_keys
            del self._is_install
|
||||
|
||||
    def check_indexes(self, cr, model_names):
        """ Create or drop column indexes for the given models.

        Only stored column fields of non-abstract, auto models are considered.
        Missing declared indexes are created; undeclared existing indexes are
        kept (with an info log), not dropped.
        """
        expected = [
            (f"{Model._table}_{field.name}_index", Model._table, field, getattr(field, 'unaccent', False))
            for model_name in model_names
            for Model in [self.models[model_name]]
            if Model._auto and not Model._abstract
            for field in Model._fields.values()
            if field.column_type and field.store
        ]
        if not expected:
            return

        # retrieve existing indexes with their corresponding table
        cr.execute("SELECT indexname, tablename FROM pg_indexes WHERE indexname IN %s",
                   [tuple(row[0] for row in expected)])
        existing = dict(cr.fetchall())

        for indexname, tablename, field, unaccent in expected:
            column_expression = f'"{field.name}"'
            index = field.index
            assert index in ('btree', 'btree_not_null', 'trigram', True, False, None)
            # translated fields only get trigram indexes; trigram requires the
            # pg_trgm extension (self.has_trigram)
            if index and indexname not in existing and \
                    ((not field.translate and index != 'trigram') or (index == 'trigram' and self.has_trigram)):

                if index == 'trigram':
                    if field.translate:
                        # index the JSONB values of the translation dict
                        column_expression = f'''(jsonb_path_query_array({column_expression}, '$.*')::text)'''
                    # add `unaccent` to the trigram index only because the
                    # trigram indexes are mainly used for (i/=)like search and
                    # unaccent is added only in these cases when searching
                    if unaccent and self.has_unaccent:
                        if self.has_unaccent == FunctionStatus.INDEXABLE:
                            column_expression = get_unaccent_wrapper(cr)(column_expression)
                        else:
                            warnings.warn(
                                "PostgreSQL function 'unaccent' is present but not immutable, "
                                "therefore trigram indexes may not be effective.",
                            )
                    expression = f'{column_expression} gin_trgm_ops'
                    method = 'gin'
                    where = ''
                else:  # index in ['btree', 'btree_not_null', True]
                    expression = f'{column_expression}'
                    method = 'btree'
                    where = f'{column_expression} IS NOT NULL' if index == 'btree_not_null' else ''
                try:
                    # savepoint so one failing index does not abort the others
                    with cr.savepoint(flush=False):
                        sql.create_index(cr, indexname, tablename, [expression], method, where)
                except psycopg2.OperationalError:
                    _schema.error("Unable to add index for %s", self)

            elif not index and tablename == existing.get(indexname):
                _schema.info("Keep unexpected index %s on table %s", indexname, tablename)
|
||||
|
||||
def add_foreign_key(self, table1, column1, table2, column2, ondelete,
|
||||
model, module, force=True):
|
||||
""" Specify an expected foreign key. """
|
||||
key = (table1, column1)
|
||||
val = (table2, column2, ondelete, model, module)
|
||||
if force:
|
||||
self._foreign_keys[key] = val
|
||||
else:
|
||||
self._foreign_keys.setdefault(key, val)
|
||||
|
||||
def check_foreign_keys(self, cr):
|
||||
""" Create or update the expected foreign keys. """
|
||||
if not self._foreign_keys:
|
||||
return
|
||||
|
||||
# determine existing foreign keys on the tables
|
||||
query = """
|
||||
SELECT fk.conname, c1.relname, a1.attname, c2.relname, a2.attname, fk.confdeltype
|
||||
FROM pg_constraint AS fk
|
||||
JOIN pg_class AS c1 ON fk.conrelid = c1.oid
|
||||
JOIN pg_class AS c2 ON fk.confrelid = c2.oid
|
||||
JOIN pg_attribute AS a1 ON a1.attrelid = c1.oid AND fk.conkey[1] = a1.attnum
|
||||
JOIN pg_attribute AS a2 ON a2.attrelid = c2.oid AND fk.confkey[1] = a2.attnum
|
||||
WHERE fk.contype = 'f' AND c1.relname IN %s
|
||||
"""
|
||||
cr.execute(query, [tuple({table for table, column in self._foreign_keys})])
|
||||
existing = {
|
||||
(table1, column1): (name, table2, column2, deltype)
|
||||
for name, table1, column1, table2, column2, deltype in cr.fetchall()
|
||||
}
|
||||
|
||||
# create or update foreign keys
|
||||
for key, val in self._foreign_keys.items():
|
||||
table1, column1 = key
|
||||
table2, column2, ondelete, model, module = val
|
||||
deltype = sql._CONFDELTYPES[ondelete.upper()]
|
||||
spec = existing.get(key)
|
||||
if spec is None:
|
||||
sql.add_foreign_key(cr, table1, column1, table2, column2, ondelete)
|
||||
conname = sql.get_foreign_keys(cr, table1, column1, table2, column2, ondelete)[0]
|
||||
model.env['ir.model.constraint']._reflect_constraint(model, conname, 'f', None, module)
|
||||
elif (spec[1], spec[2], spec[3]) != (table2, column2, deltype):
|
||||
sql.drop_constraint(cr, table1, spec[0])
|
||||
sql.add_foreign_key(cr, table1, column1, table2, column2, ondelete)
|
||||
conname = sql.get_foreign_keys(cr, table1, column1, table2, column2, ondelete)[0]
|
||||
model.env['ir.model.constraint']._reflect_constraint(model, conname, 'f', None, module)
|
||||
|
||||
    def check_tables_exist(self, cr):
        """
        Verify that all tables are present and try to initialize those that are missing.
        """
        # superuser environment: table creation must not be restricted by ACLs
        env = odoo.api.Environment(cr, SUPERUSER_ID, {})
        # map SQL table name -> model name, restricted to models that own a
        # real table (abstract models and SQL-backed views are excluded)
        table2model = {
            model._table: name
            for name, model in env.registry.items()
            if not model._abstract and model._table_query is None
        }
        missing_tables = set(table2model).difference(existing_tables(cr, table2model))

        if missing_tables:
            missing = {table2model[table] for table in missing_tables}
            _logger.info("Models have no table: %s.", ", ".join(missing))
            # recreate missing tables by re-running each model's init()
            for name in missing:
                _logger.info("Recreate table of model %s.", name)
                env[name].init()
            env.flush_all()
            # check again, and log errors if tables are still missing
            missing_tables = set(table2model).difference(existing_tables(cr, table2model))
            for table in missing_tables:
                _logger.error("Model %s has no table.", table2model[table])
||||
    def _clear_cache(self):
        """ Clear the cache and mark it as invalidated. """
        self.__cache.clear()
        # the flag lets signal_changes() broadcast the invalidation to
        # other processes through the database sequences
        self.cache_invalidated = True
||||
def clear_caches(self):
|
||||
""" Clear the caches associated to methods decorated with
|
||||
``tools.ormcache`` or ``tools.ormcache_multi`` for all the models.
|
||||
"""
|
||||
for model in self.models.values():
|
||||
model.clear_caches()
|
||||
|
||||
    def is_an_ordinary_table(self, model):
        """ Return whether the given model has an ordinary table. """
        if self._ordinary_tables is None:
            # lazily compute the set once per registry: relkind 'r' keeps
            # only plain tables (no views, no foreign tables) in the
            # default 'public' schema
            cr = model.env.cr
            query = """
                SELECT c.relname
                  FROM pg_class c
                  JOIN pg_namespace n ON (n.oid = c.relnamespace)
                 WHERE c.relname IN %s
                   AND c.relkind = 'r'
                   AND n.nspname = 'public'
            """
            tables = tuple(m._table for m in self.models.values())
            cr.execute(query, [tables])
            self._ordinary_tables = {row[0] for row in cr.fetchall()}

        return model._table in self._ordinary_tables
|
||||
    @property
    def registry_invalidated(self):
        """ Determine whether the current thread has modified the registry. """
        # stored on self._invalidation_flags (per-thread holder, per the
        # docstring above); defaults to False when never set
        return getattr(self._invalidation_flags, 'registry', False)

    @registry_invalidated.setter
    def registry_invalidated(self, value):
        # record the flag for the current thread
        self._invalidation_flags.registry = value
||||
    @property
    def cache_invalidated(self):
        """ Determine whether the current thread has modified the cache. """
        # stored on self._invalidation_flags (per-thread holder, per the
        # docstring above); defaults to False when never set
        return getattr(self._invalidation_flags, 'cache', False)

    @cache_invalidated.setter
    def cache_invalidated(self, value):
        # record the flag for the current thread
        self._invalidation_flags.cache = value
||||
    def setup_signaling(self):
        """ Setup the inter-process signaling on this registry. """
        if self.in_test_mode():
            # no cross-process signaling while running tests
            return

        with self.cursor() as cr:
            # The `base_registry_signaling` sequence indicates when the registry
            # must be reloaded.
            # The `base_cache_signaling` sequence indicates when all caches must
            # be invalidated (i.e. cleared).
            cr.execute("SELECT sequence_name FROM information_schema.sequences WHERE sequence_name='base_registry_signaling'")
            if not cr.fetchall():
                # first use on this database: create both sequences and bump
                # them once so that last_value is defined
                cr.execute("CREATE SEQUENCE base_registry_signaling INCREMENT BY 1 START WITH 1")
                cr.execute("SELECT nextval('base_registry_signaling')")
                cr.execute("CREATE SEQUENCE base_cache_signaling INCREMENT BY 1 START WITH 1")
                cr.execute("SELECT nextval('base_cache_signaling')")

            # remember the current counters to detect later changes made by
            # other processes (see check_signaling)
            cr.execute(""" SELECT base_registry_signaling.last_value,
                                  base_cache_signaling.last_value
                           FROM base_registry_signaling, base_cache_signaling""")
            self.registry_sequence, self.cache_sequence = cr.fetchone()
            _logger.debug("Multiprocess load registry signaling: [Registry: %s] [Cache: %s]",
                          self.registry_sequence, self.cache_sequence)
||||
    def check_signaling(self):
        """ Check whether the registry has changed, and performs all necessary
        operations to update the registry. Return an up-to-date registry.

        NOTE: the returned registry may be a *different* object than ``self``
        (when a reload was signaled); callers must use the return value.
        """
        if self.in_test_mode():
            return self

        with closing(self.cursor()) as cr:
            # read the current values of both signaling sequences
            cr.execute(""" SELECT base_registry_signaling.last_value,
                                  base_cache_signaling.last_value
                           FROM base_registry_signaling, base_cache_signaling""")
            r, c = cr.fetchone()
            _logger.debug("Multiprocess signaling check: [Registry - %s -> %s] [Cache - %s -> %s]",
                          self.registry_sequence, r, self.cache_sequence, c)
            # Check if the model registry must be reloaded
            if self.registry_sequence != r:
                _logger.info("Reloading the model registry after database signaling.")
                # rebind self to the freshly loaded registry
                self = Registry.new(self.db_name)
            # Check if the model caches must be invalidated.
            elif self.cache_sequence != c:
                _logger.info("Invalidating all model caches after database signaling.")
                self.clear_caches()
                # prevent re-signaling the clear_caches() above, or any residual one that
                # would be inherited from the master process (first request in pre-fork mode)
                self.cache_invalidated = False
            # record the observed counters so the next check only reacts to
            # newer signals
            self.registry_sequence = r
            self.cache_sequence = c

        return self
|
||||
    def signal_changes(self):
        """ Notifies other processes if registry or cache has been invalidated. """
        if self.registry_invalidated and not self.in_test_mode():
            _logger.info("Registry changed, signaling through the database")
            with closing(self.cursor()) as cr:
                # bump the registry sequence; other processes will detect the
                # change in check_signaling() and reload their registry
                cr.execute("select nextval('base_registry_signaling')")
                self.registry_sequence = cr.fetchone()[0]

        # no need to notify cache invalidation in case of registry invalidation,
        # because reloading the registry implies starting with an empty cache
        elif self.cache_invalidated and not self.in_test_mode():
            _logger.info("At least one model cache has been invalidated, signaling through the database.")
            with closing(self.cursor()) as cr:
                # bump the cache sequence; other processes will clear their caches
                cr.execute("select nextval('base_cache_signaling')")
                self.cache_sequence = cr.fetchone()[0]

        # both flags are consumed whether or not anything was signaled
        self.registry_invalidated = False
        self.cache_invalidated = False
||||
    def reset_changes(self):
        """ Reset the registry and cancel all invalidations. """
        if self.registry_invalidated:
            # discard in-memory registry changes by re-running model setup
            with closing(self.cursor()) as cr:
                self.setup_models(cr)
                self.registry_invalidated = False
        if self.cache_invalidated:
            # drop cached values instead of propagating the invalidation
            self.__cache.clear()
            self.cache_invalidated = False
||||
    @contextmanager
    def manage_changes(self):
        """ Context manager to signal/discard registry and cache invalidations. """
        try:
            yield self
            # success: propagate invalidations to the other processes
            self.signal_changes()
        except Exception:
            # failure: cancel pending invalidations, then let the error propagate
            self.reset_changes()
            raise
||||
    def in_test_mode(self):
        """ Test whether the registry is in 'test' mode. """
        # test mode is characterized by a shared test cursor
        # (set by enter_test_mode, reset by leave_test_mode)
        return self.test_cr is not None
||||
    def enter_test_mode(self, cr):
        """ Enter the 'test' mode, where one cursor serves several requests. """
        assert self.test_cr is None
        self.test_cr = cr
        # serialize access to the shared test cursor across threads
        self.test_lock = threading.RLock()
        # swap the class-level registry lock for a no-op one while tests run;
        # the real lock is stashed so leave_test_mode can restore it
        assert Registry._saved_lock is None
        Registry._saved_lock = Registry._lock
        Registry._lock = DummyRLock()
||||
    def leave_test_mode(self):
        """ Leave the test mode. """
        assert self.test_cr is not None
        self.test_cr = None
        self.test_lock = None
        # restore the real class-level lock saved by enter_test_mode
        assert Registry._saved_lock is not None
        Registry._lock = Registry._saved_lock
        Registry._saved_lock = None
||||
    def cursor(self):
        """ Return a new cursor for the database. The cursor itself may be used
        as a context manager to commit/rollback and close automatically.
        """
        if self.test_cr is not None:
            # in test mode we use a proxy object that uses 'self.test_cr' underneath
            return TestCursor(self.test_cr, self.test_lock, current_test=odoo.modules.module.current_test)
        return self._db.cursor()
|
||||
|
||||
class DummyRLock(object):
    """ No-op stand-in for a reentrant lock, used while running rpc and js tests. """

    def acquire(self):
        """ Pretend to take the lock; nothing is actually locked. """
        pass

    def release(self):
        """ Pretend to release the lock. """
        pass

    def __enter__(self):
        self.acquire()

    def __exit__(self, type, value, traceback):
        self.release()
|
||||
|
||||
class TriggerTree(dict):
    """ The triggers of a field F form a tree: the node fields (``root``)
    depend directly on F, and each edge is labeled with the field to inverse
    in order to find out which records to recompute.

    For instance, assume that G depends on F, H depends on X.F, I depends on
    W.X.F, and J depends on Y.F. The triggers of F will be the tree:

                                 [G]
                               X/   \\Y
                             [H]     [J]
                           W/
                         [I]

    This tree provides perfect support for the trigger mechanism:
    when F is modified on records,
    - mark G to recompute on records,
    - mark H to recompute on inverse(X, records),
    - mark I to recompute on inverse(W, inverse(X, records)),
    - mark J to recompute on inverse(Y, records).
    """
    __slots__ = ['root']

    # pylint: disable=keyword-arg-before-vararg
    def __init__(self, root=(), *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.root = root

    def __bool__(self):
        # a tree is "truthy" when it carries root fields or has any subtree
        return bool(self.root) or len(self) > 0

    def increase(self, key):
        """ Return the subtree for ``key``, creating an empty one if missing. """
        if key in self:
            return self[key]
        subtree = self[key] = TriggerTree()
        return subtree

    def depth_first(self):
        """ Yield this node first, then every descendant node (pre-order). """
        yield self
        for child in self.values():
            yield from child.depth_first()

    @classmethod
    def merge(cls, trees: list, select=bool) -> "TriggerTree":
        """ Merge trigger trees into a single tree. The function ``select`` is
        called on every field to determine which fields should be kept in the
        tree nodes. This enables to discard some fields from the tree nodes.
        """
        collected = OrderedSet()        # union of the root fields of all trees
        by_label = defaultdict(list)    # subtrees to merge, grouped by edge label

        for tree in trees:
            collected.update(tree.root)
            for label, subtree in tree.items():
                by_label[label].append(subtree)

        # the root node keeps only the collected fields for which select is true
        merged = cls([field for field in collected if select(field)])
        for label, subtrees in by_label.items():
            child = cls.merge(subtrees, select)
            if child:
                merged[label] = child

        return merged
||||
Loading…
Add table
Add a link
Reference in a new issue