17.0 vanilla

This commit is contained in:
Ernad Husremovic 2025-10-03 18:05:14 +02:00
parent 2e65bf056a
commit df627a6bba
328 changed files with 578149 additions and 759311 deletions

View file

@ -25,8 +25,9 @@ def initialize(cr):
and ir_model_data entries.
"""
f = odoo.modules.get_module_resource('base', 'data', 'base_data.sql')
if not f:
try:
f = odoo.tools.misc.file_path('base/data/base_data.sql')
except FileNotFoundError:
m = "File not found: 'base.sql' (provided by module 'base')."
_logger.critical(m)
raise IOError(m)
@ -83,7 +84,7 @@ def initialize(cr):
cr.execute("""
SELECT m.name FROM ir_module_module m
WHERE m.auto_install
AND state != 'to install'
AND state not in ('to install', 'uninstallable')
AND NOT EXISTS (
SELECT 1 FROM ir_module_module_dependency d
JOIN ir_module_module mdep ON (d.name = mdep.name)

View file

@ -3,6 +3,7 @@
""" Modules dependency graph. """
import functools
import itertools
import logging
@ -11,6 +12,15 @@ import odoo.tools as tools
_logger = logging.getLogger(__name__)
@functools.lru_cache(maxsize=1)
def _ignored_modules(cr):
    # Names of modules to ignore when reporting "not installable, skipped"
    # while building the dependency graph: the Studio export module plus any
    # module flagged `imported` in ir_module_module (the column may not exist
    # on databases that never imported data modules, hence the guard).
    # NOTE(review): lru_cache is keyed on the cursor object and keeps that
    # cursor referenced while cached (maxsize=1, so it is evicted by the next
    # distinct cursor) — presumably intended as a per-load memoization; confirm.
    result = ['studio_customization']
    if tools.sql.column_exists(cr, 'ir_module_module', 'imported'):
        cr.execute('SELECT name FROM ir_module_module WHERE imported')
        result += [m[0] for m in cr.fetchall()]
    return result
class Graph(dict):
""" Modules dependency graph.
@ -61,7 +71,7 @@ class Graph(dict):
info = odoo.modules.module.get_manifest(module)
if info and info['installable']:
packages.append((module, info)) # TODO directly a dict, like in get_modules_with_version
elif module != 'studio_customization':
elif module not in _ignored_modules(cr):
_logger.warning('module %s: not installable, skipped', module)
dependencies = dict([(p, info['depends']) for p, info in packages])

View file

@ -25,7 +25,7 @@ _logger = logging.getLogger(__name__)
_test_logger = logging.getLogger('odoo.tests')
def load_data(cr, idref, mode, kind, package):
def load_data(env, idref, mode, kind, package):
"""
kind: data, demo, test, init_xml, update_xml, demo_xml.
@ -70,14 +70,14 @@ def load_data(cr, idref, mode, kind, package):
noupdate = False
if kind in ('demo', 'demo_xml') or (filename.endswith('.csv') and kind in ('init', 'init_xml')):
noupdate = True
tools.convert_file(cr, package.name, filename, idref, mode, noupdate, kind)
tools.convert_file(env, package.name, filename, idref, mode, noupdate, kind)
finally:
if kind in ('demo', 'test'):
threading.current_thread().testing = False
return bool(filename)
def load_demo(cr, package, idref, mode):
def load_demo(env, package, idref, mode):
"""
Loads demo data for the specified package.
"""
@ -87,8 +87,8 @@ def load_demo(cr, package, idref, mode):
try:
if package.data.get('demo') or package.data.get('demo_xml'):
_logger.info("Module %s: loading demo", package.name)
with cr.savepoint(flush=False):
load_data(cr, idref, mode, kind='demo', package=package)
with env.cr.savepoint(flush=False):
load_data(env(su=True), idref, mode, kind='demo', package=package)
return True
except Exception: # noqa: BLE001
# If we could not install demo data for this module
@ -96,7 +96,6 @@ def load_demo(cr, package, idref, mode):
"Module %s demo data failed to install, installed without demo data",
package.name, exc_info=True)
env = api.Environment(cr, SUPERUSER_ID, {})
todo = env.ref('base.demo_failure_todo', raise_if_not_found=False)
Failure = env.get('ir.demo_failure')
if todo and Failure is not None:
@ -105,31 +104,30 @@ def load_demo(cr, package, idref, mode):
return False
def force_demo(env):
    """
    Forces the `demo` flag on all modules, and installs demo data for all installed modules.

    :param env: an Odoo Environment; its cursor is used for the raw SQL and the
        environment itself is handed to :func:`load_demo` for each module.
    """
    # Defect fixed: the span interleaved the pre-change (cr-based) and
    # post-change (env-based) diff lines; this is the coherent post-change body.
    graph = odoo.modules.graph.Graph()
    env.cr.execute('UPDATE ir_module_module SET demo=True')
    env.cr.execute(
        "SELECT name FROM ir_module_module WHERE state IN ('installed', 'to upgrade', 'to remove')"
    )
    module_list = [name for (name,) in env.cr.fetchall()]
    graph.add_modules(env.cr, module_list, ['demo'])

    for package in graph:
        load_demo(env, package, {}, 'init')

    # the raw UPDATE above bypassed the ORM cache, so invalidate the field
    env['ir.module.module'].invalidate_model(['demo'])
    env['res.groups']._update_user_groups_view()
def load_module_graph(cr, graph, status=None, perform_checks=True,
def load_module_graph(env, graph, status=None, perform_checks=True,
skip_modules=None, report=None, models_to_check=None):
"""Migrates+Updates or Installs all module nodes from ``graph``
:param cr:
:param env:
:param graph: graph of module nodes to load
:param status: deprecated parameter, unused, left to avoid changing signature in 8.0
:param perform_checks: whether module descriptors should be checked for validity (prints warnings
@ -144,15 +142,15 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
processed_modules = []
loaded_modules = []
registry = odoo.registry(cr.dbname)
migrations = odoo.modules.migration.MigrationManager(cr, graph)
registry = env.registry
migrations = odoo.modules.migration.MigrationManager(env.cr, graph)
module_count = len(graph)
_logger.info('loading %d modules...', module_count)
# register, instantiate and initialize models for each modules
t0 = time.time()
loading_extra_query_count = odoo.sql_db.sql_counter
loading_cursor_query_count = cr.sql_log_count
loading_cursor_query_count = env.cr.sql_log_count
models_updated = set()
@ -164,7 +162,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
continue
module_t0 = time.time()
module_cursor_query_count = cr.sql_log_count
module_cursor_query_count = env.cr.sql_log_count
module_extra_query_count = odoo.sql_db.sql_counter
needs_update = (
@ -181,10 +179,9 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
if needs_update:
if not new_install:
if package.name != 'base':
registry.setup_models(cr)
registry.setup_models(env.cr)
migrations.migrate_module(package, 'pre')
if package.name != 'base':
env = api.Environment(cr, SUPERUSER_ID, {})
env.flush_all()
load_openerp_module(package.name)
@ -193,10 +190,10 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
py_module = sys.modules['odoo.addons.%s' % (module_name,)]
pre_init = package.info.get('pre_init_hook')
if pre_init:
registry.setup_models(cr)
getattr(py_module, pre_init)(cr)
registry.setup_models(env.cr)
getattr(py_module, pre_init)(env)
model_names = registry.load(cr, package)
model_names = registry.load(env.cr, package)
mode = 'update'
if hasattr(package, 'init') or package.state == 'to install':
@ -206,8 +203,8 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
if needs_update:
models_updated |= set(model_names)
models_to_check -= set(model_names)
registry.setup_models(cr)
registry.init_models(cr, model_names, {'module': package.name}, new_install)
registry.setup_models(env.cr)
registry.init_models(env.cr, model_names, {'module': package.name}, new_install)
elif package.state != 'to remove':
# The current module has simply been loaded. The models extended by this module
# and for which we updated the schema, must have their schema checked again.
@ -219,7 +216,6 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
idref = {}
if needs_update:
env = api.Environment(cr, SUPERUSER_ID, {})
# Can't put this line out of the loop: ir.module.module will be
# registered by init_models() above.
module = env['ir.module.module'].browse(module_id)
@ -230,9 +226,9 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
if package.state == 'to upgrade':
# upgrading the module information
module.write(module.get_values_from_terp(package.data))
load_data(cr, idref, mode, kind='data', package=package)
demo_loaded = package.dbdemo = load_demo(cr, package, idref, mode)
cr.execute('update ir_module_module set demo=%s where id=%s', (demo_loaded, module_id))
load_data(env, idref, mode, kind='data', package=package)
demo_loaded = package.dbdemo = load_demo(env, package, idref, mode)
env.cr.execute('update ir_module_module set demo=%s where id=%s', (demo_loaded, module_id))
module.invalidate_model(['demo'])
migrations.migrate_module(package, 'post')
@ -248,7 +244,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
if new_install:
post_init = package.info.get('post_init_hook')
if post_init:
getattr(py_module, post_init)(cr, registry)
getattr(py_module, post_init)(env)
if mode == 'update':
# validate the views that have not been checked yet
@ -257,14 +253,14 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
# need to commit any modification the module's installation or
# update made to the schema or data so the tests can run
# (separately in their own transaction)
cr.commit()
env.cr.commit()
concrete_models = [model for model in model_names if not registry[model]._abstract]
if concrete_models:
cr.execute("""
env.cr.execute("""
SELECT model FROM ir_model
WHERE id NOT IN (SELECT DISTINCT model_id FROM ir_model_access) AND model IN %s
""", [tuple(concrete_models)])
models = [model for [model] in cr.fetchall()]
models = [model for [model] in env.cr.fetchall()]
if models:
lines = [
f"The models {models} have no access rules in module {module_name}, consider adding some, like:",
@ -279,15 +275,12 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
test_time = test_queries = 0
test_results = None
if tools.config.options['test_enable'] and (needs_update or not updating):
env = api.Environment(cr, SUPERUSER_ID, {})
loader = odoo.tests.loader
suite = loader.make_suite([module_name], 'at_install')
if suite.countTestCases():
if not needs_update:
registry.setup_models(cr)
registry.setup_models(env.cr)
# Python tests
env['ir.http']._clear_routing_map() # force routing map to be rebuilt
tests_t0, tests_q0 = time.time(), odoo.sql_db.sql_counter
test_results = loader.run_suite(suite, module_name, global_report=report)
report.update(test_results)
@ -295,7 +288,6 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
test_queries = odoo.sql_db.sql_counter - tests_q0
# tests may have reset the environment
env = api.Environment(cr, SUPERUSER_ID, {})
module = env['ir.module.module'].browse(module_id)
if needs_update:
@ -323,7 +315,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
module_log_level, "Module %s loaded in %.2fs%s, %s queries%s",
module_name, time.time() - module_t0,
f' (incl. {test_time:.2f}s test)' if test_time else '',
cr.sql_log_count - module_cursor_query_count,
env.cr.sql_log_count - module_cursor_query_count,
f' ({", ".join(extras)})' if extras else ''
)
if test_results and not test_results.wasSuccessful():
@ -336,7 +328,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
_logger.runbot("%s modules loaded in %.2fs, %s queries (+%s extra)",
len(graph),
time.time() - t0,
cr.sql_log_count - loading_cursor_query_count,
env.cr.sql_log_count - loading_cursor_query_count,
odoo.sql_db.sql_counter - loading_extra_query_count) # extra queries: testes, notify, any other closed cursor
return loaded_modules, processed_modules
@ -355,7 +347,7 @@ def _check_module_names(cr, module_names):
incorrect_names = mod_names.difference([x['name'] for x in cr.dictfetchall()])
_logger.warning('invalid module names, ignored: %s', ", ".join(incorrect_names))
def load_marked_modules(cr, graph, states, force, progressdict, report,
def load_marked_modules(env, graph, states, force, progressdict, report,
loaded_modules, perform_checks, models_to_check=None):
"""Loads modules marked with ``states``, adding them to ``graph`` and
``loaded_modules`` and returns a list of installed/upgraded modules."""
@ -365,14 +357,14 @@ def load_marked_modules(cr, graph, states, force, progressdict, report,
processed_modules = []
while True:
cr.execute("SELECT name from ir_module_module WHERE state IN %s" ,(tuple(states),))
module_list = [name for (name,) in cr.fetchall() if name not in graph]
env.cr.execute("SELECT name from ir_module_module WHERE state IN %s", (tuple(states),))
module_list = [name for (name,) in env.cr.fetchall() if name not in graph]
if not module_list:
break
graph.add_modules(cr, module_list, force)
graph.add_modules(env.cr, module_list, force)
_logger.debug('Updating graph with %d more modules', len(module_list))
loaded, processed = load_module_graph(
cr, graph, progressdict, report=report, skip_modules=loaded_modules,
env, graph, progressdict, report=report, skip_modules=loaded_modules,
perform_checks=perform_checks, models_to_check=models_to_check
)
processed_modules.extend(processed)
@ -431,8 +423,9 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
# processed_modules: for cleanup step after install
# loaded_modules: to avoid double loading
report = registry._assertion_report
env = api.Environment(cr, SUPERUSER_ID, {})
loaded_modules, processed_modules = load_module_graph(
cr, graph, status, perform_checks=update_module,
env, graph, status, perform_checks=update_module,
report=report, models_to_check=models_to_check)
load_lang = tools.config.pop('load_language')
@ -446,7 +439,6 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
# STEP 2: Mark other modules to be loaded/updated
if update_module:
env = api.Environment(cr, SUPERUSER_ID, {})
Module = env['ir.module.module']
_logger.info('updating modules list')
Module.update_list()
@ -486,11 +478,11 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
previously_processed = -1
while previously_processed < len(processed_modules):
previously_processed = len(processed_modules)
processed_modules += load_marked_modules(cr, graph,
processed_modules += load_marked_modules(env, graph,
['installed', 'to upgrade', 'to remove'],
force, status, report, loaded_modules, update_module, models_to_check)
if update_module:
processed_modules += load_marked_modules(cr, graph,
processed_modules += load_marked_modules(env, graph,
['to install'], force, status, report,
loaded_modules, update_module, models_to_check)
@ -515,9 +507,8 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
registry.setup_models(cr)
# check that all installed modules have been loaded by the registry
env = api.Environment(cr, SUPERUSER_ID, {})
Module = env['ir.module.module']
modules = Module.search(Module._get_modules_to_load_domain(), order='name')
modules = Module.search_fetch(Module._get_modules_to_load_domain(), ['name'], order='name')
missing = [name for name in modules.mapped('name') if name not in graph]
if missing:
_logger.error("Some modules are not loaded, some dependencies or manifest may be missing: %s", missing)
@ -538,7 +529,6 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
# STEP 4: Finish and cleanup installations
if processed_modules:
env = api.Environment(cr, SUPERUSER_ID, {})
cr.execute("SELECT model from ir_model")
for (model,) in cr.fetchall():
@ -567,13 +557,12 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
cr.execute("SELECT name, id FROM ir_module_module WHERE state=%s", ('to remove',))
modules_to_remove = dict(cr.fetchall())
if modules_to_remove:
env = api.Environment(cr, SUPERUSER_ID, {})
pkgs = reversed([p for p in graph if p.name in modules_to_remove])
for pkg in pkgs:
uninstall_hook = pkg.info.get('uninstall_hook')
if uninstall_hook:
py_module = sys.modules['odoo.addons.%s' % (pkg.name,)]
getattr(py_module, uninstall_hook)(cr, registry)
getattr(py_module, uninstall_hook)(env)
env.flush_all()
Module = env['ir.module.module']
@ -602,7 +591,6 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
# STEP 6: verify custom views on every model
if update_module:
env = api.Environment(cr, SUPERUSER_ID, {})
env['res.groups']._update_user_groups_view()
View = env['ir.ui.view']
for model in registry:
@ -625,7 +613,6 @@ def load_modules(registry, force_demo=False, status=None, update_module=False):
# management of those hooks in `Registry.setup_models`: all the calls to
# setup_models() done here do not mess up with hooks, as registry.ready
# is False.
env = api.Environment(cr, SUPERUSER_ID, {})
for model in env.values():
model._register_hook()
env.flush_all()
@ -643,6 +630,10 @@ def reset_modules_state(db_name):
# of time
db = odoo.sql_db.db_connect(db_name)
with db.cursor() as cr:
cr.execute("SELECT 1 FROM information_schema.tables WHERE table_name='ir_module_module'")
if not cr.fetchall():
_logger.info('skipping reset_modules_state, ir_module_module table does not exists')
return
cr.execute(
"UPDATE ir_module_module SET state='installed' WHERE state IN ('to remove', 'to upgrade')"
)

View file

@ -8,18 +8,46 @@ import glob
import importlib.util
import logging
import os
import re
from os.path import join as opj
from odoo.modules.module import get_resource_path
import odoo.release as release
import odoo.upgrade
from odoo.tools.parse_version import parse_version
from odoo.tools.misc import file_path
_logger = logging.getLogger(__name__)
VERSION_RE = re.compile(
r"""^
# Optional prefix with Odoo version
((
6\.1|
# "x.0" version, with x >= 6.
[6-9]\.0|
# multi digits "x.0" versions
[1-9]\d+\.0|
# x.saas~y, where x >= 7 and x <= 10
(7|8|9|10)\.saas~[1-9]\d*|
# saas~x.y, where x >= 11 and y between 1 and 9
# FIXME handle version >= saas~100 (expected in year 2106)
saas~(1[1-9]|[2-9]\d+)\.[1-9]
)\.)?
# After Odoo version we allow precisely 2 or 3 parts
# note this will also allow 0.0.0 which has a special meaning
\d+\.\d+(\.\d+)?
$""",
re.VERBOSE | re.ASCII,
)
def load_script(path, module_name):
full_path = get_resource_path(*path.split(os.path.sep)) if not os.path.isabs(path) else path
full_path = file_path(path) if not os.path.isabs(path) else path
spec = importlib.util.spec_from_file_location(module_name, full_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
@ -71,23 +99,44 @@ class MigrationManager(object):
if os.path.exists(upgrade_path):
yield upgrade_path
def _verify_upgrade_version(path, version):
    """Return True when `version` names a valid version directory under `path`."""
    script_dir = opj(path, version)
    if not os.path.isdir(script_dir):
        return False
    if version == "tests":
        # the tests folder sits alongside version folders; never treat it as one
        return False
    if VERSION_RE.match(version):
        return True
    _logger.warning("Invalid version for upgrade script %r", script_dir)
    return False
def get_scripts(path):
    """Map each valid version directory under `path` to its list of *.py scripts."""
    if not path:
        return {}
    scripts = {}
    for version in os.listdir(path):
        if _verify_upgrade_version(path, version):
            scripts[version] = glob.glob(opj(path, version, '*.py'))
    return scripts
def check_path(path):
    """Resolve `path` through the addons path; return False when it does not exist."""
    try:
        resolved = file_path(path)
    except FileNotFoundError:
        return False
    return resolved
for pkg in self.graph:
if not (hasattr(pkg, 'update') or pkg.state == 'to upgrade' or
getattr(pkg, 'load_state', None) == 'to upgrade'):
continue
self.migrations[pkg.name] = {
'module': get_scripts(get_resource_path(pkg.name, 'migrations')),
'module_upgrades': get_scripts(get_resource_path(pkg.name, 'upgrades')),
'module': get_scripts(check_path(pkg.name + '/migrations')),
'module_upgrades': get_scripts(check_path(pkg.name + '/upgrades')),
}
scripts = defaultdict(list)
@ -109,8 +158,10 @@ class MigrationManager(object):
return
def convert_version(version):
    """Normalize a migration-script version to a full server-prefixed version.

    "0.0.0" is returned unchanged (it has a special multi-version meaning);
    a version with more than two dots already embeds the server serie and is
    returned as-is; otherwise the current major serie is prepended.
    """
    # Defect fixed: the span interleaved the pre-change (`>= 2` dot-count) and
    # post-change diff lines; this is the coherent post-change body.
    if version == "0.0.0":
        return version
    if version.count(".") > 2:
        return version  # the version number already contains the server version, see VERSION_RE for details
    return "%s.%s" % (release.major_version, version)
def _get_migration_versions(pkg, stage):

View file

@ -32,12 +32,14 @@ _DEFAULT_MANIFEST = {
'author': 'Odoo S.A.',
'auto_install': False,
'category': 'Uncategorized',
'configurator_snippets': {}, # website themes
'countries': [],
'data': [],
'demo': [],
'demo_xml': [],
'depends': [],
'description': '',
'external_dependencies': [],
'external_dependencies': {},
#icon: f'/{module}/static/description/icon.png', # automatic
'init_xml': [],
'installable': True,
@ -45,12 +47,12 @@ _DEFAULT_MANIFEST = {
'images_preview_theme': {}, # website themes
#license, mandatory
'live_test_url': '', # website themes
'new_page_templates': {}, # website themes
#name, mandatory
'post_init_hook': '',
'post_load': '',
'pre_init_hook': '',
'sequence': 100,
'snippet_lists': {}, # website themes
'summary': '',
'test': [],
'update_xml': [],
@ -62,99 +64,10 @@ _DEFAULT_MANIFEST = {
_logger = logging.getLogger(__name__)
# addons path as a list
# ad_paths is a deprecated alias, please use odoo.addons.__path__
@tools.lazy
def ad_paths():
warnings.warn(
'"odoo.modules.module.ad_paths" is a deprecated proxy to '
'"odoo.addons.__path__".', DeprecationWarning, stacklevel=2)
return odoo.addons.__path__
# Modules already loaded
loaded = []
class AddonsHook(object):
""" Makes modules accessible through openerp.addons.* """
def find_module(self, name, path=None):
if name.startswith('openerp.addons.') and name.count('.') == 2:
warnings.warn(
'"openerp.addons" is a deprecated alias to "odoo.addons".',
DeprecationWarning, stacklevel=2)
return self
def find_spec(self, fullname, path=None, target=None):
if fullname.startswith('openerp.addons.') and fullname.count('.') == 2:
warnings.warn(
'"openerp.addons" is a deprecated alias to "odoo.addons".',
DeprecationWarning, stacklevel=2)
return importlib.util.spec_from_loader(fullname, self)
def load_module(self, name):
assert name not in sys.modules
odoo_name = re.sub(r'^openerp.addons.(\w+)$', r'odoo.addons.\g<1>', name)
odoo_module = sys.modules.get(odoo_name)
if not odoo_module:
odoo_module = importlib.import_module(odoo_name)
sys.modules[name] = odoo_module
return odoo_module
class OdooHook(object):
""" Makes odoo package also available as openerp """
def find_module(self, name, path=None):
# openerp.addons.<identifier> should already be matched by AddonsHook,
# only framework and subdirectories of modules should match
if re.match(r'^openerp\b', name):
warnings.warn(
'openerp is a deprecated alias to odoo.',
DeprecationWarning, stacklevel=2)
return self
def find_spec(self, fullname, path=None, target=None):
# openerp.addons.<identifier> should already be matched by AddonsHook,
# only framework and subdirectories of modules should match
if re.match(r'^openerp\b', fullname):
warnings.warn(
'openerp is a deprecated alias to odoo.',
DeprecationWarning, stacklevel=2)
return importlib.util.spec_from_loader(fullname, self)
def load_module(self, name):
assert name not in sys.modules
canonical = re.sub(r'^openerp(.*)', r'odoo\g<1>', name)
if canonical in sys.modules:
mod = sys.modules[canonical]
else:
# probable failure: canonical execution calling old naming -> corecursion
mod = importlib.import_module(canonical)
# just set the original module at the new location. Don't proxy,
# it breaks *-import (unless you can find how `from a import *` lists
# what's supposed to be imported by `*`, and manage to override it)
sys.modules[name] = mod
return sys.modules[name]
class UpgradeHook(object):
"""Makes the legacy `migrations` package being `odoo.upgrade`"""
def find_module(self, name, path=None):
if re.match(r"^odoo\.addons\.base\.maintenance\.migrations\b", name):
# We can't trigger a DeprecationWarning in this case.
# In order to be cross-versions, the multi-versions upgrade scripts (0.0.0 scripts),
# the tests, and the common files (utility functions) still needs to import from the
# legacy name.
return self
def find_spec(self, fullname, path=None, target=None):
if re.match(r"^odoo\.addons\.base\.maintenance\.migrations\b", fullname):
# We can't trigger a DeprecationWarning in this case.
@ -217,8 +130,6 @@ def initialize_sys_path():
# hook deprecated module alias from openerp to odoo and "crm"-like to odoo.addons
if not getattr(initialize_sys_path, 'called', False): # only initialize once
sys.meta_path.insert(0, UpgradeHook())
sys.meta_path.insert(0, OdooHook())
sys.meta_path.insert(0, AddonsHook())
initialize_sys_path.called = True
@ -286,21 +197,19 @@ def get_resource_path(module, *args):
:rtype: str
:return: absolute path to the resource
"""
warnings.warn(
f"Since 17.0: use tools.misc.file_path instead of get_resource_path({module}, {args})",
DeprecationWarning,
)
resource_path = opj(module, *args)
try:
return file_path(resource_path)
except (FileNotFoundError, ValueError):
return False
def check_resource_path(mod_path, *args):
resource_path = opj(mod_path, *args)
try:
return file_path(resource_path)
except (FileNotFoundError, ValueError):
return False
# backwards compatibility
get_module_resource = get_resource_path
check_resource_path = get_resource_path
def get_resource_from_path(path):
"""Tries to extract the module name and the resource's relative path
@ -335,25 +244,34 @@ def get_resource_from_path(path):
return None
def get_module_icon(module):
    """Return the URL path of `module`'s icon, or base's icon when missing.

    :param module: module name (string) used to build the static resource path
    :return: URL path such as ``/<module>/static/description/icon.png``
    """
    # Defect fixed: the span contained both the old get_module_resource-based
    # body and the new file_path-based body; keep the coherent new version.
    fpath = f"{module}/static/description/icon.png"
    try:
        file_path(fpath)  # existence check via the addons path resolver
        return "/" + fpath
    except FileNotFoundError:
        return "/base/static/description/icon.png"
def get_module_icon_path(module):
    """Return the filesystem path of `module`'s icon, falling back to base's.

    NOTE(review): the old body used ``module.name`` (a record) while the new
    one interpolates ``module`` directly — assumes callers now pass a name
    string (or an object whose str() is the module name); confirm call sites.
    """
    # Defect fixed: the span contained both the old get_module_resource-based
    # body and the new file_path-based body; keep the coherent new version.
    try:
        return file_path(f"{module}/static/description/icon.png")
    except FileNotFoundError:
        return file_path("base/static/description/icon.png")
def module_manifest(path):
    """Returns path to module manifest if one can be found under `path`, else `None`."""
    # Defect fixed: the span interleaved the pre-change and post-change loop
    # bodies from the diff; this is the coherent post-change version, which
    # additionally warns on deprecated __openerp__.py manifests.
    if not path:
        return None
    for manifest_name in MANIFEST_NAMES:
        candidate = opj(path, manifest_name)
        if os.path.isfile(candidate):
            if manifest_name == '__openerp__.py':
                warnings.warn(
                    "__openerp__.py manifests are deprecated since 17.0, "
                    f"rename {candidate!r} to __manifest__.py "
                    "(valid since 10.0)",
                    category=DeprecationWarning
                )
            return candidate
def get_module_root(path):
"""
@ -394,6 +312,7 @@ def load_manifest(module, mod_path=None):
return {}
manifest = copy.deepcopy(_DEFAULT_MANIFEST)
manifest['icon'] = get_module_icon(module)
with tools.file_open(manifest_file, mode='r') as f:
@ -426,7 +345,11 @@ def load_manifest(module, mod_path=None):
elif manifest['auto_install']:
manifest['auto_install'] = set(manifest['depends'])
manifest['version'] = adapt_version(manifest['version'])
try:
manifest['version'] = adapt_version(manifest['version'])
except ValueError as e:
if manifest.get("installable", True):
raise ValueError(f"Module {module}: invalid manifest") from e
manifest['addons_path'] = normpath(opj(mod_path, os.pardir))
return manifest
@ -463,27 +386,24 @@ def load_openerp_module(module_name):
This is also used to load server-wide module (i.e. it is also used
when there is no model to register).
"""
global loaded
if module_name in loaded:
qualname = f'odoo.addons.{module_name}'
if qualname in sys.modules:
return
try:
__import__('odoo.addons.' + module_name)
__import__(qualname)
# Call the module's post-load hook. This can done before any model or
# data has been initialized. This is ok as the post-load hook is for
# server-wide (instead of registry-specific) functionalities.
info = get_manifest(module_name)
if info['post_load']:
getattr(sys.modules['odoo.addons.' + module_name], info['post_load'])()
getattr(sys.modules[qualname], info['post_load'])()
except Exception as e:
msg = "Couldn't load module %s" % (module_name)
_logger.critical(msg)
_logger.critical(e)
except Exception:
_logger.critical("Couldn't load module %s", module_name)
raise
else:
loaded.append(module_name)
def get_modules():
"""Returns the list of module names
@ -527,9 +447,18 @@ def get_modules_with_version():
def adapt_version(version):
    """Return `version` prefixed with the current serie, validating its format.

    The module-local part must look like ``x.y`` or ``x.y.z``; otherwise a
    :class:`ValueError` is raised (new validation added by this change).
    """
    # Defect fixed: the span interleaved pre-change and post-change diff
    # lines; this is the coherent post-change body with format validation.
    serie = release.major_version
    if version == serie or not version.startswith(serie + '.'):
        base_version = version
        version = '%s.%s' % (serie, version)
    else:
        base_version = version[len(serie) + 1:]
    if not re.match(r"^[0-9]+\.[0-9]+(?:\.[0-9]+)?$", base_version):
        raise ValueError(f"Invalid version {base_version!r}. Modules should have a version in format `x.y`, `x.y.z`,"
                         f" `{serie}.x.y` or `{serie}.x.y.z`.")
    return version
current_test = False

View file

@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from contextlib import suppress
import odoo
import logging
@ -17,8 +19,8 @@ def get_installed_modules(cursor):
def get_neutralization_queries(modules):
    """Yield the stripped content of each module's data/neutralize.sql, if any.

    Modules without a neutralize.sql are silently skipped (best-effort,
    via contextlib.suppress on FileNotFoundError).
    """
    # Defect fixed: the span interleaved the old get_module_resource-based
    # lookup and the new file_open-based one; keep the coherent new version.
    for module in modules:
        filename = f'{module}/data/neutralize.sql'
        with suppress(FileNotFoundError):
            with odoo.tools.misc.file_open(filename) as file:
                yield file.read().strip()

View file

@ -9,6 +9,8 @@ from collections.abc import Mapping
from contextlib import closing, contextmanager
from functools import partial
from operator import attrgetter
import inspect
import logging
import os
import threading
@ -22,8 +24,11 @@ from odoo.modules.db import FunctionStatus
from odoo.osv.expression import get_unaccent_wrapper
from .. import SUPERUSER_ID
from odoo.sql_db import TestCursor
from odoo.tools import (config, existing_tables, lazy_classproperty,
lazy_property, sql, Collector, OrderedSet)
from odoo.tools import (
config, existing_tables, lazy_classproperty,
lazy_property, sql, Collector, OrderedSet, SQL,
format_frame
)
from odoo.tools.func import locked
from odoo.tools.lru import LRU
@ -31,6 +36,24 @@ _logger = logging.getLogger(__name__)
_schema = logging.getLogger('odoo.schema')
_REGISTRY_CACHES = {
'default': 8192,
'assets': 512, # arbitrary
'templates': 1024, # arbitrary
'routing': 1024, # 2 entries per website
'routing.rewrites': 8192, # url_rewrite entries
'templates.cached_values': 2048, # arbitrary
}
# cache invalidation dependencies, as follows:
# { 'cache_key': ('cache_container_1', 'cache_container_3', ...) }
_CACHES_BY_KEY = {
'default': ('default', 'templates.cached_values'),
'assets': ('assets', 'templates.cached_values'),
'templates': ('templates', 'templates.cached_values'),
'routing': ('routing', 'routing.rewrites', 'templates.cached_values'),
}
class Registry(Mapping):
""" Model registry for a particular database.
@ -114,7 +137,7 @@ class Registry(Mapping):
self._fields_by_model = None
self._ordinary_tables = None
self._constraint_queue = deque()
self.__cache = LRU(8192)
self.__caches = {cache_name: LRU(cache_size) for cache_name, cache_size in _REGISTRY_CACHES.items()}
# modules fully loaded (maintained during init phase by `loading` module)
self._init_modules = set()
@ -147,7 +170,7 @@ class Registry(Mapping):
# The `base_cache_signaling sequence` indicates all caches must be
# invalidated (i.e. cleared).
self.registry_sequence = None
self.cache_sequence = None
self.cache_sequences = {}
# Flags indicating invalidation of the registry or the cache.
self._invalidation_flags = threading.local()
@ -229,7 +252,8 @@ class Registry(Mapping):
from .. import models
# clear cache to ensure consistency, but do not signal it
self.__cache.clear()
for cache in self.__caches.values():
cache.clear()
lazy_property.reset_all(self)
self._field_trigger_trees.clear()
@ -260,7 +284,8 @@ class Registry(Mapping):
model._unregister_hook()
# clear cache to ensure consistency, but do not signal it
self.__cache.clear()
for cache in self.__caches.values():
cache.clear()
lazy_property.reset_all(self)
self._field_trigger_trees.clear()
@ -325,12 +350,30 @@ class Registry(Mapping):
computed[field] = group = groups[field.compute]
group.append(field)
for fields in groups.values():
if len(fields) < 2:
continue
if len({field.compute_sudo for field in fields}) > 1:
_logger.warning("%s: inconsistent 'compute_sudo' for computed fields: %s",
model_name, ", ".join(field.name for field in fields))
fnames = ", ".join(field.name for field in fields)
warnings.warn(
f"{model_name}: inconsistent 'compute_sudo' for computed fields {fnames}. "
f"Either set 'compute_sudo' to the same value on all those fields, or "
f"use distinct compute methods for sudoed and non-sudoed fields."
)
if len({field.precompute for field in fields}) > 1:
_logger.warning("%s: inconsistent 'precompute' for computed fields: %s",
model_name, ", ".join(field.name for field in fields))
fnames = ", ".join(field.name for field in fields)
warnings.warn(
f"{model_name}: inconsistent 'precompute' for computed fields {fnames}. "
f"Either set all fields as precompute=True (if possible), or "
f"use distinct compute methods for precomputed and non-precomputed fields."
)
if len({field.store for field in fields}) > 1:
fnames1 = ", ".join(field.name for field in fields if not field.store)
fnames2 = ", ".join(field.name for field in fields if field.store)
warnings.warn(
f"{model_name}: inconsistent 'store' for computed fields, "
f"accessing {fnames1} may recompute and update {fnames2}. "
f"Use distinct compute methods for stored and non-stored fields."
)
return computed
def get_trigger_tree(self, fields: list, select=bool) -> "TriggerTree":
@ -530,6 +573,7 @@ class Registry(Mapping):
env['ir.model.fields']._reflect_fields(model_names)
env['ir.model.fields.selection']._reflect_selections(model_names)
env['ir.model.constraint']._reflect_constraints(model_names)
env['ir.model.inherit']._reflect_inherits(model_names)
self._ordinary_tables = None
@ -552,8 +596,9 @@ class Registry(Mapping):
def check_indexes(self, cr, model_names):
""" Create or drop column indexes for the given models. """
expected = [
(f"{Model._table}_{field.name}_index", Model._table, field, getattr(field, 'unaccent', False))
(sql.make_index_name(Model._table, field.name), Model._table, field, getattr(field, 'unaccent', False))
for model_name in model_names
for Model in [self.models[model_name]]
if Model._auto and not Model._abstract
@ -569,12 +614,11 @@ class Registry(Mapping):
existing = dict(cr.fetchall())
for indexname, tablename, field, unaccent in expected:
column_expression = f'"{field.name}"'
index = field.index
assert index in ('btree', 'btree_not_null', 'trigram', True, False, None)
if index and indexname not in existing and \
((not field.translate and index != 'trigram') or (index == 'trigram' and self.has_trigram)):
column_expression = f'"{field.name}"'
if index == 'trigram':
if field.translate:
column_expression = f'''(jsonb_path_query_array({column_expression}, '$.*')::text)'''
@ -677,17 +721,35 @@ class Registry(Mapping):
for table in missing_tables:
_logger.error("Model %s has no table.", table2model[table])
def _clear_cache(self):
""" Clear the cache and mark it as invalidated. """
self.__cache.clear()
self.cache_invalidated = True
def clear_caches(self):
def clear_cache(self, *cache_names):
""" Clear the caches associated to methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi`` for all the models.
``tools.ormcache`` if the cache is in the ``cache_names`` subset. """
cache_names = cache_names or ('default',)
assert not any('.' in cache_name for cache_name in cache_names)
for cache_name in cache_names:
for cache in _CACHES_BY_KEY[cache_name]:
self.__caches[cache].clear()
self.cache_invalidated.add(cache_name)
# log information about invalidation_cause
if _logger.isEnabledFor(logging.DEBUG):
# could be interesting to log at info level, but that would first require minimizing invalidations,
# mainly in some setupclass and crons
caller_info = format_frame(inspect.currentframe().f_back)
_logger.debug('Invalidating %s model caches from %s', ','.join(cache_names), caller_info)
def clear_all_caches(self):
""" Clear the caches associated to methods decorated with
``tools.ormcache``.
"""
for model in self.models.values():
model.clear_caches()
for cache_name, caches in _CACHES_BY_KEY.items():
for cache in caches:
self.__caches[cache].clear()
self.cache_invalidated.add(cache_name)
caller_info = format_frame(inspect.currentframe().f_back)
log = _logger.info if self.loaded else _logger.debug
log('Invalidating all model caches from %s', caller_info)
def is_an_ordinary_table(self, model):
""" Return whether the given model has an ordinary table. """
@ -719,11 +781,11 @@ class Registry(Mapping):
@property
def cache_invalidated(self):
""" Determine whether the current thread has modified the cache. """
return getattr(self._invalidation_flags, 'cache', False)
@cache_invalidated.setter
def cache_invalidated(self, value):
self._invalidation_flags.cache = value
try:
return self._invalidation_flags.cache
except AttributeError:
names = self._invalidation_flags.cache = set()
return names
def setup_signaling(self):
""" Setup the inter-process signaling on this registry. """
@ -733,21 +795,37 @@ class Registry(Mapping):
with self.cursor() as cr:
# The `base_registry_signaling` sequence indicates when the registry
# must be reloaded.
# The `base_cache_signaling` sequence indicates when all caches must
# The `base_cache_signaling_...` sequences indicate when caches must
# be invalidated (i.e. cleared).
cr.execute("SELECT sequence_name FROM information_schema.sequences WHERE sequence_name='base_registry_signaling'")
if not cr.fetchall():
cr.execute("CREATE SEQUENCE base_registry_signaling INCREMENT BY 1 START WITH 1")
cr.execute("SELECT nextval('base_registry_signaling')")
cr.execute("CREATE SEQUENCE base_cache_signaling INCREMENT BY 1 START WITH 1")
cr.execute("SELECT nextval('base_cache_signaling')")
sequence_names = ('base_registry_signaling', *(f'base_cache_signaling_{cache_name}' for cache_name in _CACHES_BY_KEY))
cr.execute("SELECT sequence_name FROM information_schema.sequences WHERE sequence_name IN %s", [sequence_names])
existing_sequences = tuple(s[0] for s in cr.fetchall()) # could be a set but not efficient with such a little list
cr.execute(""" SELECT base_registry_signaling.last_value,
base_cache_signaling.last_value
FROM base_registry_signaling, base_cache_signaling""")
self.registry_sequence, self.cache_sequence = cr.fetchone()
_logger.debug("Multiprocess load registry signaling: [Registry: %s] [Cache: %s]",
self.registry_sequence, self.cache_sequence)
for sequence_name in sequence_names:
if sequence_name not in existing_sequences:
cr.execute(SQL(
"CREATE SEQUENCE %s INCREMENT BY 1 START WITH 1",
SQL.identifier(sequence_name),
))
cr.execute(SQL("SELECT nextval(%s)", sequence_name))
db_registry_sequence, db_cache_sequences = self.get_sequences(cr)
self.registry_sequence = db_registry_sequence
self.cache_sequences.update(db_cache_sequences)
_logger.debug("Multiprocess load registry signaling: [Registry: %s] %s",
self.registry_sequence, ' '.join('[Cache %s: %s]' % cs for cs in self.cache_sequences.items()))
def get_sequences(self, cr):
cache_sequences_query = ', '.join([f'base_cache_signaling_{cache_name}' for cache_name in _CACHES_BY_KEY])
cache_sequences_values_query = ',\n'.join([f'base_cache_signaling_{cache_name}.last_value' for cache_name in _CACHES_BY_KEY])
cr.execute(f"""
SELECT base_registry_signaling.last_value, {cache_sequences_values_query}
FROM base_registry_signaling, {cache_sequences_query}
""")
registry_sequence, *cache_sequences_values = cr.fetchone()
cache_sequences = dict(zip(_CACHES_BY_KEY, cache_sequences_values))
return registry_sequence, cache_sequences
def check_signaling(self):
""" Check whether the registry has changed, and performs all necessary
@ -757,33 +835,46 @@ class Registry(Mapping):
return self
with closing(self.cursor()) as cr:
cr.execute(""" SELECT base_registry_signaling.last_value,
base_cache_signaling.last_value
FROM base_registry_signaling, base_cache_signaling""")
r, c = cr.fetchone()
_logger.debug("Multiprocess signaling check: [Registry - %s -> %s] [Cache - %s -> %s]",
self.registry_sequence, r, self.cache_sequence, c)
db_registry_sequence, db_cache_sequences = self.get_sequences(cr)
changes = ''
# Check if the model registry must be reloaded
if self.registry_sequence != r:
if self.registry_sequence != db_registry_sequence:
_logger.info("Reloading the model registry after database signaling.")
self = Registry.new(self.db_name)
self.registry_sequence = db_registry_sequence
if _logger.isEnabledFor(logging.DEBUG):
changes += "[Registry - %s -> %s]" % (self.registry_sequence, db_registry_sequence)
# Check if the model caches must be invalidated.
elif self.cache_sequence != c:
_logger.info("Invalidating all model caches after database signaling.")
self.clear_caches()
# prevent re-signaling the clear_caches() above, or any residual one that
# would be inherited from the master process (first request in pre-fork mode)
self.cache_invalidated = False
self.registry_sequence = r
self.cache_sequence = c
else:
invalidated = []
for cache_name, cache_sequence in self.cache_sequences.items():
expected_sequence = db_cache_sequences[cache_name]
if cache_sequence != expected_sequence:
for cache in _CACHES_BY_KEY[cache_name]: # don't call clear_cache to avoid signal loop
if cache not in invalidated:
invalidated.append(cache)
self.__caches[cache].clear()
self.cache_sequences[cache_name] = expected_sequence
if _logger.isEnabledFor(logging.DEBUG):
changes += "[Cache %s - %s -> %s]" % (cache_name, cache_sequence, expected_sequence)
if invalidated:
_logger.info("Invalidating caches after database signaling: %s", sorted(invalidated))
if changes:
_logger.debug("Multiprocess signaling check: %s", changes)
return self
def signal_changes(self):
""" Notifies other processes if registry or cache has been invalidated. """
if self.registry_invalidated and not self.in_test_mode():
if self.in_test_mode():
if self.registry_invalidated:
self.registry_sequence += 1
for cache_name in self.cache_invalidated or ():
self.cache_sequences[cache_name] += 1
self.registry_invalidated = False
self.cache_invalidated.clear()
return
if self.registry_invalidated:
_logger.info("Registry changed, signaling through the database")
with closing(self.cursor()) as cr:
cr.execute("select nextval('base_registry_signaling')")
@ -791,14 +882,15 @@ class Registry(Mapping):
# no need to notify cache invalidation in case of registry invalidation,
# because reloading the registry implies starting with an empty cache
elif self.cache_invalidated and not self.in_test_mode():
_logger.info("At least one model cache has been invalidated, signaling through the database.")
elif self.cache_invalidated:
_logger.info("Caches invalidated, signaling through the database: %s", sorted(self.cache_invalidated))
with closing(self.cursor()) as cr:
cr.execute("select nextval('base_cache_signaling')")
self.cache_sequence = cr.fetchone()[0]
for cache_name in self.cache_invalidated:
cr.execute("select nextval(%s)", [f'base_cache_signaling_{cache_name}'])
self.cache_sequences[cache_name] = cr.fetchone()[0]
self.registry_invalidated = False
self.cache_invalidated = False
self.cache_invalidated.clear()
def reset_changes(self):
""" Reset the registry and cancel all invalidations. """
@ -807,8 +899,10 @@ class Registry(Mapping):
self.setup_models(cr)
self.registry_invalidated = False
if self.cache_invalidated:
self.__cache.clear()
self.cache_invalidated = False
for cache_name in self.cache_invalidated:
for cache in _CACHES_BY_KEY[cache_name]:
self.__caches[cache].clear()
self.cache_invalidated.clear()
@contextmanager
def manage_changes(self):
@ -895,6 +989,9 @@ class TriggerTree(dict):
def __bool__(self):
return bool(self.root or len(self))
def __repr__(self) -> str:
return f"TriggerTree(root={self.root!r}, {super().__repr__()})"
def increase(self, key):
try:
return self[key]