18.0 vanilla

This commit is contained in:
Ernad Husremovic 2025-10-03 18:06:50 +02:00
parent d72e748793
commit 0a7ae8db93
337 changed files with 399651 additions and 232598 deletions

View file

@ -8,6 +8,7 @@ from . import ir_ui_menu
from . import ir_ui_view
from . import ir_asset
from . import ir_actions
from . import ir_embedded_actions
from . import ir_actions_report
from . import ir_attachment
from . import ir_binary
@ -24,7 +25,6 @@ from . import ir_qweb
from . import ir_qweb_fields
from . import ir_http
from . import ir_logging
from . import ir_property
from . import ir_module
from . import ir_demo
from . import ir_demo_failure
@ -45,5 +45,6 @@ from . import res_company
from . import res_users
from . import res_users_settings
from . import res_users_deletion
from . import res_device
from . import decimal_precision

View file

@ -1,22 +1,16 @@
# -*- coding: utf-8 -*-
from contextlib import closing
from collections import OrderedDict
from datetime import datetime
from lxml import etree
from subprocess import Popen, PIPE
import base64
import copy
import hashlib
import io
import itertools
import json
import logging
import os
import re
import textwrap
import uuid
import psycopg2
try:
import sass as libsass
except ImportError:
@ -29,11 +23,10 @@ from rjsmin import jsmin as rjsmin
from odoo import release, SUPERUSER_ID, _
from odoo.http import request
from odoo.tools import (func, misc, transpile_javascript,
is_odoo_module, SourceMapGenerator, profiler,
apply_inheritance_specs)
is_odoo_module, SourceMapGenerator, profiler, OrderedSet)
from odoo.tools.json import scriptsafe as json
from odoo.tools.constants import SCRIPT_EXTENSIONS, STYLE_EXTENSIONS
from odoo.tools.misc import file_open, file_path
from odoo.tools.pycompat import to_text
_logger = logging.getLogger(__name__)
@ -49,6 +42,8 @@ class AssetError(Exception):
class AssetNotFound(AssetError):
pass
class XMLAssetError(Exception):
pass
class AssetsBundle(object):
rx_css_import = re.compile("(@import[^;{]+;?)", re.M)
@ -326,22 +321,20 @@ class AssetsBundle(object):
if not js_attachment:
template_bundle = ''
if self.templates:
content = ['<?xml version="1.0" encoding="UTF-8"?>']
content.append('<templates xml:space="preserve">')
content.append(self.xml(show_inherit_info=not is_minified))
content.append('</templates>')
templates = '\n'.join(content).replace("\\", "\\\\").replace("`", "\\`").replace("${", "\\${")
templates = self.generate_xml_bundle()
template_bundle = textwrap.dedent(f"""
/*******************************************
* Templates *
*******************************************/
odoo.define('{self.name}.bundle.xml', ['@web/core/registry'], function(require){{
'use strict';
const {{ registry }} = require('@web/core/registry');
registry.category(`xml_templates`).add(`{self.name}`, `{templates}`);
}});""")
odoo.define("{self.name}.bundle.xml", ["@web/core/templates"], function(require) {{
"use strict";
const {{ checkPrimaryTemplateParents, registerTemplate, registerTemplateExtension }} = require("@web/core/templates");
/* {self.name} */
{templates}
}});
""")
if is_minified:
content_bundle = ';\n'.join(asset.minify() for asset in self.javascripts)
@ -394,29 +387,64 @@ class AssetsBundle(object):
return js_attachment
def xml(self, show_inherit_info=False):
def generate_xml_bundle(self):
content = []
blocks = []
try:
blocks = self.xml()
except XMLAssetError as e:
content.append(f'throw new Error({json.dumps(str(e))});')
def get_template(element):
element.set("{http://www.w3.org/XML/1998/namespace}space", "preserve")
string = etree.tostring(element, encoding='unicode')
return string.replace("\\", "\\\\").replace("`", "\\`").replace("${", "\\${")
names = OrderedSet()
primary_parents = OrderedSet()
extension_parents = OrderedSet()
for block in blocks:
if block["type"] == "templates":
for (element, url, inherit_from) in block["templates"]:
if inherit_from:
primary_parents.add(inherit_from)
name = element.get("t-name")
names.add(name)
template = get_template(element)
content.append(f'registerTemplate("{name}", `{url}`, `{template}`);')
else:
for inherit_from, elements in block["extensions"].items():
extension_parents.add(inherit_from)
for (element, url) in elements:
template = get_template(element)
content.append(f'registerTemplateExtension("{inherit_from}", `{url}`, `{template}`);')
missing_names_for_primary = primary_parents - names
if missing_names_for_primary:
content.append(f'checkPrimaryTemplateParents({json.dumps(list(missing_names_for_primary))});')
missing_names_for_extension = extension_parents - names
if missing_names_for_extension:
content.append(f'console.error("Missing (extension) parent templates: {", ".join(missing_names_for_extension)}");')
return '\n'.join(content)
def xml(self):
"""
Create the ir.attachment representing the content of the bundle XML.
The xml contents are loaded and parsed with etree. Inheritances are
applied in the order of files and templates.
Create a list of blocks. A block can have one of the two types "templates" or "extensions".
A template with no parent or template with t-inherit-mode="primary" goes in a block of type "templates".
A template with t-inherit-mode="extension" goes in a block of type "extensions".
Used parsed attributes:
* `t-name`: template name
* `t-inherit`: inherited template name. The template use the
`apply_inheritance_specs` method from `ir.ui.view` to apply
inheritance (with xpath and position).
* 't-inherit-mode': 'primary' to create a new template with the
update, or 'extension' to apply the update on the inherited
template.
* `t-extend` deprecated attribute, used by the JavaScript Qweb.
* `t-inherit`: inherited template name.
* 't-inherit-mode': 'primary' or 'extension'.
:param show_inherit_info: if true add the file url and inherit
information in the template.
:return ir.attachment representing the content of the bundle XML
:return a list of blocks
"""
template_dict = OrderedDict()
parser = etree.XMLParser(ns_clean=True, recover=True, remove_comments=True)
blocks = []
block = None
for asset in self.templates:
# Load content.
try:
@ -425,106 +453,36 @@ class AssetsBundle(object):
io_content = io.BytesIO(template.encode('utf-8'))
content_templates_tree = etree.parse(io_content, parser=parser).getroot()
except etree.ParseError as e:
_logger.error("Could not parse file %s: %s", asset.url, e.msg)
raise
addon = asset.url.split('/')[1]
template_dict.setdefault(addon, OrderedDict())
return asset.generate_error(f'Could not parse file: {e.msg}')
# Process every templates.
for template_tree in list(content_templates_tree):
template_name = None
if 't-name' in template_tree.attrib:
template_name = template_tree.attrib['t-name']
dotted_names = template_name.split('.', 1)
if len(dotted_names) > 1 and dotted_names[0] == addon:
template_name = dotted_names[1]
if 't-inherit' in template_tree.attrib:
inherit_mode = template_tree.attrib.get('t-inherit-mode', 'primary')
template_name = template_tree.get("t-name")
inherit_from = template_tree.get("t-inherit")
inherit_mode = None
if inherit_from:
inherit_mode = template_tree.get('t-inherit-mode', 'primary')
if inherit_mode not in ['primary', 'extension']:
raise ValueError(_("Invalid inherit mode. Module %r and template name %r", addon, template_name))
# Get inherited template, the identifier can be "addon.name", just "name" or (silly) "just.name.with.dots"
parent_dotted_name = template_tree.attrib['t-inherit']
split_name_attempt = parent_dotted_name.split('.', 1)
parent_addon, parent_name = split_name_attempt if len(split_name_attempt) == 2 else (addon, parent_dotted_name)
if parent_addon not in template_dict:
if parent_dotted_name in template_dict[addon]:
parent_addon = addon
parent_name = parent_dotted_name
else:
raise ValueError(_("Module %r not loaded or inexistent (try to inherit %r), or templates of addon being loaded %r are misordered (template %r)", parent_addon, parent_name, addon, template_name))
if parent_name not in template_dict[parent_addon]:
raise ValueError(_("Cannot create %r because the template to inherit %r is not found.", '%s.%s' % (addon, template_name), '%s.%s' % (parent_addon, parent_name)))
# After several performance tests, we found out that deepcopy is the most efficient
# solution in this case (compared with copy, xpath with '.' and stringifying).
parent_tree, parent_urls = template_dict[parent_addon][parent_name]
parent_tree = copy.deepcopy(parent_tree)
if show_inherit_info:
# Add inheritance information as xml comment for debugging.
xpaths = []
for item in template_tree:
position = item.get('position')
attrib = dict(**item.attrib)
attrib.pop('position', None)
comment = etree.Comment(f""" Filepath: {asset.url} ; position="{position}" ; {attrib} """)
if position == "attributes":
if item.get('expr'):
comment_node = etree.Element('xpath', {'expr': item.get('expr'), 'position': 'before'})
else:
comment_node = etree.Element(item.tag, item.attrib)
comment_node.attrib['position'] = 'before'
comment_node.append(comment)
xpaths.append(comment_node)
else:
if len(item) > 0:
item[0].addprevious(comment)
else:
item.append(comment)
xpaths.append(item)
else:
xpaths = list(template_tree)
# Apply inheritance.
if inherit_mode == 'primary':
parent_tree.tag = template_tree.tag
inherited_template = apply_inheritance_specs(parent_tree, xpaths)
if inherit_mode == 'primary': # New template_tree: A' = B(A)
for attr_name, attr_val in template_tree.attrib.items():
if attr_name not in ('t-inherit', 't-inherit-mode'):
inherited_template.set(attr_name, attr_val)
if not template_name:
raise ValueError(_("Template name is missing in file %r.", asset.url))
template_dict[addon][template_name] = (inherited_template, parent_urls + [asset.url])
else: # Modifies original: A = B(A)
template_dict[parent_addon][parent_name] = (inherited_template, parent_urls + [asset.url])
addon = asset.url.split('/')[1]
return asset.generate_error(_(
'Invalid inherit mode. Module "%(module)s" and template name "%(template_name)s"',
module=addon,
template_name=template_name,
))
if inherit_mode == "extension":
if block is None or block["type"] != "extensions":
block = {"type": "extensions", "extensions": OrderedDict()}
blocks.append(block)
block["extensions"].setdefault(inherit_from, [])
block["extensions"][inherit_from].append((template_tree, asset.url))
elif template_name:
if template_name in template_dict[addon]:
raise ValueError(_("Template %r already exists in module %r", template_name, addon))
template_dict[addon][template_name] = (template_tree, [asset.url])
elif template_tree.attrib.get('t-extend'):
template_name = '%s__extend_%s' % (template_tree.attrib.get('t-extend'), len(template_dict[addon]))
template_dict[addon][template_name] = (template_tree, [asset.url])
if block is None or block["type"] != "templates":
block = {"type": "templates", "templates": []}
blocks.append(block)
block["templates"].append((template_tree, asset.url, inherit_from))
else:
raise ValueError(_("Template name is missing in file %r.", asset.url))
return asset.generate_error(_("Template name is missing."))
return blocks
# Concat and render inherited templates
root = etree.Element('root')
for addon in template_dict.values():
for template, urls in addon.values():
if show_inherit_info:
tail = "\n"
if len(root) > 0:
tail = root[-1].tail
root[-1].tail = "\n\n"
comment = etree.Comment(f""" Filepath: {' => '.join(urls)} """)
comment.tail = tail
root.append(comment)
root.append(template)
# Returns the string by removing the <root> tag.
return etree.tostring(root, encoding='unicode')[6:-7]
def css(self):
is_minified = not self.is_debug_assets
@ -652,7 +610,7 @@ css_error_message {
"""Sanitizes @import rules, remove duplicates @import rules, then compile"""
imports = []
def handle_compile_error(e, source):
error = self.get_preprocessor_error(e, source=source)
error = self.get_preprocessor_error(str(e), source=source)
_logger.warning(error)
self.css_errors.append(error)
return ''
@ -668,7 +626,6 @@ css_error_message {
return ''
source = re.sub(self.rx_preprocess_imports, sanitize, source)
compiled = ''
try:
compiled = compiler(source)
except CompileError as e:
@ -700,7 +657,7 @@ css_error_message {
cmd = [rtlcss, '-c', file_path("base/data/rtlcss.json"), '-']
try:
rtlcss = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
rtlcss = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, encoding='utf-8')
except Exception:
# Check the presence of rtlcss, if rtlcss not available then we should return normal less file
@ -717,23 +674,20 @@ css_error_message {
self.css_errors.append(msg)
return ''
stdout, stderr = rtlcss.communicate(input=source.encode('utf-8'))
if rtlcss.returncode or (source and not stdout):
cmd_output = ''.join(misc.ustr(stderr))
if not cmd_output and rtlcss.returncode:
cmd_output = "Process exited with return code %d\n" % rtlcss.returncode
elif not cmd_output:
cmd_output = "rtlcss: error processing payload\n"
error = self.get_rtlcss_error(cmd_output, source=source)
_logger.warning(error)
out, err = rtlcss.communicate(input=source)
if rtlcss.returncode or (source and not out):
if rtlcss.returncode:
error = self.get_rtlcss_error(err or f"Process exited with return code {rtlcss.returncode}", source=source)
else:
error = "rtlcss: error processing payload\n"
_logger.warning("%s", error)
self.css_errors.append(error)
return ''
rtlcss_result = stdout.strip().decode('utf8')
return rtlcss_result
return out.strip()
def get_preprocessor_error(self, stderr, source=None):
"""Improve and remove sensitive information from sass/less compilator error messages"""
error = misc.ustr(stderr).split('Load paths')[0].replace(' Use --trace for backtrace.', '')
error = stderr.split('Load paths')[0].replace(' Use --trace for backtrace.', '')
if 'Cannot load compass' in error:
error += "Maybe you should install the compass gem using this extra argument:\n\n" \
" $ sudo gem install compass --pre\n"
@ -745,8 +699,8 @@ css_error_message {
def get_rtlcss_error(self, stderr, source=None):
"""Improve and remove sensitive information from sass/less compilator error messages"""
error = misc.ustr(stderr).split('Load paths')[0].replace(' Use --trace for backtrace.', '')
error += "This error occurred while compiling the bundle '%s' containing:" % self.name
error = stderr.split('Load paths')[0].replace(' Use --trace for backtrace.', '')
error = f"{error}This error occurred while compiling the bundle {self.name!r} containing:"
return error
@ -765,6 +719,11 @@ class WebAsset(object):
if not inline and not url:
raise Exception("An asset should either be inlined or url linked, defined in bundle '%s'" % bundle.name)
def generate_error(self, msg):
msg = f'{msg!r} in file {self.url!r}'
_logger.error(msg) # log it in the python console in all cases.
return msg
@func.lazy_property
def id(self):
if self._id is None: self._id = str(uuid.uuid4())
@ -840,6 +799,10 @@ class JavascriptAsset(WebAsset):
self._is_transpiled = None
self._converted_content = None
def generate_error(self, msg):
msg = super().generate_error(msg)
return f'console.error({json.dumps(msg)});'
@property
def bundle_version(self):
return self.bundle.get_version('js')
@ -847,7 +810,7 @@ class JavascriptAsset(WebAsset):
@property
def is_transpiled(self):
if self._is_transpiled is None:
self._is_transpiled = bool(is_odoo_module(super().content))
self._is_transpiled = bool(is_odoo_module(self.url, super().content))
return self._is_transpiled
@property
@ -866,7 +829,7 @@ class JavascriptAsset(WebAsset):
try:
return super()._fetch_content()
except AssetError as e:
return u"console.error(%s);" % json.dumps(to_text(e))
return self.generate_error(str(e))
def with_header(self, content=None, minimal=True):
@ -898,17 +861,21 @@ class XMLAsset(WebAsset):
try:
content = super()._fetch_content()
except AssetError as e:
return u"console.error(%s);" % json.dumps(to_text(e))
return self.generate_error(str(e))
parser = etree.XMLParser(ns_clean=True, remove_comments=True, resolve_entities=False)
try:
root = etree.fromstring(content.encode('utf-8'), parser=parser)
except etree.XMLSyntaxError as e:
return f'<t t-name="parsing_error{self.url.replace("/","_")}"><parsererror>Invalid XML template: {self.url} \n {e.msg} </parsererror></t>'
return self.generate_error(f'Invalid XML template: {e.msg}')
if root.tag in ('templates', 'template'):
return ''.join(etree.tostring(el, encoding='unicode') for el in root)
return etree.tostring(root, encoding='unicode')
def generate_error(self, msg):
msg = super().generate_error(msg)
raise XMLAssetError(msg)
@property
def bundle_version(self):
return self.bundle.get_version('js')
@ -1008,17 +975,17 @@ class PreprocessedCSS(StylesheetAsset):
command = self.get_command()
try:
compiler = Popen(command, stdin=PIPE, stdout=PIPE,
stderr=PIPE)
stderr=PIPE, encoding='utf-8')
except Exception:
raise CompileError("Could not execute command %r" % command[0])
(out, err) = compiler.communicate(input=source.encode('utf-8'))
out, err = compiler.communicate(input=source)
if compiler.returncode:
cmd_output = misc.ustr(out) + misc.ustr(err)
cmd_output = out + err
if not cmd_output:
cmd_output = u"Process exited with return code %d\n" % compiler.returncode
raise CompileError(cmd_output)
return out.decode('utf8')
return out
class SassStylesheetAsset(PreprocessedCSS):
rx_indent = re.compile(r'^( +|\t+)', re.M)

View file

@ -15,6 +15,7 @@ import logging
from operator import getitem
import requests
import json
import re
import contextlib
from pytz import timezone
@ -55,9 +56,12 @@ class IrActions(models.Model):
_order = 'name'
_allow_sudo_commands = False
_sql_constraints = [('path_unique', 'unique(path)', "Path to show in the URL must be unique! Please choose another one.")]
name = fields.Char(string='Action Name', required=True, translate=True)
type = fields.Char(string='Action Type', required=True)
xml_id = fields.Char(compute='_compute_xml_id', string="External ID")
path = fields.Char(string="Path to show in the URL")
help = fields.Html(string='Action Description',
help='Optional help text for the users with a description of the target view, such as its usage and purpose.',
translate=True)
@ -68,6 +72,30 @@ class IrActions(models.Model):
required=True, default='action')
binding_view_types = fields.Char(default='list,form')
@api.constrains('path')
def _check_path(self):
for action in self:
if action.path:
if not re.fullmatch(r'[a-z][a-z0-9_-]*', action.path):
raise ValidationError(_('The path should contain only lowercase alphanumeric characters, underscore, and dash, and it should start with a letter.'))
if action.path.startswith("m-"):
raise ValidationError(_("'m-' is a reserved prefix."))
if action.path.startswith("action-"):
raise ValidationError(_("'action-' is a reserved prefix."))
if action.path == "new":
raise ValidationError(_("'new' is reserved, and can not be used as path."))
# Tables ir_act_window, ir_act_report_xml, ir_act_url, ir_act_server and ir_act_client
# inherit from table ir_actions (see base_data.sql). The path must be unique across
# all these tables. The unique constraint is not enough because a big limitation of
# the inheritance feature is that unique indexes only apply to single tables, and
# not accross all the tables. So we need to check the uniqueness of the path manually.
# For more information, see: https://www.postgresql.org/docs/14/ddl-inherit.html#DDL-INHERIT-CAVEATS
# Note that, we leave the unique constraint in place to check the uniqueness of the path
# within the same table before checking the uniqueness across all the tables.
if (self.env['ir.actions.actions'].search_count([('path', '=', action.path)]) > 1):
raise ValidationError(_("Path to show in the URL must be unique! Please choose another one."))
def _compute_xml_id(self):
res = self.get_external_id()
for record in self:
@ -132,7 +160,7 @@ class IrActions(models.Model):
for action in all_actions:
action = dict(action)
groups = action.pop('groups_id', None)
if groups and not self.user_has_groups(groups):
if groups and not any(self.env.user.has_group(ext_id) for ext_id in groups):
# the user may not perform this action
continue
res_model = action.pop('res_model', None)
@ -167,13 +195,16 @@ class IrActions(models.Model):
try:
action = self.env[action_model].sudo().browse(action_id)
fields = ['name', 'binding_view_types']
for field in ('groups_id', 'res_model', 'sequence'):
for field in ('groups_id', 'res_model', 'sequence', 'domain'):
if field in action._fields:
fields.append(field)
action = action.read(fields)[0]
if action.get('groups_id'):
# transform the list of ids into a list of xml ids
groups = self.env['res.groups'].browse(action['groups_id'])
action['groups_id'] = ','.join(ext_id for ext_id in groups._ensure_xml_id().values())
action['groups_id'] = list(groups._ensure_xml_id().values())
if 'domain' in action and not action.get('domain'):
action.pop('domain')
result[binding_type].append(frozendict(action))
except (MissingError):
continue
@ -217,6 +248,7 @@ class IrActions(models.Model):
return {
"binding_model_id", "binding_type", "binding_view_types",
"display_name", "help", "id", "name", "type", "xml_id",
"path",
}
@ -232,9 +264,9 @@ class IrActionsActWindow(models.Model):
def _check_model(self):
for action in self:
if action.res_model not in self.env:
raise ValidationError(_('Invalid model name %r in action definition.', action.res_model))
raise ValidationError(_('Invalid model name %s in action definition.', action.res_model))
if action.binding_model_id and action.binding_model_id.model not in self.env:
raise ValidationError(_('Invalid model name %r in action definition.', action.binding_model_id.model))
raise ValidationError(_('Invalid model name %s in action definition.', action.binding_model_id.model))
@api.depends('view_ids.view_mode', 'view_mode', 'view_id.type')
def _compute_views(self):
@ -265,7 +297,7 @@ class IrActionsActWindow(models.Model):
if len(modes) != len(set(modes)):
raise ValidationError(_('The modes in view_mode must not be duplicated: %s', modes))
if ' ' in modes:
raise ValidationError(_('No spaces allowed in view_mode: %r', modes))
raise ValidationError(_('No spaces allowed in view_mode: %s', modes))
type = fields.Char(default="ir.actions.act_window")
view_id = fields.Many2one('ir.ui.view', string='View Ref.', ondelete='set null')
@ -277,8 +309,8 @@ class IrActionsActWindow(models.Model):
res_model = fields.Char(string='Destination Model', required=True,
help="Model name of the object to open in the view window")
target = fields.Selection([('current', 'Current Window'), ('new', 'New Window'), ('inline', 'Inline Edit'), ('fullscreen', 'Full Screen'), ('main', 'Main action of Current Window')], default="current", string='Target Window')
view_mode = fields.Char(required=True, default='tree,form',
help="Comma-separated list of allowed view modes, such as 'form', 'tree', 'calendar', etc. (Default: tree,form)")
view_mode = fields.Char(required=True, default='list,form',
help="Comma-separated list of allowed view modes, such as 'form', 'list', 'calendar', etc. (Default: list,form)")
mobile_view_mode = fields.Char(default="kanban", help="First view mode in mobile and small screen environments (default='kanban'). If it can't be found among available view modes, the same mode as for wider screens is used)")
usage = fields.Char(string='Action Usage',
help="Used to filter menu and home actions from the user form.")
@ -291,8 +323,14 @@ class IrActionsActWindow(models.Model):
groups_id = fields.Many2many('res.groups', 'ir_act_window_group_rel',
'act_id', 'gid', string='Groups')
search_view_id = fields.Many2one('ir.ui.view', string='Search View Ref.')
embedded_action_ids = fields.One2many('ir.embedded.actions', compute="_compute_embedded_actions")
filter = fields.Boolean()
def _compute_embedded_actions(self):
embedded_actions = self.env["ir.embedded.actions"].search([('parent_action_id', 'in', self.ids)]).filtered(lambda x: x.is_visible)
for action in self:
action.embedded_action_ids = embedded_actions.filtered(lambda rec: rec.parent_action_id == action)
def read(self, fields=None, load='_classic_read'):
""" call the method get_empty_list_help of the model and set the window action help message
"""
@ -336,20 +374,33 @@ class IrActionsActWindow(models.Model):
def _get_readable_fields(self):
return super()._get_readable_fields() | {
"context", "mobile_view_mode", "domain", "filter", "groups_id", "limit",
"res_id", "res_model", "search_view_id", "target", "view_id", "view_mode", "views",
"res_id", "res_model", "search_view_id", "target", "view_id", "view_mode", "views", "embedded_action_ids",
# `flags` is not a real field of ir.actions.act_window but is used
# to give the parameters to generate the action
"flags"
"flags",
# this is used by frontend, with the document layout wizard before send and print
"close_on_report_download",
}
def _get_action_dict(self):
""" Override to return action content with detailed embedded actions data if available.
:return: A dict with updated action dictionary including embedded actions information.
"""
result = super()._get_action_dict()
if embedded_action_ids := result["embedded_action_ids"]:
EmbeddedActions = self.env["ir.embedded.actions"]
embedded_fields = EmbeddedActions._get_readable_fields()
result["embedded_action_ids"] = EmbeddedActions.browse(embedded_action_ids).read(embedded_fields)
return result
VIEW_TYPES = [
('tree', 'Tree'),
('list', 'List'),
('form', 'Form'),
('graph', 'Graph'),
('pivot', 'Pivot'),
('calendar', 'Calendar'),
('gantt', 'Gantt'),
('kanban', 'Kanban'),
]
@ -463,6 +514,7 @@ class IrActionsServer(models.Model):
# - records: recordset of all records on which the action is triggered in multi-mode; may be void
# - time, datetime, dateutil, timezone: useful Python libraries
# - float_compare: utility function to compare floats based on specific precision
# - b64encode, b64decode: functions to encode/decode binary data
# - log: log(message, level='info'): logging function to record debug information in ir.logging table
# - _logger: _logger.info(message): logger to emit messages in server logs
# - UserError: exception class for raising user-facing warning messages
@ -697,15 +749,15 @@ class IrActionsServer(models.Model):
action.webhook_sample_payload = False
continue
payload = {
'id': 1,
'_id': 1,
'_model': self.model_id.model,
'_name': action.name,
'_action': f'{action.name}(#{action.id})',
}
if self.model_id:
sample_record = self.env[self.model_id.model].with_context(active_test=False).search([], limit=1)
for field in action.webhook_field_ids:
if sample_record:
payload['id'] = sample_record.id
payload['_id'] = sample_record.id
payload.update(sample_record.read(self.webhook_field_ids.mapped('name'), load=None)[0])
else:
payload[field.name] = WEBHOOK_SAMPLE_VALUES[field.ttype] if field.ttype in WEBHOOK_SAMPLE_VALUES else WEBHOOK_SAMPLE_VALUES[None]
@ -725,8 +777,8 @@ class IrActionsServer(models.Model):
raise ValidationError(msg)
@api.constrains('child_ids')
def _check_recursion(self):
if not self._check_m2m_recursion('child_ids'):
def _check_child_recursion(self):
if self._has_cycle('child_ids'):
raise ValidationError(_('Recursion found in child server actions'))
def _get_readable_fields(self):
@ -767,7 +819,7 @@ class IrActionsServer(models.Model):
def unlink_action(self):
""" Remove the contextual actions created for the server actions. """
self.check_access_rights('write', raise_exception=True)
self.check_access('write')
self.filtered('binding_model_id').write({'binding_model_id': False})
return True
@ -790,7 +842,7 @@ class IrActionsServer(models.Model):
record_cached = self._context['onchange_self']
for field, new_value in res.items():
record_cached[field] = new_value
else:
elif self.update_path:
starting_record = self.env[self.model_id.model].browse(self._context.get('active_id'))
_, _, target_records = self._traverse_path(record=starting_record)
target_records.write(res)
@ -915,7 +967,7 @@ class IrActionsServer(models.Model):
else:
model_name = action.model_id.model
try:
self.env[model_name].check_access_rights("write")
self.env[model_name].check_access("write")
except AccessError:
_logger.warning("Forbidden server action %r executed while the user %s does not have access to %s.",
action.name, self.env.user.login, model_name,
@ -925,11 +977,11 @@ class IrActionsServer(models.Model):
eval_context = self._get_eval_context(action)
records = eval_context.get('record') or eval_context['model']
records |= eval_context.get('records') or eval_context['model']
if records.ids:
if not action_groups and records.ids:
# check access rules on real records only; base automations of
# type 'onchange' can run server actions on new records
try:
records.check_access_rule('write')
records.check_access('write')
except AccessError:
_logger.warning("Forbidden server action %r executed while the user %s does not have access to %s.",
action.name, self.env.user.login, records,
@ -1016,6 +1068,8 @@ class IrActionsServer(models.Model):
elif action.update_field_id.ttype in ['many2one', 'integer']:
try:
expr = int(action.value)
if expr == 0 and action.update_field_id.ttype == 'many2one':
expr = False
except Exception:
pass
elif action.update_field_id.ttype == 'float':
@ -1026,9 +1080,11 @@ class IrActionsServer(models.Model):
def copy_data(self, default=None):
default = default or {}
vals_list = super().copy_data(default=default)
if not default.get('name'):
default['name'] = _('%s (copy)', self.name)
return super().copy_data(default=default)
for vals in vals_list:
vals['name'] = _('%s (copy)', vals.get('name', ''))
return vals_list
class IrActionsTodo(models.Model):
"""

View file

@ -1,15 +1,18 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
from contextlib import ExitStack
from markupsafe import Markup
from urllib.parse import urlparse, parse_qs, urlencode
from urllib.parse import urlparse
from odoo import api, fields, models, tools, SUPERUSER_ID, _
from odoo.exceptions import UserError, AccessError, RedirectWarning
from odoo.service import security
from odoo.tools.safe_eval import safe_eval, time
from odoo.tools.misc import find_in_path, ustr
from odoo.tools.misc import find_in_path
from odoo.tools import check_barcode_encoding, config, is_html_empty, parse_version, split_every
from odoo.http import request, root
from odoo.tools.pdf import PdfFileWriter, PdfFileReader, PdfReadError
from odoo.http import request
from odoo.osv.expression import NEGATIVE_TERM_OPERATORS, FALSE_DOMAIN
import io
@ -19,6 +22,7 @@ import lxml.html
import tempfile
import subprocess
import re
import requests
import json
from lxml import etree
@ -58,6 +62,11 @@ except Exception:
def _get_wkhtmltopdf_bin():
return find_in_path('wkhtmltopdf')
def _get_wkhtmltoimage_bin():
return find_in_path('wkhtmltoimage')
def _split_table(tree, max_rows):
"""
Walks through the etree and splits tables with more than max_rows rows into
@ -108,6 +117,23 @@ else:
_logger.info('Wkhtmltopdf seems to be broken.')
wkhtmltopdf_state = 'broken'
wkhtmltoimage_version = None
try:
process = subprocess.Popen(
[_get_wkhtmltoimage_bin(), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
except OSError:
_logger.info('You need Wkhtmltoimage to generate images from html.')
else:
_logger.info('Will use the Wkhtmltoimage binary at %s', _get_wkhtmltoimage_bin())
out, err = process.communicate()
match = re.search(b'([0-9.]+)', out)
if match:
wkhtmltoimage_version = parse_version(match.group(0).decode('ascii'))
if config['workers'] == 1:
_logger.info('You need to start Odoo with at least two workers to convert images to html.')
else:
_logger.info('Wkhtmltoimage seems to be broken.')
class IrActionsReport(models.Model):
_name = 'ir.actions.report'
@ -144,6 +170,7 @@ class IrActionsReport(models.Model):
help='If enabled, then the second time the user prints with same attachment name, it returns the previous report.')
attachment = fields.Char(string='Save as Attachment Prefix',
help='This is the filename of the attachment used to store the printing result. Keep empty to not save the printed reports. You can use a python expression with the object and time variables.')
domain = fields.Char(string='Filter domain', help='If set, the action will only appear on records that matches the domain.')
@api.depends('model')
def _compute_model_id(self):
@ -183,6 +210,7 @@ class IrActionsReport(models.Model):
"context", "data",
# and this one is used by the frontend later on.
"close_on_report_download",
"domain",
}
def associated_view(self):
@ -206,7 +234,7 @@ class IrActionsReport(models.Model):
def unlink_action(self):
""" Remove the contextual actions created for the reports. """
self.check_access_rights('write', raise_exception=True)
self.check_access('write')
self.filtered('binding_model_id').write({'binding_model_id': False})
return True
@ -245,6 +273,9 @@ class IrActionsReport(models.Model):
def get_paperformat(self):
return self.paperformat_id or self.env.company.paperformat_id
def get_paperformat_by_xmlid(self, xml_id):
    """Return the paperformat of the report referenced by ``xml_id``,
    falling back to the current company's paperformat when no xmlid is given."""
    if xml_id:
        return self.env.ref(xml_id).get_paperformat()
    return self.env.company.paperformat_id
def _get_layout(self):
return self.env.ref('web.minimal_layout', raise_if_not_found=False)
@ -354,13 +385,6 @@ class IrActionsReport(models.Model):
if not layout:
return {}
base_url = self._get_report_url(layout=layout)
url = urlparse(base_url)
query = parse_qs(url.query or "")
debug = self.env.context.get("debug")
if not isinstance(debug, str):
debug = "1" if debug else "0"
query["debug"] = debug
base_url = url._replace(query=urlencode(query)).geturl()
root = lxml.html.fromstring(html, parser=lxml.html.HTMLParser(encoding='utf-8'))
match_klass = "//div[contains(concat(' ', normalize-space(@class), ' '), ' {} ')]"
@ -417,17 +441,63 @@ class IrActionsReport(models.Model):
'subst': True,
'body': Markup(lxml.html.tostring(header_node, encoding='unicode')),
'base_url': base_url,
'report_xml_id': self.xml_id,
'debug': self.env.context.get("debug"),
})
footer = self.env['ir.qweb']._render(layout.id, {
'subst': True,
'body': Markup(lxml.html.tostring(footer_node, encoding='unicode')),
'base_url': base_url,
'report_xml_id': self.xml_id,
'debug': self.env.context.get("debug"),
})
return bodies, res_ids, header, footer, specific_paperformat_args
def _run_wkhtmltoimage(self, bodies, width, height, image_format="jpg"):
    """Render each HTML document of ``bodies`` to an image with wkhtmltoimage.

    :param bodies: valid html documents, as strings
    :param int width: width in pixels
    :param int height: height in pixels
    :param str image_format: output format, 'jpg' or 'png'
    :return: one bytes object per body, or None for a body that failed to
        render (or for every body while tests run, unless forced via the
        ``force_image_rendering`` context key)
    :rtype: list[bytes | None]
    :raises UserError: when wkhtmltoimage is missing or older than 0.12.0
    """
    # Rendering is skipped during tests unless explicitly requested.
    if (tools.config['test_enable'] or tools.config['test_file']) and not self.env.context.get('force_image_rendering'):
        return [None] * len(bodies)
    if not wkhtmltoimage_version or wkhtmltoimage_version < parse_version('0.12.0'):
        raise UserError(_('wkhtmltoimage 0.12.0^ is required in order to render images from html'))

    command_args = [
        '--disable-local-file-access', '--disable-javascript',
        '--quiet',
        '--width', str(width), '--height', str(height),
        '--format', image_format,
    ]

    with ExitStack() as stack:
        files = []
        for body in bodies:
            input_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix='.html', prefix='report_image_html_input.tmp.'))
            output_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix=f'.{image_format}', prefix='report_image_output.tmp.'))
            input_file.write(body.encode())
            # smaller bodies may be held in a python buffer until close, force flush
            input_file.flush()
            files.append((input_file, output_file))

        output_images = []
        for input_file, output_file in files:
            wkhtmltoimage = [_get_wkhtmltoimage_bin()] + command_args + [input_file.name, output_file.name]
            # start and block, no need for parallelism for now
            completed_process = subprocess.run(wkhtmltoimage, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, check=False)
            if completed_process.returncode:
                # stderr is bytes: decode it so the translated message does
                # not embed a b'...' repr
                message = _(
                    'Wkhtmltoimage failed (error code: %(error_code)s). Message: %(error_message_end)s',
                    error_code=completed_process.returncode,
                    error_message_end=completed_process.stderr[-1000:].decode(errors='replace'),
                )
                _logger.warning(message)
                output_images.append(None)
            else:
                output_images.append(output_file.read())
        return output_images
@api.model
def _run_wkhtmltopdf(
self,
@ -462,12 +532,24 @@ class IrActionsReport(models.Model):
files_command_args = []
temporary_files = []
temp_session = None
# Passing the cookie to wkhtmltopdf in order to resolve internal links.
if request and request.db:
# Create a temporary session which will not create device logs
temp_session = root.session_store.new()
temp_session.update({
**request.session,
'debug': '',
'_trace_disable': True,
})
if temp_session.uid:
temp_session.session_token = security.compute_session_token(temp_session, self.env)
root.session_store.save(temp_session)
base_url = self._get_report_url()
domain = urlparse(base_url).hostname
cookie = f'session_id={request.session.sid}; HttpOnly; domain={domain}; path=/;'
cookie = f'session_id={temp_session.sid}; HttpOnly; domain={domain}; path=/;'
cookie_jar_file_fd, cookie_jar_file_path = tempfile.mkstemp(suffix='.txt', prefix='report.cookie_jar.tmp.')
temporary_files.append(cookie_jar_file_path)
with closing(os.fdopen(cookie_jar_file_fd, 'wb')) as cookie_jar_file:
@ -514,22 +596,21 @@ class IrActionsReport(models.Model):
try:
wkhtmltopdf = [_get_wkhtmltopdf_bin()] + command_args + files_command_args + paths + [pdf_report_path]
process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
err = ustr(err)
process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8")
_out, err = process.communicate()
if process.returncode not in [0, 1]:
if process.returncode == -11:
message = _(
'Wkhtmltopdf failed (error code: %s). Memory limit too low or maximum file number of subprocess reached. Message : %s',
process.returncode,
err[-1000:],
'Wkhtmltopdf failed (error code: %(error_code)s). Memory limit too low or maximum file number of subprocess reached. Message : %(message)s',
error_code=process.returncode,
message=err[-1000:],
)
else:
message = _(
'Wkhtmltopdf failed (error code: %s). Message: %s',
process.returncode,
err[-1000:],
'Wkhtmltopdf failed (error code: %(error_code)s). Message: %(message)s',
error_code=process.returncode,
message=err[-1000:],
)
_logger.warning(message)
raise UserError(message)
@ -538,6 +619,9 @@ class IrActionsReport(models.Model):
_logger.warning('wkhtmltopdf: %s' % err)
except:
raise
finally:
if temp_session:
root.session_store.delete(temp_session)
with open(pdf_report_path, 'rb') as pdf_document:
pdf_content = pdf_document.read()
@ -688,20 +772,18 @@ class IrActionsReport(models.Model):
)
return view_obj._render_template(template, values).encode()
def _handle_merge_pdfs_error(self, error=None, error_stream=None):
    """Default error handler for PDF merging: abort with a user-facing error.

    :param error: the exception raised while reading a PDF stream
    :param error_stream: the stream that could not be merged
    :raises UserError: always
    """
    raise UserError(_("Odoo is unable to merge the generated PDFs."))
@api.model
def _merge_pdfs(self, streams):
def _merge_pdfs(self, streams, handle_error=_handle_merge_pdfs_error):
writer = PdfFileWriter()
for stream in streams:
try:
reader = PdfFileReader(stream)
writer.appendPagesFromReader(reader)
except (PdfReadError, TypeError, NotImplementedError, ValueError):
# TODO : make custom_error_handler a parameter in master
custom_error_handler = self.env.context.get('custom_error_handler')
if custom_error_handler:
custom_error_handler(stream)
continue
raise UserError(_("Odoo is unable to merge the generated PDFs."))
except (PdfReadError, TypeError, NotImplementedError, ValueError) as e:
handle_error(error=e, error_stream=stream)
result_stream = io.BytesIO()
streams.append(result_stream)
writer.write(result_stream)
@ -774,13 +856,13 @@ class IrActionsReport(models.Model):
html = self.with_context(**additional_context)._render_qweb_html(report_ref, all_res_ids_wo_stream, data=data)[0]
bodies, html_ids, header, footer, specific_paperformat_args = self.with_context(**additional_context)._prepare_html(html, report_model=report_sudo.model)
bodies, html_ids, header, footer, specific_paperformat_args = report_sudo.with_context(**additional_context)._prepare_html(html, report_model=report_sudo.model)
if not has_duplicated_ids and report_sudo.attachment and set(res_ids_wo_stream) != set(html_ids):
raise UserError(_(
"The report's template %r is wrong, please contact your administrator. \n\n"
"Can not separate file to save as attachment because the report's template does not contains the"
" attributes 'data-oe-model' and 'data-oe-id' on the div with 'article' classname.",
"Report template “%s” has an issue, please contact your administrator. \n\n"
"Cannot separate file to save as attachment because the report's template does not contain the"
" attributes 'data-oe-model' and 'data-oe-id' as part of the div with 'article' classname.",
report_sudo.name,
))
@ -865,9 +947,11 @@ class IrActionsReport(models.Model):
stream = io.BytesIO()
attachment_writer.write(stream)
collected_streams[res_ids_wo_stream[i]]['stream'] = stream
return collected_streams
else:
for res_id in res_ids_wo_stream:
individual_collected_stream = self._render_qweb_pdf_prepare_streams(report_ref=report_ref, data=data, res_ids=[res_id])
collected_streams[res_id]['stream'] = individual_collected_stream[res_id]['stream']
collected_streams[False] = {'stream': pdf_content_stream, 'attachment': None}
return collected_streams
@ -910,7 +994,7 @@ class IrActionsReport(models.Model):
})
return attachment_vals_list
def _render_qweb_pdf(self, report_ref, res_ids=None, data=None):
def _pre_render_qweb_pdf(self, report_ref, res_ids=None, data=None):
if not data:
data = {}
if isinstance(res_ids, int):
@ -922,7 +1006,19 @@ class IrActionsReport(models.Model):
return self._render_qweb_html(report_ref, res_ids, data=data)
self = self.with_context(webp_as_jpg=True)
collected_streams = self._render_qweb_pdf_prepare_streams(report_ref, data, res_ids=res_ids)
return self._render_qweb_pdf_prepare_streams(report_ref, data, res_ids=res_ids), 'pdf'
def _render_qweb_pdf(self, report_ref, res_ids=None, data=None):
if not data:
data = {}
if isinstance(res_ids, int):
res_ids = [res_ids]
data.setdefault('report_type', 'pdf')
collected_streams, report_type = self._pre_render_qweb_pdf(report_ref, res_ids=res_ids, data=data)
if report_type != 'pdf':
return collected_streams, report_type
has_duplicated_ids = res_ids and len(res_ids) != len(set(res_ids))
# access the report details with sudo() but keep evaluation context as current user
@ -940,6 +1036,9 @@ class IrActionsReport(models.Model):
else:
_logger.info("The PDF documents %r are now saved in the database", attachment_names)
def custom_handle_merge_pdfs_error(error, error_stream):
error_record_ids.append(stream_to_ids[error_stream])
stream_to_ids = {v['stream']: k for k, v in collected_streams.items() if v['stream']}
# Merge all streams together for a single record.
streams_to_merge = list(stream_to_ids.keys())
@ -948,9 +1047,7 @@ class IrActionsReport(models.Model):
if len(streams_to_merge) == 1:
pdf_content = streams_to_merge[0].getvalue()
else:
with self.with_context(
custom_error_handler=lambda error_stream: error_record_ids.append(stream_to_ids[error_stream])
)._merge_pdfs(streams_to_merge) as pdf_merged_stream:
with self._merge_pdfs(streams_to_merge, custom_handle_merge_pdfs_error) as pdf_merged_stream:
pdf_content = pdf_merged_stream.getvalue()
if error_record_ids:
@ -959,7 +1056,7 @@ class IrActionsReport(models.Model):
'name': _('Problematic record(s)'),
'res_model': report_sudo.model,
'domain': [('id', 'in', error_record_ids)],
'views': [(False, 'tree'), (False, 'form')],
'views': [(False, 'list'), (False, 'form')],
}
num_errors = len(error_record_ids)
if num_errors == 1:
@ -1065,11 +1162,52 @@ class IrActionsReport(models.Model):
return report_action
def _action_configure_external_report_layout(self, report_action):
action = self.env["ir.actions.actions"]._for_xml_id("web.action_base_document_layout_configurator")
def _action_configure_external_report_layout(self, report_action, xml_id="web.action_base_document_layout_configurator"):
    """Wrap ``report_action`` into the document-layout configurator dialog.

    :param dict report_action: the report action to run once the layout has
        been configured; stored into the configurator's context and flagged
        to close the dialog when the report downloads
    :param str xml_id: xmlid of the configurator action to open
    :return: the configurator action dict, with its context enriched
    """
    action = self.env["ir.actions.actions"]._for_xml_id(xml_id)
    # NOTE(review): assumes action['context'] is always present as a JSON
    # string — the `{}` fallback would crash json.loads; confirm upstream
    py_ctx = json.loads(action.get('context', {}))
    report_action['close_on_report_download'] = True
    py_ctx['report_action'] = report_action
    py_ctx['dialog_size'] = 'large'
    action['context'] = py_ctx
    return action
def get_valid_action_reports(self, model, record_ids):
    """ Return the list of ids of actions for which the domain is
    satisfied by at least one record in record_ids.

    :param model: the model of the records to validate
    :param record_ids: list of ids of records to validate
    """
    records = self.env[model].browse(record_ids)
    with_domain = self.filtered('domain')
    # actions without a domain are always valid
    valid_ids = list((self - with_domain).ids)
    valid_ids.extend(
        action.id
        for action in with_domain
        if records.filtered_domain(literal_eval(action.domain))
    )
    return valid_ids
@api.model
def _prepare_local_attachments(self, attachments):
    """Return a recordset of attachments whose binary content is local.

    Local attachments are kept as-is. Remote ones (url / cloud storage)
    are downloaded and wrapped into transient ``new()`` ir.attachment
    records holding the fetched bytes. Remote attachments that cannot be
    fetched (request failure, empty body) are logged and skipped.

    :param attachments: ir.attachment recordset to inspect
    :return: ir.attachment recordset of attachments carrying their data
    """
    attachments_with_data = self.env['ir.attachment']
    for attachment in attachments:
        if not attachment._is_remote_source():
            attachments_with_data |= attachment
        elif (stream := attachment._to_http_stream()) and stream.url:
            # call `_to_http_stream()` in case the attachment is an url or cloud storage attachment
            try:
                response = requests.get(stream.url, timeout=10)
                response.raise_for_status()
                attachment_data = response.content
                if not attachment_data:
                    # fixed log string: was "Attachment %s at with URL %s"
                    _logger.warning("Attachment %s with URL %s retrieved successfully, but no content was found.", attachment.id, attachment.url)
                    continue
                attachments_with_data |= self.env['ir.attachment'].new({
                    'db_datas': attachment_data,
                    'name': attachment.name,
                    'mimetype': attachment.mimetype,
                    'res_model': attachment.res_model,
                    'res_id': attachment.res_id
                })
            except requests.exceptions.RequestException as e:
                _logger.error("Request for attachment %s with URL %s failed: %s", attachment.id, attachment.url, e)
        else:
            _logger.error("Unexpected edge case: Is not being considered as a local or remote attachment, attachment ID:%s will be skipped.", attachment.id)
    return attachments_with_data

View file

@ -4,22 +4,21 @@ import base64
import binascii
import contextlib
import hashlib
import io
import itertools
import logging
import mimetypes
import os
import psycopg2
import re
import uuid
import werkzeug
from collections import defaultdict
from PIL import Image
from odoo import api, fields, models, SUPERUSER_ID, tools, _
from odoo.exceptions import AccessError, ValidationError, UserError
from odoo.tools import config, human_size, ImageProcess, str2bool, consteq
from odoo.tools.mimetypes import guess_mimetype
from odoo.http import Stream, root, request
from odoo.tools import config, human_size, image, str2bool, consteq
from odoo.tools.mimetypes import guess_mimetype, fix_filename_extension, _olecf_mimetypes
from odoo.osv import expression
_logger = logging.getLogger(__name__)
@ -102,12 +101,6 @@ class IrAttachment(models.Model):
@api.model
def _get_path(self, bin_data, sha):
# retro compatibility
fname = sha[:3] + '/' + sha
full_path = self._full_path(fname)
if os.path.isfile(full_path):
return fname, full_path # keep existing path
# scatter files across 256 dirs
# we use '/' in the db (even on windows)
fname = sha[:2] + '/' + sha
@ -115,6 +108,7 @@ class IrAttachment(models.Model):
dirname = os.path.dirname(full_path)
if not os.path.isdir(dirname):
os.makedirs(dirname, exist_ok=True)
# prevent sha-1 collision
if os.path.isfile(full_path) and not self._same_content(bin_data, full_path):
raise UserError(_("The attachment collides with an existing file."))
@ -340,11 +334,10 @@ class IrAttachment(models.Model):
max_resolution = ICP('base.image_autoresize_max_px', '1920x1920')
if str2bool(max_resolution, True):
try:
img = False
if is_raw:
img = ImageProcess(values['raw'], verify_resolution=False)
img = image.ImageProcess(values['raw'], verify_resolution=False)
else: # datas
img = ImageProcess(base64.b64decode(values['datas']), verify_resolution=False)
img = image.ImageProcess(base64.b64decode(values['datas']), verify_resolution=False)
if not img.image:
_logger.info('Post processing ignored : Empty source, SVG, or WEBP')
@ -363,7 +356,8 @@ class IrAttachment(models.Model):
except UserError as e:
# Catch error during test where we provide fake image
# raise UserError(_("This file could not be decoded as an image file. Please try with a different file."))
_logger.info('Post processing ignored : %s', e)
msg = str(e) # the exception can be lazy-translated, resolve it here
_logger.info('Post processing ignored : %s', msg)
return values
def _check_contents(self, values):
@ -372,8 +366,9 @@ class IrAttachment(models.Model):
'xml' in mimetype and # other xml (svg, text/xml, etc)
not mimetype.startswith('application/vnd.openxmlformats')) # exception for Office formats
force_text = xml_like and (
self.env.context.get('attachments_mime_plainxml') or
not self.env['ir.ui.view'].sudo(False).check_access_rights('write', False))
self.env.context.get('attachments_mime_plainxml')
or not self.env['ir.ui.view'].sudo(False).has_access('write')
)
if force_text:
values['mimetype'] = 'text/plain'
if not self.env.context.get('image_no_postprocess'):
@ -407,10 +402,9 @@ class IrAttachment(models.Model):
name = fields.Char('Name', required=True)
description = fields.Text('Description')
res_name = fields.Char('Resource Name', compute='_compute_res_name')
res_model = fields.Char('Resource Model', readonly=True)
res_field = fields.Char('Resource Field', readonly=True)
res_id = fields.Many2oneReference('Resource ID', model_field='res_model',
readonly=True)
res_model = fields.Char('Resource Model')
res_field = fields.Char('Resource Field')
res_id = fields.Many2oneReference('Resource ID', model_field='res_model')
company_id = fields.Many2one('res.company', string='Company', change_default=True,
default=lambda self: self.env.company)
type = fields.Selection([('url', 'URL'), ('binary', 'File')],
@ -426,7 +420,7 @@ class IrAttachment(models.Model):
raw = fields.Binary(string="File Content (raw)", compute='_compute_raw', inverse='_inverse_raw')
datas = fields.Binary(string='File Content (base64)', compute='_compute_datas', inverse='_inverse_datas')
db_datas = fields.Binary('Database Data', attachment=False)
store_fname = fields.Char('Stored Filename', index=True, unaccent=False)
store_fname = fields.Char('Stored Filename', index=True)
file_size = fields.Integer('File Size', readonly=True)
checksum = fields.Char("Checksum/SHA1", size=40, readonly=True)
mimetype = fields.Char('Mime Type', readonly=True)
@ -474,9 +468,8 @@ class IrAttachment(models.Model):
raise AccessError(_("Sorry, you are not allowed to access this document."))
if res_field:
field = self.env[res_model]._fields[res_field]
if field.groups:
if not self.env.user.user_has_groups(field.groups):
raise AccessError(_("Sorry, you are not allowed to access this document."))
if not field.is_accessible(self.env):
raise AccessError(_("Sorry, you are not allowed to access this document."))
if not (res_model and res_id):
continue
model_ids[res_model].add(res_id)
@ -499,8 +492,7 @@ class IrAttachment(models.Model):
# For related models, check if we can write to the model, as unlinking
# and creating attachments can be seen as an update to the model
access_mode = 'write' if mode in ('create', 'unlink') else mode
records.check_access_rights(access_mode)
records.check_access_rule(access_mode)
records.check_access(access_mode)
@api.model
def _filter_attachment_access(self, attachment_ids):
@ -511,7 +503,7 @@ class IrAttachment(models.Model):
"""
ret_attachments = self.env['ir.attachment']
attachments = self.browse(attachment_ids)
if not attachments.check_access_rights('read', raise_exception=False):
if not attachments.has_access('read'):
return ret_attachments
for attachment in attachments.sudo():
@ -525,7 +517,7 @@ class IrAttachment(models.Model):
return ret_attachments
@api.model
def _search(self, domain, offset=0, limit=None, order=None, access_rights_uid=None):
def _search(self, domain, offset=0, limit=None, order=None):
# add res_field=False in domain if not present; the arg[0] trick below
# works for domain items and '&'/'|'/'!' operators too
disable_binary_fields_attachments = False
@ -535,24 +527,17 @@ class IrAttachment(models.Model):
if self.env.is_superuser():
# rules do not apply for the superuser
return super()._search(domain, offset, limit, order, access_rights_uid)
return super()._search(domain, offset, limit, order)
# For attachments, the permissions of the document they are attached to
# apply, so we must remove attachments for which the user cannot access
# the linked document. For the sake of performance, fetch the fields to
# determine those permissions within the same SQL query.
self.flush_model(['res_model', 'res_id', 'res_field', 'public', 'create_uid'])
query = super()._search(domain, offset, limit, order, access_rights_uid)
query_str, params = query.select(
f'"{self._table}"."id"',
f'"{self._table}"."res_model"',
f'"{self._table}"."res_id"',
f'"{self._table}"."res_field"',
f'"{self._table}"."public"',
f'"{self._table}"."create_uid"',
)
self.env.cr.execute(query_str, params)
rows = self.env.cr.fetchall()
fnames_to_read = ['id', 'res_model', 'res_id', 'res_field', 'public', 'create_uid']
query = super()._search(domain, offset, limit, order)
rows = self.env.execute_query(query.select(
*[self._field_to_sql(self._table, fname) for fname in fnames_to_read],
))
# determine permissions based on linked records
all_ids = []
@ -566,7 +551,7 @@ class IrAttachment(models.Model):
if res_field and not self.env.is_system():
field = self.env[res_model]._fields[res_field]
if field.groups and not self.env.user.user_has_groups(field.groups):
if field.groups and not self.env.user.has_groups(field.groups):
continue
if not res_id and (self.env.is_system() or create_uid == self.env.uid):
@ -580,7 +565,7 @@ class IrAttachment(models.Model):
if res_model not in self.env:
allowed_ids.update(id_ for ids in targets.values() for id_ in ids)
continue
if not self.env[res_model].check_access_rights('read', False):
if not self.env[res_model].has_access('read'):
continue
# filter ids according to what access rules permit
ResModel = self.env[res_model].with_context(active_test=False)
@ -599,7 +584,7 @@ class IrAttachment(models.Model):
if len(all_ids) == limit and len(result) < self._context.get('need', limit):
need = self._context.get('need', limit) - len(result)
more_ids = self.with_context(need=need)._search(
domain, offset + len(all_ids), limit, order, access_rights_uid,
domain, offset + len(all_ids), limit, order,
)
result.extend(list(more_ids)[:limit - len(result)])
@ -614,11 +599,14 @@ class IrAttachment(models.Model):
vals = self._check_contents(vals)
return super(IrAttachment, self).write(vals)
def copy(self, default=None):
if not (default or {}).keys() & {'datas', 'db_datas', 'raw'}:
# ensure the content is kept and recomputes checksum/store_fname
default = dict(default or {}, raw=self.raw)
return super(IrAttachment, self).copy(default)
def copy_data(self, default=None):
    """Duplicate values while carrying over the binary content.

    Unless the caller already supplies new content ('datas', 'db_datas'
    or 'raw') in ``default``, inject each record's raw bytes so the copy
    keeps the file and recomputes checksum/store_fname.
    """
    default = dict(default or {})
    vals_list = super().copy_data(default=default)
    # the condition only depends on `default`, not on the record: hoist it
    if not default.keys() & {'datas', 'db_datas', 'raw'}:
        for attachment, vals in zip(self, vals_list):
            # ensure the content is kept and recomputes checksum/store_fname
            vals['raw'] = attachment.raw
    return vals_list
def unlink(self):
if not self:
@ -729,7 +717,7 @@ class IrAttachment(models.Model):
if record_sudo.with_context(prefetch_fields=False).public:
return record_sudo
if self.env.user.has_group('base.group_portal'):
if self.env.user._is_portal():
# Check the read access on the record linked to the attachment
# eg: Allow to download an attachment on a task from /my/tasks/task_id
self.check('read')
@ -756,3 +744,95 @@ class IrAttachment(models.Model):
('create_uid', '=', SUPERUSER_ID),
]).unlink()
self.env.registry.clear_cache('assets')
def _from_request_file(self, file, *, mimetype, **vals):
    """
    Create an attachment out of a request file

    :param file: the request file
    :param str mimetype:
        * "TRUST" to use the mimetype and file extension from the
          request file with no verification.
        * "GUESS" to determine the mimetype and file extension on
          the file's content. The determined extension is added at
          the end of the filename unless the filename already had a
          valid extension.
        * a mimetype in format "{type}/{subtype}" to force the
          mimetype to the given value, it adds the corresponding
          file extension at the end of the filename unless the
          filename already had a valid extension.
    :param vals: extra values forwarded to ``create()``
    :return: the newly created ir.attachment record
    :raises ValueError: when ``mimetype`` matches none of the three forms
    """
    if mimetype == 'TRUST':
        mimetype = file.content_type
        filename = file.filename
    elif mimetype == 'GUESS':
        # sniff the first KiB, then rewind so the later read() gets it all
        head = file.read(1024)
        file.seek(-len(head), 1)  # rewind
        mimetype = guess_mimetype(head)
        filename = fix_filename_extension(file.filename, mimetype)
        if mimetype in ('application/zip', *_olecf_mimetypes):
            # container formats (zip/OLE): refine using the filename extension
            mimetype = mimetypes.guess_type(filename)[0]
    elif all(mimetype.partition('/')):
        # an explicit "{type}/{subtype}" mimetype was forced by the caller
        filename = fix_filename_extension(file.filename, mimetype)
    else:
        raise ValueError(f'{mimetype=}')
    return self.create({
        'name': filename,
        'type': 'binary',
        'raw': file.read(),  # load the entire file in memory :(
        'mimetype': mimetype,
        **vals,
    })
def _to_http_stream(self):
    """ Create a :class:`~Stream`: from an ir.attachment record.

    The stream source is picked in priority order: filestore path,
    database content, URL (addon static file or external), and finally
    an empty payload when the attachment has no content at all.
    """
    self.ensure_one()
    stream = Stream(
        mimetype=self.mimetype,
        download_name=self.name,
        etag=self.checksum,
        public=self.public,
    )
    if self.store_fname:
        # filestore attachment: serve straight from disk
        stream.type = 'path'
        stream.path = werkzeug.security.safe_join(
            os.path.abspath(config.filestore(request.db)),
            self.store_fname
        )
        stat = os.stat(stream.path)
        stream.last_modified = stat.st_mtime
        stream.size = stat.st_size
    elif self.db_datas:
        # content stored in the database
        stream.type = 'data'
        stream.data = self.raw
        stream.last_modified = self.write_date
        stream.size = len(stream.data)
    elif self.url:
        # When the URL targets a file located in an addon, assume it
        # is a path to the resource. It saves an indirection and
        # stream the file right away.
        static_path = root.get_static_file(
            self.url,
            host=request.httprequest.environ.get('HTTP_HOST', '')
        )
        if static_path:
            stream = Stream.from_path(static_path, public=True)
        else:
            stream.type = 'url'
            stream.url = self.url
    else:
        # no content at all: serve an empty payload
        stream.type = 'data'
        stream.data = b''
        stream.size = 0
    return stream
def _is_remote_source(self):
    """Return whether the attachment points to an external resource.

    True when the record has a http/https/ftp URL and no locally stored
    content (``file_size`` falsy).

    :rtype: bool
    """
    self.ensure_one()
    # bool() so callers always get True/False rather than ''/False/None-ish
    return bool(self.url and not self.file_size and self.url.startswith(('http://', 'https://', 'ftp://')))

View file

@ -9,6 +9,7 @@ from odoo.http import Stream, request
from odoo.tools import file_open, replace_exceptions
from odoo.tools.image import image_process, image_guess_size_from_field_name
from odoo.tools.mimetypes import guess_mimetype, get_extension
from odoo.tools.misc import verify_limited_field_access_token
DEFAULT_PLACEHOLDER_PATH = 'web/static/img/placeholder.png'
@ -21,7 +22,7 @@ class IrBinary(models.AbstractModel):
def _find_record(
self, xmlid=None, res_model='ir.attachment', res_id=None,
access_token=None,
access_token=None, field=None
):
"""
Find and return a record either using an xmlid either a model+id
@ -45,16 +46,16 @@ class IrBinary(models.AbstractModel):
record = self.env[res_model].browse(res_id).exists()
if not record:
raise MissingError(f"No record found for xmlid={xmlid}, res_model={res_model}, id={res_id}")
record = self._find_record_check_access(record, access_token)
if access_token and verify_limited_field_access_token(record, field, access_token):
return record.sudo()
record = self._find_record_check_access(record, access_token, field)
return record
def _find_record_check_access(self, record, access_token):
def _find_record_check_access(self, record, access_token, field):
if record._name == 'ir.attachment':
return record.validate_access(access_token)
record.check_access_rights('read')
record.check_access_rule('read')
record.check_access('read')
return record
def _record_to_stream(self, record, field_name):
@ -70,7 +71,7 @@ class IrBinary(models.AbstractModel):
:rtype: odoo.http.Stream
"""
if record._name == 'ir.attachment' and field_name in ('raw', 'datas', 'db_datas'):
return Stream.from_attachment(record)
return record._to_http_stream()
record.check_field_access_rights('read', [field_name])
@ -82,7 +83,7 @@ class IrBinary(models.AbstractModel):
limit=1)
if not field_attachment:
raise MissingError("The related attachment does not exist.")
return Stream.from_attachment(field_attachment)
return field_attachment._to_http_stream()
return Stream.from_binary_field(record, field_name)

View file

@ -65,7 +65,7 @@ class IrConfigParameter(models.Model):
:return: The value of the parameter, or ``default`` if it does not exist.
:rtype: string
"""
self.check_access_rights('read')
self.browse().check_access('read')
return self._get_param(key) or default
@api.model

View file

@ -4,19 +4,26 @@ import threading
import time
import os
import psycopg2
import psycopg2.errors
import pytz
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from psycopg2 import sql
import odoo
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.modules.registry import Registry
from odoo.tools import SQL
_logger = logging.getLogger(__name__)
BASE_VERSION = odoo.modules.get_manifest('base')['version']
MAX_FAIL_TIME = timedelta(hours=5) # chosen with a fair roll of the dice
MAX_BATCH_PER_CRON_JOB = 10
CONSECUTIVE_TIMEOUT_FOR_FAILURE = 3
MIN_FAILURE_COUNT_BEFORE_DEACTIVATION = 5
MIN_DELTA_BEFORE_DEACTIVATION = timedelta(days=7)
# crons must satisfy both minimum thresholds before deactivation
# custom function to call instead of default PostgreSQL's `pg_notify`
ODOO_NOTIFY_FUNCTION = os.getenv('ODOO_NOTIFY_FUNCTION', 'pg_notify')
@ -38,6 +45,12 @@ _intervalTypes = {
}
class CompletionStatus: # inherit from enum.StrEnum in 3.11
    """String constants describing how far a cron job's work went."""
    FULLY_DONE = 'fully done'
    PARTIALLY_DONE = 'partially done'
    FAILED = 'failed'
class ir_cron(models.Model):
""" Model describing cron jobs (also called actions or tasks).
"""
@ -58,17 +71,25 @@ class ir_cron(models.Model):
cron_name = fields.Char('Name', compute='_compute_cron_name', store=True)
user_id = fields.Many2one('res.users', string='Scheduler User', default=lambda self: self.env.user, required=True)
active = fields.Boolean(default=True)
interval_number = fields.Integer(default=1, group_operator=None, help="Repeat every x.")
interval_number = fields.Integer(default=1, aggregator=None, help="Repeat every x.", required=True)
interval_type = fields.Selection([('minutes', 'Minutes'),
('hours', 'Hours'),
('days', 'Days'),
('weeks', 'Weeks'),
('months', 'Months')], string='Interval Unit', default='months')
numbercall = fields.Integer(string='Number of Calls', default=1, help='How many times the method is called,\na negative number indicates no limit.')
doall = fields.Boolean(string='Repeat Missed', help="Specify if missed occurrences should be executed when the server restarts.")
('months', 'Months')], string='Interval Unit', default='months', required=True)
nextcall = fields.Datetime(string='Next Execution Date', required=True, default=fields.Datetime.now, help="Next planned execution date for this job.")
lastcall = fields.Datetime(string='Last Execution Date', help="Previous time the cron ran successfully, provided to the job through the context on the `lastcall` key")
priority = fields.Integer(default=5, group_operator=None, help='The priority of the job, as an integer: 0 means higher priority, 10 means lower priority.')
priority = fields.Integer(default=5, aggregator=None, help='The priority of the job, as an integer: 0 means higher priority, 10 means lower priority.')
failure_count = fields.Integer(default=0, help="The number of consecutive failures of this job. It is automatically reset on success.")
first_failure_date = fields.Datetime(string='First Failure Date', help="The first time the cron failed. It is automatically reset on success.")
_sql_constraints = [
(
'check_strictly_positive_interval',
'CHECK(interval_number > 0)',
'The interval number must be a strictly positive number.'
),
]
@api.depends('ir_actions_server_id.name')
def _compute_cron_name(self):
@ -90,24 +111,16 @@ class ir_cron(models.Model):
self = self.with_context(default_state='code')
return super(ir_cron, self).default_get(fields_list)
@api.onchange('active', 'interval_number', 'interval_type')
def _onchange_interval_number(self):
    """Deactivate the cron and warn when its interval is not strictly positive."""
    # nothing to do for inactive crons or well-configured intervals
    if not self.active or (self.interval_number > 0 and self.interval_type):
        return
    self.active = False
    return {'warning': {
        'title': _("Scheduled action disabled"),
        'message': _("This scheduled action has been disabled because its interval number is not a strictly positive value.")}
    }
def method_direct_trigger(self):
self.check_access_rights('write')
for cron in self:
cron._try_lock()
_logger.info('Manually starting job `%s`.', cron.name)
cron.with_user(cron.user_id).with_context({'lastcall': cron.lastcall}).ir_actions_server_id.run()
self.env.flush_all()
_logger.info('Job `%s` done.', cron.name)
cron.lastcall = fields.Datetime.now()
self.ensure_one()
self.browse().check_access('write')
self._try_lock()
_logger.info('Job %r (%s) started manually', self.name, self.id)
self, _ = self.with_user(self.user_id).with_context({'lastcall': self.lastcall})._add_progress() # noqa: PLW0642
self.ir_actions_server_id.run()
self.lastcall = fields.Datetime.now()
self.env.flush_all()
_logger.info('Job %r (%s) done', self.name, self.id)
return True
@classmethod
@ -125,7 +138,7 @@ class ir_cron(models.Model):
for job_id in (job['id'] for job in jobs):
try:
job = cls._acquire_one_job(cron_cr, (job_id,))
job = cls._acquire_one_job(cron_cr, job_id)
except psycopg2.extensions.TransactionRollbackError:
cron_cr.rollback()
_logger.debug("job %s has been processed by another worker, skip", job_id)
@ -135,22 +148,19 @@ class ir_cron(models.Model):
continue
_logger.debug("job %s acquired", job_id)
# take into account overridings of _process_job() on that database
registry = odoo.registry(db_name).check_signaling()
registry = Registry(db_name).check_signaling()
registry[cls._name]._process_job(db, cron_cr, job)
cron_cr.commit()
_logger.debug("job %s updated and released", job_id)
except BadVersion:
_logger.warning('Skipping database %s as its base version is not %s.', db_name, BASE_VERSION)
except BadModuleState:
_logger.warning('Skipping database %s because of modules to install/upgrade/remove.', db_name)
except psycopg2.errors.UndefinedTable:
# The table ir_cron does not exist; this is probably not an OpenERP database.
_logger.warning('Tried to poll an undefined table on database %s.', db_name)
except psycopg2.ProgrammingError as e:
if e.pgcode == '42P01':
# Class 42 — Syntax Error or Access Rule Violation; 42P01: undefined_table
# The table ir_cron does not exist; this is probably not an OpenERP database.
_logger.warning('Tried to poll an undefined table on database %s.', db_name)
else:
raise
raise
except Exception:
_logger.warning('Exception in cron:', exc_info=True)
finally:
@ -206,7 +216,6 @@ class ir_cron(models.Model):
SELECT *
FROM ir_cron
WHERE active = true
AND numbercall != 0
AND (nextcall <= (now() at time zone 'UTC')
OR id in (
SELECT cron_id
@ -214,48 +223,45 @@ class ir_cron(models.Model):
WHERE call_at <= (now() at time zone 'UTC')
)
)
ORDER BY priority
ORDER BY failure_count, priority, id
""")
return cr.dictfetchall()
@classmethod
def _acquire_one_job(cls, cr, job_ids):
def _acquire_one_job(cls, cr, job_id):
"""
Acquire for update one job that is ready from the job_ids tuple.
Acquire for update the job with id ``job_id``.
The jobs that have already been processed in this worker should
be excluded from the tuple.
The job should not have been processed yet by the current
worker. Another worker may process the job again, may that job
become ready again quickly enough (e.g. self-triggering, high
frequency, or partially done jobs).
This function raises a ``psycopg2.errors.SerializationFailure``
when the ``nextcall`` of one of the job_ids is modified in
another transaction. You should rollback the transaction and try
again later.
Note: It is possible that this function raises a
``psycopg2.errors.SerializationFailure`` in case the job
has been processed in another worker. In such case it is
advised to roll back the transaction and to go on with the
other jobs.
"""
# We have to make sure ALL jobs are executed ONLY ONCE no matter
# how many cron workers may process them. The exlusion mechanism
# is twofold: (i) prevent parallel processing of the same job,
# and (ii) prevent re-processing jobs that have been processed
# already.
# The query must make sure that (i) two cron workers cannot
# process a given job at a same time. The query must also make
# sure that (ii) a job already processed in another worker
# should not be processed again by this one (or at least not
# before the job becomes ready again).
#
# (i) is implemented via `LIMIT 1 FOR UPDATE SKIP LOCKED`, each
# (i) is implemented via `FOR NO KEY UPDATE SKIP LOCKED`, each
# worker just acquire one available job at a time and lock it so
# the other workers don't select it too.
# (ii) is implemented via the `WHERE` statement, when a job has
# been processed, its nextcall is updated to a date in the
# future and the optional triggers are removed.
#
# Note about (ii): it is possible that a job becomes available
# again quickly (e.g. high frequency or self-triggering cron).
# This function doesn't prevent from acquiring that job multiple
# times at different moments. This can block a worker on
# executing a same job in loop. To prevent this problem, the
# callee is responsible of providing a `job_ids` tuple without
# the jobs it has executed already.
# been processed and is fully done, its nextcall is updated to a
# date in the future and the optional triggers are removed. In
# case a job has only been partially done, the job is left ready
# to be acquired again by another cron worker.
#
# An `UPDATE` lock type is the strongest row lock, it conflicts
# with ALL other lock types. Among them the `KEY SHARE` row lock
# which is implicitely aquired by foreign keys to prevent the
# which is implicitly acquired by foreign keys to prevent the
# referenced record from being removed while in use. Because we
# never delete acquired cron jobs, foreign keys are safe to
# concurrently reference cron jobs. Hence, the `NO KEY UPDATE`
@ -265,10 +271,17 @@ class ir_cron(models.Model):
# Learn more: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS
query = """
WITH last_cron_progress AS (
SELECT id as progress_id, cron_id, timed_out_counter, done, remaining
FROM ir_cron_progress
WHERE cron_id = %s
ORDER BY id DESC
LIMIT 1
)
SELECT *
FROM ir_cron
WHERE active = true
AND numbercall != 0
LEFT JOIN last_cron_progress lcp ON lcp.cron_id = ir_cron.id
WHERE ir_cron.active = true
AND (nextcall <= (now() at time zone 'UTC')
OR EXISTS (
SELECT cron_id
@ -277,12 +290,12 @@ class ir_cron(models.Model):
AND cron_id = ir_cron.id
)
)
AND id in %s
AND id = %s
ORDER BY priority
LIMIT 1 FOR NO KEY UPDATE SKIP LOCKED
FOR NO KEY UPDATE SKIP LOCKED
"""
try:
cr.execute(query, [job_ids], log_exceptions=False)
cr.execute(query, [job_id, job_id], log_exceptions=False)
except psycopg2.extensions.TransactionRollbackError:
# A serialization error can occur when another cron worker
# commits the new `nextcall` value of a cron it just ran and
@ -292,123 +305,281 @@ class ir_cron(models.Model):
except Exception as exc:
_logger.error("bad query: %s\nERROR: %s", query, exc)
raise
return cr.dictfetchone()
job = cr.dictfetchone()
if not job: # Job is already taken
return None
for field_name in ('done', 'remaining', 'timed_out_counter'):
job[field_name] = job[field_name] or 0
return job
def _notify_admin(self, message):
    """
    Notify ``message`` to some administrator.

    The base implementation only emits a warning in the server logs.
    It is supposed to be overridden with some actual communication
    mechanism (mail, internal messaging, ...).

    :param str message: the message to deliver to an administrator
    """
    _logger.warning(message)
@classmethod
def _process_job(cls, db, cron_cr, job):
""" Execute a cron job and re-schedule a call for later. """
"""
Execute the cron's server action in a dedicated transaction.
# Compute how many calls were missed and at what time we should
# recall the cron next. In the example bellow, we fake a cron
# with an interval of 30 (starting at 0) that was last executed
# at 15 and that is executed again at 135.
#
# 0 60 120 180
# --|-----|-----|-----|-----|-----|-----|----> time
# 1 2* * * * 3 4
#
# 1: lastcall, the last time the cron was executed
# 2: past_nextcall, the cron nextcall as seen from lastcall
# *: missed_call, a total of 4 calls are missing
# 3: now
# 4: future_nextcall, the cron nextcall as seen from now
In case the previous process actually timed out, the cron's
server action is not executed and the cron is considered
``'failed'``.
if job['interval_number'] <= 0:
_logger.error("Job %s %r has been disabled because its interval number is null or negative.", job['id'], job['cron_name'])
cron_cr.execute("UPDATE ir_cron SET active=false WHERE id=%s", [job['id']])
return
The server action can use the progress API via the method
:meth:`_notify_progress` to report processing progress, i.e. how
many records are done and how many records are remaining to
process.
Those progress notifications are used to determine the job's
``CompletionStatus`` and to determine the next time the cron
will be executed:
- ``'fully done'``: the cron is rescheduled later, it'll be
executed again after its regular time interval or upon a new
trigger.
- ``'partially done'``: the cron is rescheduled ASAP, it'll be
executed again by this or another cron worker once the other
ready cron jobs have been executed.
- ``'failed'``: the cron is deactivated if it failed too many
times over a given time span; otherwise it is rescheduled
later.
"""
env = api.Environment(cron_cr, job['user_id'], {})
ir_cron = env[cls._name]
failed_by_timeout = (
job['timed_out_counter'] >= CONSECUTIVE_TIMEOUT_FOR_FAILURE
and not job['done']
)
if not failed_by_timeout:
status = cls._run_job(job)
else:
status = CompletionStatus.FAILED
cron_cr.execute("""
UPDATE ir_cron_progress
SET timed_out_counter = 0
WHERE id = %s
""", (job['progress_id'],))
_logger.error("Job %r (%s) timed out", job['cron_name'], job['id'])
ir_cron._update_failure_count(job, status)
if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED):
ir_cron._reschedule_later(job)
elif status == CompletionStatus.PARTIALLY_DONE:
ir_cron._reschedule_asap(job)
if os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
cron_cr.postcommit.add(ir_cron._notifydb) # See: `_notifydb`
else:
raise RuntimeError("unreachable")
cron_cr.commit()
@classmethod
def _run_job(cls, job):
"""
Execute the job's server action multiple times until it
completes. The completion status is returned.
It is considered completed when either:
- the server action doesn't use the progress API, or returned
and notified that all records has been processed: ``'fully done'``;
- the server action returned and notified that there are
remaining records to process, but this cron worker ran this
server action 10 times already: ``'partially done'``;
- the server action was able to commit and notify some work done,
but later crashed due to an exception: ``'partially done'``;
- the server action failed due to an exception and no progress
was notified: ``'failed'``.
"""
timed_out_counter = job['timed_out_counter']
with cls.pool.cursor() as job_cr:
lastcall = fields.Datetime.to_datetime(job['lastcall'])
interval = _intervalTypes[job['interval_type']](job['interval_number'])
env = api.Environment(job_cr, job['user_id'], {'lastcall': lastcall})
ir_cron = env[cls._name]
env = api.Environment(job_cr, job['user_id'], {
'lastcall': job['lastcall'],
'cron_id': job['id'],
})
cron = env[cls._name].browse(job['id'])
# Use the user's timezone to compare and compute datetimes,
# otherwise unexpected results may appear. For instance, adding
# 1 month in UTC to July 1st at midnight in GMT+2 gives July 30
# instead of August 1st!
now = fields.Datetime.context_timestamp(ir_cron, datetime.utcnow())
past_nextcall = fields.Datetime.context_timestamp(
ir_cron, fields.Datetime.to_datetime(job['nextcall']))
status = None
for i in range(MAX_BATCH_PER_CRON_JOB):
cron, progress = cron._add_progress(timed_out_counter=timed_out_counter)
job_cr.commit()
# Compute how many call were missed
missed_call = past_nextcall
missed_call_count = 0
while missed_call <= now:
missed_call += interval
missed_call_count += 1
future_nextcall = missed_call
try:
cron._callback(job['cron_name'], job['ir_actions_server_id'])
except Exception: # noqa: BLE001
if progress.done and progress.remaining:
# we do not consider it a failure if some progress has
# been committed
status = CompletionStatus.PARTIALLY_DONE
else:
status = CompletionStatus.FAILED
else:
if not progress.remaining:
status = CompletionStatus.FULLY_DONE
elif not progress.done:
# assume the server action doesn't use the progress API
# and that there is nothing left to process
status = CompletionStatus.FULLY_DONE
else:
status = CompletionStatus.PARTIALLY_DONE
# Compute how many time we should run the cron
effective_call_count = (
1 if not missed_call_count # run at least once
else 1 if not job['doall'] # run once for all
else missed_call_count if job['numbercall'] == -1 # run them all
else min(missed_call_count, job['numbercall']) # run maximum numbercall times
)
call_count_left = max(job['numbercall'] - effective_call_count, -1)
if status == CompletionStatus.FULLY_DONE and progress.deactivate:
job['active'] = False
finally:
progress.timed_out_counter = 0
timed_out_counter = 0
job_cr.commit()
_logger.info('Job %r (%s) processed %s records, %s records remaining',
job['cron_name'], job['id'], progress.done, progress.remaining)
if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED):
break
# The actual cron execution
for call in range(effective_call_count):
ir_cron._callback(job['cron_name'], job['ir_actions_server_id'], job['id'])
return status
# Update the cron with the information computed above
cron_cr.execute("""
def _update_failure_count(self, job, status):
"""
Update cron ``failure_count`` and ``first_failure_date`` given
the job's completion status. Deactivate the cron when BOTH the
counter reaches ``MIN_FAILURE_COUNT_BEFORE_DEACTIVATION`` AND
the time delta reaches ``MIN_DELTA_BEFORE_DEACTIVATION``.
On ``'fully done'`` and ``'partially done'``, the counter and
failure date are reset.
On ``'failed'`` the counter is increased and the first failure
date is set if the counter was 0. In case both thresholds are
reached, ``active`` is set to ``False`` and both values are
reset.
"""
now = fields.Datetime.context_timestamp(self, datetime.utcnow())
if status == CompletionStatus.FAILED:
failure_count = job['failure_count'] + 1
first_failure_date = job['first_failure_date'] or now
active = job['active']
if (
failure_count >= MIN_FAILURE_COUNT_BEFORE_DEACTIVATION
and fields.Datetime.context_timestamp(self, first_failure_date) + MIN_DELTA_BEFORE_DEACTIVATION < now
):
failure_count = 0
first_failure_date = None
active = False
self._notify_admin(_(
"Cron job %(name)s (%(id)s) has been deactivated after failing %(count)s times. "
"More information can be found in the server logs around %(time)s.",
name=repr(job['cron_name']),
id=job['id'],
count=MIN_FAILURE_COUNT_BEFORE_DEACTIVATION,
time=datetime.replace(datetime.utcnow(), microsecond=0),
))
else:
failure_count = 0
first_failure_date = None
active = job['active']
self.env.cr.execute("""
UPDATE ir_cron
SET nextcall=%s,
numbercall=%s,
lastcall=%s,
active=%s
WHERE id=%s
SET failure_count = %s,
first_failure_date = %s,
active = %s
WHERE id = %s
""", [
fields.Datetime.to_string(future_nextcall.astimezone(pytz.UTC)),
call_count_left,
fields.Datetime.to_string(now.astimezone(pytz.UTC)),
job['active'] and bool(call_count_left),
failure_count,
first_failure_date,
active,
job['id'],
])
cron_cr.execute("""
def _reschedule_later(self, job):
"""
Reschedule the job to be executed later, after its regular
interval or upon a trigger.
"""
# Use the user's timezone to compare and compute datetimes, otherwise unexpected results may appear.
# For instance, adding 1 month in UTC to July 1st at midnight in GMT+2 gives July 30 instead of August 1st!
now = fields.Datetime.context_timestamp(self, datetime.utcnow())
nextcall = fields.Datetime.context_timestamp(self, job['nextcall'])
interval = _intervalTypes[job['interval_type']](job['interval_number'])
while nextcall <= now:
nextcall += interval
_logger.info('Job %r (%s) completed', job['cron_name'], job['id'])
self.env.cr.execute("""
UPDATE ir_cron
SET nextcall = %s,
lastcall = %s
WHERE id = %s
""", [
fields.Datetime.to_string(nextcall.astimezone(pytz.UTC)),
fields.Datetime.to_string(now.astimezone(pytz.UTC)),
job['id'],
])
self.env.cr.execute("""
DELETE FROM ir_cron_trigger
WHERE cron_id = %s
AND call_at < (now() at time zone 'UTC')
""", [job['id']])
@api.model
def _callback(self, cron_name, server_action_id, job_id):
def _reschedule_asap(self, job):
    """
    Reschedule the job to be executed ASAP, after the other cron
    jobs had a chance to run.

    :param dict job: the job row as acquired by ``_acquire_one_job``
    """
    # Intentionally a no-op: leaving the existing nextcall and triggers
    # untouched keeps the job "ready", so this or another worker can
    # re-acquire it once the other pending jobs have been processed.
    pass
def _callback(self, cron_name, server_action_id):
""" Run the method associated to a given job. It takes care of logging
and exception handling. Note that the user running the server action
is the user calling this method. """
self.ensure_one()
try:
if self.pool != self.pool.check_signaling():
# the registry has changed, reload self in the new registry
self.env.reset()
self = self.env()[self._name]
log_depth = (None if _logger.isEnabledFor(logging.DEBUG) else 1)
odoo.netsvc.log(_logger, logging.DEBUG, 'cron.object.execute', (self._cr.dbname, self._uid, '*', cron_name, server_action_id), depth=log_depth)
_logger.info('Starting job `%s`.', cron_name)
_logger.debug(
"cron.object.execute(%r, %d, '*', %r, %d)",
self.env.cr.dbname,
self._uid,
cron_name,
server_action_id,
)
_logger.info('Job %r (%s) starting', cron_name, self.id)
start_time = time.time()
self.env['ir.actions.server'].browse(server_action_id).run()
self.env.flush_all()
end_time = time.time()
_logger.info('Job done: `%s` (%.3fs).', cron_name, end_time - start_time)
_logger.info('Job %r (%s) done in %.3fs', cron_name, self.id, end_time - start_time)
if start_time and _logger.isEnabledFor(logging.DEBUG):
_logger.debug('%.3fs (cron %s, server action %d with uid %d)', end_time - start_time, cron_name, server_action_id, self.env.uid)
_logger.debug('Job %r (%s) server action #%s with uid %s executed in %.3fs',
cron_name, self.id, server_action_id, self.env.uid, end_time - start_time)
self.pool.signal_changes()
except Exception as e:
except Exception:
self.pool.reset_changes()
_logger.exception("Call from cron %s for server action #%s failed in Job #%s",
cron_name, server_action_id, job_id)
self._handle_callback_exception(cron_name, server_action_id, job_id, e)
@api.model
def _handle_callback_exception(self, cron_name, server_action_id, job_id, job_exception):
""" Method called when an exception is raised by a job.
Simply logs the exception and rollback the transaction. """
self._cr.rollback()
_logger.exception('Job %r (%s) server action #%s failed', cron_name, self.id, server_action_id)
self.env.cr.rollback()
raise
def _try_lock(self, lockfk=False):
"""Try to grab a dummy exclusive write-lock to the rows with the given ids,
@ -416,7 +587,7 @@ class ir_cron(models.Model):
to a process currently executing those cron tasks.
:param lockfk: acquire a strong row lock which conflicts with
the lock aquired by foreign keys when they
the lock acquired by foreign keys when they
reference this row.
"""
if not self:
@ -475,17 +646,18 @@ class ir_cron(models.Model):
Schedule a cron job to be executed soon independently of its
``nextcall`` field value.
By default the cron is scheduled to be executed in the next batch but
the optional `at` argument may be given to delay the execution later
with a precision down to 1 minute.
By default, the cron is scheduled to be executed the next time
the cron worker wakes up, but the optional `at` argument may be
given to delay the execution later, with a precision down to 1
minute.
The method may be called with a datetime or an iterable of datetime.
The actual implementation is in :meth:`~._trigger_list`, which is the
recommended method for overrides.
The method may be called with a datetime or an iterable of
datetime. The actual implementation is in :meth:`~._trigger_list`,
which is the recommended method for overrides.
:param Optional[Union[datetime.datetime, list[datetime.datetime]]] at:
When to execute the cron, at one or several moments in time instead
of as soon as possible.
When to execute the cron, at one or several moments in time
instead of as soon as possible.
:return: the created triggers records
:rtype: recordset
"""
@ -524,7 +696,7 @@ class ir_cron(models.Model):
])
if _logger.isEnabledFor(logging.DEBUG):
ats = ', '.join(map(str, at_list))
_logger.debug("will execute '%s' at %s", self.sudo().name, ats)
_logger.debug('Job %r (%s) will execute at %s', self.sudo().name, self.id, ats)
if min(at_list) <= now or os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
self._cr.postcommit.add(self._notifydb)
@ -536,10 +708,47 @@ class ir_cron(models.Model):
ir_cron modification and on trigger creation (regardless of call_at)
"""
with odoo.sql_db.db_connect('postgres').cursor() as cr:
query = sql.SQL("SELECT {}('cron_trigger', %s)").format(sql.Identifier(ODOO_NOTIFY_FUNCTION))
cr.execute(query, [self.env.cr.dbname])
cr.execute(SQL("SELECT %s('cron_trigger', %s)", SQL.identifier(ODOO_NOTIFY_FUNCTION), self.env.cr.dbname))
_logger.debug("cron workers notified")
def _add_progress(self, *, timed_out_counter=None):
    """Create a fresh ``ir.cron.progress`` row for this cron.

    :param int timed_out_counter: number of consecutive timeouts
        observed so far for this cron, or ``None`` when unknown
    :return: a pair ``(cron, progress)`` where ``cron`` is ``self``
        with the new progress record's id injected in its context
    """
    # Persist timed_out_counter + 1 so that, should this very run time
    # out, the stored counter already accounts for it.
    counter = 0 if timed_out_counter is None else timed_out_counter + 1
    values = {
        'cron_id': self.id,
        'remaining': 0,
        'done': 0,
        'timed_out_counter': counter,
    }
    progress = self.env['ir.cron.progress'].sudo().create([values])
    return self.with_context(ir_cron_progress_id=progress.id), progress
def _notify_progress(self, *, done, remaining, deactivate=False):
    """
    Log the progress of the cron job.

    Does nothing when no progress record is attached to the context,
    i.e. when the caller is not running inside a cron worker.

    :param int done: the number of tasks already processed
    :param int remaining: the number of tasks left to process
    :param bool deactivate: whether the cron will be deactivated
    :raises ValueError: if ``done`` or ``remaining`` is negative
    """
    progress_id = self.env.context.get('ir_cron_progress_id')
    if not progress_id:
        return
    if min(done, remaining) < 0:
        raise ValueError("`done` and `remaining` must be positive integers.")
    progress = self.env['ir.cron.progress'].sudo().browse(progress_id)
    progress.write({
        'remaining': remaining,
        'done': done,
        'deactivate': deactivate,
    })
class ir_cron_trigger(models.Model):
_name = 'ir.cron.trigger'
@ -557,3 +766,19 @@ class ir_cron_trigger(models.Model):
if len(records) >= models.GC_UNLINK_LIMIT:
self.env.ref('base.autovacuum_job')._trigger()
return records.unlink()
class ir_cron_progress(models.Model):
    # Transient bookkeeping of how far along a cron job is; written via
    # ir.cron._notify_progress() and read back by the cron workers.
    _name = 'ir.cron.progress'
    _description = 'Progress of Scheduled Actions'
    _rec_name = 'cron_id'

    # Cron this progress row belongs to; rows are removed with their cron.
    cron_id = fields.Many2one("ir.cron", required=True, index=True, ondelete='cascade')
    # Number of records still to process, as last notified by the job.
    remaining = fields.Integer(default=0)
    # Number of records already processed, as last notified by the job.
    done = fields.Integer(default=0)
    # Whether the job asked for its cron to be deactivated once fully done.
    deactivate = fields.Boolean()
    # Consecutive timeouts observed for the cron; reset on success.
    timed_out_counter = fields.Integer(default=0)

    @api.autovacuum
    def _gc_cron_progress(self):
        # Progress rows are short-lived bookkeeping: garbage-collect
        # anything older than one week during the autovacuum run.
        self.search([('create_date', '<', datetime.now() - relativedelta(weeks=1))]).unlink()

View file

@ -2,9 +2,11 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
from datetime import date
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools, _, SUPERUSER_ID
from odoo.exceptions import ValidationError
from odoo.tools import SQL
class IrDefault(models.Model):
@ -33,18 +35,24 @@ class IrDefault(models.Model):
@api.model_create_multi
def create(self, vals_list):
# invalidate all company dependent fields since their fallback value in cache may be changed
self.env.invalidate_all()
self.env.registry.clear_cache()
return super(IrDefault, self).create(vals_list)
def write(self, vals):
if self:
# invalidate all company dependent fields since their fallback value in cache may be changed
self.env.invalidate_all()
self.env.registry.clear_cache()
new_default = super().write(vals)
self.check_access_rule('write')
self.check_access('write')
return new_default
def unlink(self):
if self:
# invalidate all company dependent fields since their fallback value in cache may be changed
self.env.invalidate_all()
self.env.registry.clear_cache()
return super(IrDefault, self).unlink()
@ -76,13 +84,15 @@ class IrDefault(models.Model):
model = self.env[model_name]
field = model._fields[field_name]
parsed = field.convert_to_cache(value, model)
if field.type in ('date', 'datetime') and isinstance(value, date):
value = field.to_string(value)
json_value = json.dumps(value, ensure_ascii=False)
except KeyError:
raise ValidationError(_("Invalid field %s.%s", model_name, field_name))
raise ValidationError(_("Invalid field %(model)s.%(field)s", model=model_name, field=field_name))
except Exception:
raise ValidationError(_("Invalid value for %s.%s: %s", model_name, field_name, value))
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s", model=model_name, field=field_name, value=value))
if field.type == 'integer' and not (-2**31 < parsed < 2**31-1):
raise ValidationError(_("Invalid value for %s.%s: %s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model_name, field_name, value))
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model=model_name, field=field_name, value=value))
# update existing default for the same scope, or create one
field = self.env['ir.model.fields']._get(model_name, field_name)
@ -146,6 +156,7 @@ class IrDefault(models.Model):
current user), as a dict mapping field names to values.
"""
cr = self.env.cr
self.flush_model()
query = """ SELECT f.name, d.json_value
FROM ir_default d
JOIN ir_model_fields f ON d.field_id=f.id
@ -188,3 +199,30 @@ class IrDefault(models.Model):
json_vals = [json.dumps(value, ensure_ascii=False) for value in values]
domain = [('field_id', '=', field.id), ('json_value', 'in', json_vals)]
return self.search(domain).unlink()
@tools.ormcache('model_name', 'field_name')
def _get_field_column_fallbacks(self, model_name, field_name):
    """Return a JSON-encoded mapping of company id to the column-level
    fallback value of the company dependent field
    ``model_name.field_name``.

    Cached per (model, field). Computed as superuser so every
    company's default is readable regardless of the current user's
    access rights.
    """
    # Fetch all company ids in a single aggregate query.
    company_ids = self.env.execute_query(SQL('SELECT ARRAY_AGG(id) FROM res_company'))[0][0]
    field = self.env[model_name]._fields[field_name]
    self_super = self.with_user(SUPERUSER_ID)
    return json.dumps({
        id_: field.convert_to_column(
            self_super.with_company(id_)._get_model_defaults(model_name).get(field_name),
            self_super.with_company(id_)
        )
        for id_ in company_ids
    })
def _evaluate_condition_with_fallback(self, model_name, condition):
    """
    When the field value of the condition is company dependent without
    customization, evaluate if its fallback value will be kept by
    the condition.

    :param str model_name: model the condition applies to
    :param condition: a single domain leaf ``(path, operator, value)``
    :return: ``True``/``False`` — whether the fallback value satisfies
        the condition (this implementation always returns a bool;
        NOTE(review): the original doc mentioned ``None`` "for unknown",
        presumably for overrides — confirm against callers)
    """
    # Only the first segment of the path identifies the field on this
    # model; the condition may dot into a related record.
    field_name = condition[0].split('.', 1)[0]
    model = self.env[model_name]
    field = model._fields[field_name]
    fallback = field.get_company_dependent_fallback(model)
    # Build an in-memory record carrying only the fallback value, then
    # test the domain leaf against it.
    record = model.new({field_name: field.convert_to_write(fallback, model)})
    return bool(record.filtered_domain([condition]))

View file

@ -17,5 +17,5 @@ class IrDemo(models.TransientModel):
return {
'type': 'ir.actions.act_url',
'target': 'self',
'url': '/web',
'url': '/odoo',
}

View file

@ -0,0 +1,106 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from ast import literal_eval
class IrEmbeddedActions(models.Model):
    """Actions embedded inside the view of a parent record.

    Each record links a parent window action to either an XML action
    (``action_id``) or a Python method returning one (``python_method``),
    optionally restricted to one user, to some groups, or to parent
    records matching ``domain``.
    """
    _name = 'ir.embedded.actions'
    _description = 'Embedded Actions'
    _order = 'sequence, id'

    name = fields.Char(translate=True)
    sequence = fields.Integer()
    parent_action_id = fields.Many2one('ir.actions.act_window', required=True, string='Parent Action', ondelete="cascade")
    parent_res_id = fields.Integer(string="Active Parent Id")
    parent_res_model = fields.Char(string='Active Parent Model', required=True)
    # It is required to have either action_id or python_method
    action_id = fields.Many2one('ir.actions.actions', string="Action", ondelete="cascade")
    python_method = fields.Char(help="Python method returning an action")
    user_id = fields.Many2one('res.users', string="User", help="User specific embedded action. If empty, shared embedded action", ondelete="cascade")
    is_deletable = fields.Boolean(compute="_compute_is_deletable")
    default_view_mode = fields.Char(string="Default View", help="Default view (if none, default view of the action is taken)")
    filter_ids = fields.One2many("ir.filters", "embedded_action_id", help="Default filter of the embedded action (if none, no filters)")
    is_visible = fields.Boolean(string="Embedded visibility", help="Computed field to check if the record should be visible according to the domain", compute="_compute_is_visible")
    domain = fields.Char(default="[]", help="Domain applied to the active id of the parent model")
    context = fields.Char(default="{}", help="Context dictionary as Python expression, empty by default (Default: {})")
    groups_ids = fields.Many2many('res.groups', help='Groups that can execute the embedded action. Leave empty to allow everybody.')

    _sql_constraints = [
        (
            'check_only_one_action_defined',
            """CHECK(
                (action_id IS NOT NULL AND python_method IS NULL) OR
                (action_id IS NULL AND python_method IS NOT NULL)
            )""",
            'Constraint to ensure that either an XML action or a python_method is defined, but not both.'
        ), (
            'check_python_method_requires_name',
            """CHECK(
                NOT (python_method IS NOT NULL AND name IS NULL)
            )""",
            'Constraint to ensure that if a python_method is defined, then the name must also be defined.'
        )
    ]

    @api.model_create_multi
    def create(self, vals_list):
        # The name by default is computed based on the triggered action if a action_id is defined.
        for vals in vals_list:
            if "name" not in vals:
                vals["name"] = self.env["ir.actions.actions"].browse(vals["action_id"]).name
            if "python_method" in vals and "action_id" in vals:
                if vals.get("python_method"):
                    # then remove the action_id since the action surely given by the python method.
                    del vals["action_id"]
                else:  # remove python_method in the vals since the vals is falsy.
                    del vals["python_method"]
        return super().create(vals_list)

    # The record is deletable if it hasn't been created from a xml record (i.e. is not a default embedded action)
    def _compute_is_deletable(self):
        external_ids = self._get_external_ids()
        for record in self:
            record_external_ids = external_ids[record.id]
            # Only export/custom external ids mark user-made records;
            # anything else comes from module data and must be kept.
            record.is_deletable = all(
                ex_id.startswith(("__export__", "__custom__")) for ex_id in record_external_ids
            )

    # Compute if the record should be visible to the user based on the domain applied to the active id of the parent
    # model and based on the groups allowed to access the record.
    def _compute_is_visible(self):
        active_id = self.env.context.get("active_id", False)
        if not active_id:
            # No parent record in context: nothing can be visible.
            self.is_visible = False
            return
        domain_id = [("id", "=", active_id)]
        # Group by parent model so the active record is searched once per model.
        for parent_res_model, records in self.grouped('parent_res_model').items():
            active_model_record = self.env[parent_res_model].search(domain_id, order='id')
            for record in records:
                action_groups = record.groups_ids
                if not action_groups or (action_groups & self.env.user.groups_id):
                    domain_model = literal_eval(record.domain or '[]')
                    # Visible when the record is global or bound to this
                    # parent/user, and the parent record matches the domain.
                    record.is_visible = (
                        record.parent_res_id in (False, self.env.context.get('active_id', False))
                        and record.user_id.id in (False, self.env.uid)
                        and active_model_record.filtered_domain(domain_model)
                    )
                else:
                    record.is_visible = False

    # Delete the filters linked to a embedded action.
    @api.ondelete(at_uninstall=False)
    def _unlink_if_action_deletable(self):
        for record in self:
            if not record.is_deletable:
                raise UserError(_('You cannot delete a default embedded action'))

    def _get_readable_fields(self):
        """ return the list of fields that are safe to read
        """
        return {
            "name", "parent_action_id", "parent_res_id", "parent_res_model", "action_id", "python_method", "user_id",
            "is_deletable", "default_view_mode", "filter_ids", "domain", "context", "groups_ids"
        }

View file

@ -4,13 +4,16 @@
import json
import functools
import itertools
from typing import NamedTuple
import psycopg2
import pytz
from odoo import api, Command, fields, models, _
from odoo.tools import ustr, OrderedSet
from odoo.tools.translate import code_translations, _lt
from odoo import api, Command, fields, models
from odoo.tools import OrderedSet
from odoo.tools.translate import _, code_translations, LazyTranslate
_lt = LazyTranslate(__name__)
REFERENCING_FIELDS = {None, 'id', '.id'}
def only_ref_fields(record):
@ -26,6 +29,12 @@ BOOLEAN_TRANSLATIONS = (
_lt('false')
)
class FakeField(NamedTuple):
    """Minimal stand-in for a relational field.

    Built on the fly for property sub-values (see ``_str_to_properties``)
    and passed to ``db_id_for`` to resolve external references; it only
    carries the attributes that resolution needs.
    """
    # target model of the (fake) relation
    comodel_name: str
    # presumably the property's display label, used for reporting — confirm
    # against db_id_for's usage
    name: str
class ImportWarning(Warning):
    """ Used to send warnings upwards the stack during the import process """
    # NOTE(review): this intentionally-local class shadows the builtin
    # ImportWarning within this module.
    pass
@ -201,8 +210,99 @@ class IrFieldsConverter(models.AbstractModel):
raise self._format_import_error(ValueError, msg, value)
def _str_to_properties(self, model, field, value):
msg = _("Unable to import field type '%s' ", field.type)
raise self._format_import_error(ValueError, msg)
# If we want to import the all properties at once (with the technical value)
if isinstance(value, str):
try:
value = json.loads(value)
except ValueError:
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg)
if not isinstance(value, list):
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg, {'value': value})
warnings = []
for property_dict in value:
if not (property_dict.keys() >= {'name', 'type', 'string'}):
msg = _("'%(value)s' does not seem to be a valid Property value for field '%%(field)s'. Each property need at least 'name', 'type' and 'string' attribute.")
raise self._format_import_error(ValueError, msg, {'value': property_dict})
val = property_dict.get('value')
if not val:
property_dict.pop('value', None)
continue
property_type = property_dict['type']
if property_type == 'selection':
# either label or the technical value
new_val = next(iter(
sel_val for sel_val, sel_label in property_dict['selection']
if val in (sel_val, sel_label)
), None)
if not new_val:
msg = _("'%(value)s' does not seem to be a valid Selection value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
property_dict['value'] = new_val
elif property_type == 'tags':
tags = val.split(',')
new_val = []
for tag in tags:
val_tag = next(iter(
tag_val for tag_val, tag_label, _color in property_dict['tags']
if tag in (tag_val, tag_label)
), None)
if not val_tag:
msg = _("'%(value)s' does not seem to be a valid Tag value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': tag, 'label_property': property_dict['string']})
new_val.append(val_tag)
property_dict['value'] = new_val
elif property_type == 'boolean':
new_val, warnings = self._str_to_boolean(model, field, val)
if not warnings:
property_dict['value'] = new_val
else:
msg = _("Unknown value '%(value)s' for boolean '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type in ('many2one', 'many2many'):
[record] = property_dict['value']
subfield, w1 = self._referencing_subfield(record)
if w1:
warnings.append(w1)
values = record[subfield]
references = values.split(',') if property_type == 'many2many' else [values]
ids = []
fake_field = FakeField(comodel_name=property_dict['comodel'], name=property_dict['string'])
for reference in references:
id_, __, ws = self.db_id_for(model, fake_field, subfield, reference)
ids.append(id_)
warnings.extend(ws)
property_dict['value'] = ids if property_type == 'many2many' else ids[0]
elif property_type == 'integer':
try:
property_dict['value'] = int(val)
except ValueError:
msg = _("'%(value)s' does not seem to be an integer for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type == 'float':
try:
property_dict['value'] = float(val)
except ValueError:
msg = _("'%(value)s' does not seem to be an float for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
return value, warnings
@api.model
def _str_to_boolean(self, model, field, value):
@ -365,7 +465,6 @@ class IrFieldsConverter(models.AbstractModel):
selection = field.get_description(env)['selection']
for item, label in selection:
label = ustr(label)
if callable(field.selection):
labels = [label]
for item2, label2 in field._description_selection(self.env):
@ -417,7 +516,7 @@ class IrFieldsConverter(models.AbstractModel):
action = {
'name': 'Possible Values',
'type': 'ir.actions.act_window', 'target': 'new',
'view_mode': 'tree,form',
'view_mode': 'list,form',
'views': [(False, 'list'), (False, 'form')],
'context': {'create': False},
'help': _(u"See all possible values")}
@ -461,9 +560,9 @@ class IrFieldsConverter(models.AbstractModel):
if ids:
if len(ids) > 1:
warnings.append(ImportWarning(_(
"Found multiple matches for value %r in field %%(field)r (%d matches)",
str(value).replace('%', '%%'),
len(ids),
'Found multiple matches for value "%(value)s" in field "%%(field)s" (%(match_count)s matches)',
value=str(value).replace('%', '%%'),
match_count=len(ids),
)))
id, _name = ids[0]
else:
@ -477,7 +576,7 @@ class IrFieldsConverter(models.AbstractModel):
else:
raise self._format_import_error(
Exception,
_("Unknown sub-field %r", subfield)
_("Unknown sub-field %s", subfield),
)
set_empty = False

View file

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools
from odoo.exceptions import UserError
from odoo.tools.safe_eval import safe_eval, datetime
@ -17,13 +17,15 @@ class IrFilters(models.Model):
"and available to all users.")
domain = fields.Text(default='[]', required=True)
context = fields.Text(default='{}', required=True)
sort = fields.Text(default='[]', required=True)
sort = fields.Char(default='[]', required=True)
model_id = fields.Selection(selection='_list_all_models', string='Model', required=True)
is_default = fields.Boolean(string='Default Filter')
action_id = fields.Many2one('ir.actions.actions', string='Action', ondelete='cascade',
help="The menu action this filter applies to. "
"When left empty the filter applies to all menus "
"for this model.")
embedded_action_id = fields.Many2one('ir.embedded.actions', help="The embedded action this filter is applied to", ondelete="cascade")
embedded_parent_res_id = fields.Integer(help="id of the record the filter should be applied to. Only used in combination with embedded actions")
active = fields.Boolean(default=True)
@api.model
@ -35,16 +37,20 @@ class IrFilters(models.Model):
)
return self._cr.fetchall()
def copy_data(self, default=None):
vals_list = super().copy_data(default=default)
# NULL Integer field value read as 0, wouldn't matter except in this case will trigger
# check_res_id_only_when_embedded_action
for vals in vals_list:
if vals.get('embedded_parent_res_id') == 0:
del vals['embedded_parent_res_id']
return [dict(vals, name=self.env._("%s (copy)", ir_filter.name)) for ir_filter, vals in zip(self, vals_list)]
def write(self, vals):
new_filter = super().write(vals)
self.check_access_rule('write')
self.check_access('write')
return new_filter
def copy(self, default=None):
self.ensure_one()
default = dict(default or {}, name=_('%s (copy)', self.name))
return super(IrFilters, self).copy(default)
def _get_eval_domain(self):
self.ensure_one()
return safe_eval(self.domain, {
@ -53,17 +59,17 @@ class IrFilters(models.Model):
})
@api.model
def _get_action_domain(self, action_id=None):
def _get_action_domain(self, action_id=None, embedded_action_id=None, embedded_parent_res_id=None):
"""Return a domain component for matching filters that are visible in the
same context (menu/view) as the given action."""
if action_id:
# filters specific to this menu + global ones
return [('action_id', 'in', [action_id, False])]
# only global ones
return [('action_id', '=', False)]
action_condition = ('action_id', 'in', [action_id, False]) if action_id else ('action_id', '=', False)
embedded_condition = ('embedded_action_id', '=', embedded_action_id) if embedded_action_id else ('embedded_action_id', '=', False)
embedded_parent_res_id_condition = ('embedded_parent_res_id', '=', embedded_parent_res_id) if embedded_action_id and embedded_parent_res_id else ('embedded_parent_res_id', 'in', [0, False])
return [action_condition, embedded_condition, embedded_parent_res_id_condition]
@api.model
def get_filters(self, model, action_id=None):
def get_filters(self, model, action_id=None, embedded_action_id=None, embedded_parent_res_id=None):
"""Obtain the list of filters available for the user on the given model.
:param int model: id of model to find filters for
@ -73,15 +79,16 @@ class IrFilters(models.Model):
a contextual action.
:return: list of :meth:`~osv.read`-like dicts containing the
``name``, ``is_default``, ``domain``, ``user_id`` (m2o tuple),
``action_id`` (m2o tuple) and ``context`` of the matching ``ir.filters``.
``action_id`` (m2o tuple), ``embedded_action_id`` (m2o tuple), ``embedded_parent_res_id``
and ``context`` of the matching ``ir.filters``.
"""
# available filters: private filters (user_id=uid) and public filters (uid=NULL),
# and filters for the action (action_id=action_id) or global (action_id=NULL)
user_context = self.env['res.users'].context_get()
action_domain = self._get_action_domain(action_id)
action_domain = self._get_action_domain(action_id, embedded_action_id, embedded_parent_res_id)
return self.with_context(user_context).search_read(
action_domain + [('model_id', '=', model), ('user_id', 'in', [self._uid, False])],
['name', 'is_default', 'domain', 'context', 'user_id', 'sort'],
['name', 'is_default', 'domain', 'context', 'user_id', 'sort', 'embedded_action_id', 'embedded_parent_res_id'],
)
@api.model
@ -101,7 +108,7 @@ class IrFilters(models.Model):
:raises odoo.exceptions.UserError: if there is an existing default and
we're not updating it
"""
domain = self._get_action_domain(vals.get('action_id'))
domain = self._get_action_domain(vals.get('action_id'), vals.get('embedded_action_id'), vals.get('embedded_parent_res_id'))
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', False),
@ -113,13 +120,17 @@ class IrFilters(models.Model):
if matching_filters and (matching_filters[0]['id'] == defaults.id):
return
raise UserError(_("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default", model=vals.get('model_id')))
raise UserError(self.env._("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default", model=vals.get('model_id')))
@api.model
@api.returns('self', lambda value: value.id)
def create_or_replace(self, vals):
action_id = vals.get('action_id')
current_filters = self.get_filters(vals['model_id'], action_id)
embedded_action_id = vals.get('embedded_action_id')
if not embedded_action_id and 'embedded_parent_res_id' in vals:
del vals['embedded_parent_res_id']
embedded_parent_res_id = vals.get('embedded_parent_res_id')
current_filters = self.get_filters(vals['model_id'], action_id, embedded_action_id, embedded_parent_res_id)
matching_filters = [f for f in current_filters
if f['name'].lower() == vals['name'].lower()
# next line looks for matching user_ids (specific or global), i.e.
@ -131,7 +142,7 @@ class IrFilters(models.Model):
if vals.get('user_id'):
# Setting new default: any other default that belongs to the user
# should be turned off
domain = self._get_action_domain(action_id)
domain = self._get_action_domain(action_id, embedded_action_id, embedded_parent_res_id)
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', vals['user_id']),
@ -155,12 +166,26 @@ class IrFilters(models.Model):
# Partial constraint, complemented by unique index (see below). Still
# useful to keep because it provides a proper error message when a
# violation occurs, as it shares the same prefix as the unique index.
('name_model_uid_unique', 'unique (model_id, user_id, action_id, name)', 'Filter names must be unique'),
('name_model_uid_unique', 'unique (model_id, user_id, action_id, embedded_action_id, embedded_parent_res_id, name)',
'Filter names must be unique'),
# The embedded_parent_res_id can only be defined when the embedded_action_id field is set.
# As the embedded model is linked to only one res_model, It ensure the unicity of the filter regarding the
# embedded_parent_res_model and the embedded_parent_res_id
(
'check_res_id_only_when_embedded_action',
"""CHECK(
NOT (embedded_parent_res_id IS NOT NULL AND embedded_action_id IS NULL)
)""",
'Constraint to ensure that the embedded_parent_res_id is only defined when a top_action_id is defined.'
),
('check_sort_json', "CHECK(sort IS NULL OR jsonb_typeof(sort::jsonb) = 'array')", 'Invalid sort definition'),
]
def _auto_init(self):
result = super(IrFilters, self)._auto_init()
# Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
tools.create_unique_index(self._cr, 'ir_filters_name_model_uid_unique_action_index',
self._table, ['model_id', 'COALESCE(user_id,-1)', 'COALESCE(action_id,-1)', 'lower(name)'])
self._table, ['model_id', 'COALESCE(user_id,-1)', 'COALESCE(action_id,-1)',
'lower(name)', 'embedded_parent_res_id', 'COALESCE(embedded_action_id,-1)'])
return result

View file

@ -2,16 +2,13 @@
#----------------------------------------------------------
# ir_http modular http routing
#----------------------------------------------------------
import base64
import hashlib
import json
import logging
import mimetypes
import os
import re
import sys
import traceback
import threading
import unicodedata
import werkzeug
import werkzeug.exceptions
@ -23,17 +20,35 @@ try:
except ImportError:
from werkzeug.routing.converters import NumberConverter # moved in werkzeug 2.2.2
# optional python-slugify import (https://github.com/un33k/python-slugify)
try:
import slugify as slugify_lib
except ImportError:
slugify_lib = None
import odoo
from odoo import api, http, models, tools, SUPERUSER_ID
from odoo.exceptions import AccessDenied, AccessError, MissingError
from odoo.http import request, Response, ROUTING_KEYS, Stream
from odoo.exceptions import AccessDenied
from odoo.http import request, Response, ROUTING_KEYS
from odoo.modules.registry import Registry
from odoo.service import security
from odoo.tools import get_lang, submap
from odoo.tools.json import json_default
from odoo.tools.misc import get_lang, submap
from odoo.tools.translate import code_translations
_logger = logging.getLogger(__name__)
# see also mimetypes module: https://docs.python.org/3/library/mimetypes.html and odoo.tools.mimetypes
EXTENSION_TO_WEB_MIMETYPES = {
'.css': 'text/css',
'.less': 'text/less',
'.scss': 'text/scss',
'.js': 'text/javascript',
'.xml': 'text/xml',
'.csv': 'text/csv',
'.html': 'text/html',
}
class RequestUID(object):
def __init__(self, **kw):
@ -47,13 +62,17 @@ class ModelConverter(werkzeug.routing.BaseConverter):
super().__init__(url_map)
self.model = model
def to_python(self, value):
IrHttp = Registry(threading.current_thread().dbname)['ir.http']
self.slug = IrHttp._slug
self.unslug = IrHttp._unslug
def to_python(self, value: str) -> models.BaseModel:
_uid = RequestUID(value=value, converter=self)
env = api.Environment(request.cr, _uid, request.context)
return env[self.model].browse(int(value))
return env[self.model].browse(self.unslug(value)[1])
def to_url(self, value):
return value.id
def to_url(self, value: models.BaseModel) -> str:
return self.slug(value)
class ModelsConverter(werkzeug.routing.BaseConverter):
@ -63,12 +82,12 @@ class ModelsConverter(werkzeug.routing.BaseConverter):
super().__init__(url_map)
self.model = model
def to_python(self, value):
def to_python(self, value: str) -> models.BaseModel:
_uid = RequestUID(value=value, converter=self)
env = api.Environment(request.cr, _uid, request.context)
return env[self.model].browse(int(v) for v in value.split(','))
def to_url(self, value):
def to_url(self, value: models.BaseModel) -> str:
return ",".join(value.ids)
@ -117,12 +136,59 @@ class IrHttp(models.AbstractModel):
_name = 'ir.http'
_description = "HTTP Routing"
@classmethod
def _slugify_one(cls, value: str, max_length: int = 0) -> str:
""" Transform a string to a slug that can be used in a url path.
This method will first try to do the job with python-slugify if present.
Otherwise it will process string by stripping leading and ending spaces,
converting unicode chars to ascii, lowering all chars and replacing spaces
and underscore with hyphen "-".
"""
if slugify_lib:
# There are 2 different libraries only python-slugify is supported
try:
return slugify_lib.slugify(value, max_length=max_length)
except TypeError:
pass
uni = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
slug_str = re.sub(r'[\W_]+', '-', uni).strip('-').lower()
return slug_str[:max_length] if max_length > 0 else slug_str
@classmethod
def _slugify(cls, value: str, max_length: int = 0, path: bool = False) -> str:
if not path:
return cls._slugify_one(value, max_length=max_length)
else:
res = []
for u in value.split('/'):
s = cls._slugify_one(u, max_length=max_length)
if s:
res.append(s)
# check if supported extension
path_no_ext, ext = os.path.splitext(value)
if ext in EXTENSION_TO_WEB_MIMETYPES:
res[-1] = cls._slugify_one(path_no_ext) + ext
return '/'.join(res)
@classmethod
def _slug(cls, value: models.BaseModel | tuple[int, str]) -> str:
if isinstance(value, tuple):
return str(value[0])
return str(value.id)
@classmethod
def _unslug(cls, value: str) -> tuple[str | None, int] | tuple[None, None]:
try:
return None, int(value)
except ValueError:
return None, None
#------------------------------------------------------
# Routing map
#------------------------------------------------------
@classmethod
def _get_converters(cls):
def _get_converters(cls) -> dict[str, type]:
return {'model': ModelConverter, 'models': ModelsConverter, 'int': SignedIntConverter}
@classmethod
@ -134,6 +200,48 @@ class IrHttp(models.AbstractModel):
def _get_public_users(cls):
return [request.env['ir.model.data']._xmlid_to_res_model_res_id('base.public_user')[1]]
@classmethod
    def _auth_method_bearer(cls):
        """Authenticate the request via a bearer API key, or fall back to the
        session user when the browser's Sec-Fetch headers prove an
        interactive, same-origin navigation.

        :raises werkzeug.exceptions.Unauthorized: invalid key, or no key and
            no authenticated session.
        :raises AccessDenied: key/session user mismatch, or missing
            Sec-headers for interactive usage.
        """
        headers = request.httprequest.headers

        def get_http_authorization_bearer_token():
            # werkzeug<2.3 doesn't expose `authorization.token` (for bearer authentication)
            # check header directly
            header = headers.get("Authorization")
            if header and (m := re.match(r"^bearer\s+(.+)$", header, re.IGNORECASE)):
                return m.group(1)
            return None

        def check_sec_headers():
            """Protection against CSRF attacks.

            Modern browsers automatically add Sec- headers that we can check to protect against CSRF.
            https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Sec-Fetch-User
            """
            return (
                headers.get("Sec-Fetch-Dest") == "document"
                and headers.get("Sec-Fetch-Mode") == "navigate"
                and headers.get("Sec-Fetch-Site") in ('none', 'same-origin')
                and headers.get("Sec-Fetch-User") == "?1"
            )

        if token := get_http_authorization_bearer_token():
            # 'rpc' scope does not really exist, we basically require a global key (scope NULL)
            uid = request.env['res.users.apikeys']._check_credentials(scope='rpc', key=token)
            if not uid:
                raise werkzeug.exceptions.Unauthorized(
                    "Invalid apikey",
                    www_authenticate=werkzeug.datastructures.WWWAuthenticate('bearer'))
            # a session may already be open for a different user: refuse the
            # mix rather than silently switching identities
            if request.env.uid and request.env.uid != uid:
                raise AccessDenied("Session user does not match the used apikey")
            request.update_env(user=uid)
        elif not request.env.uid:
            raise werkzeug.exceptions.Unauthorized(
                'User not authenticated, use the "Authorization" header',
                www_authenticate=werkzeug.datastructures.WWWAuthenticate('bearer'))
        elif not check_sec_headers():
            raise AccessDenied("Missing \"Authorization\" or Sec-headers for interactive usage")
        # delegate the final "is there a real (non-public) user" check
        cls._auth_method_user()
@classmethod
def _auth_method_user(cls):
if request.env.uid in [None] + cls._get_public_users():
@ -152,10 +260,13 @@ class IrHttp(models.AbstractModel):
@classmethod
    def _authenticate(cls, endpoint):
        """Authenticate the request according to the endpoint's auth mode."""
        # CORS preflight requests carry no credentials, so treat them as auth='none'
        auth = 'none' if http.is_cors_preflight(request, endpoint) else endpoint.routing['auth']
        cls._authenticate_explicit(auth)
@classmethod
def _authenticate_explicit(cls, auth):
try:
if request.session.uid is not None:
if not security.check_session(request.session, request.env):
if not security.check_session(request.session, request.env, request):
request.session.logout(keep_db=True)
request.env = api.Environment(request.env.cr, None, request.session.context)
getattr(cls, f'_auth_method_{auth}')()
@ -169,6 +280,10 @@ class IrHttp(models.AbstractModel):
def _geoip_resolve(cls):
return request._geoip_resolve()
@classmethod
    def _sanitize_cookies(cls, cookies):
        """Extension hook to adjust the ``cookies`` mapping in place.

        Base implementation does nothing; override in modules that need to
        scrub or normalize cookies.
        """
        pass
@classmethod
def _pre_dispatch(cls, rule, args):
ICP = request.env['ir.config_parameter'].with_user(SUPERUSER_ID)
@ -188,25 +303,22 @@ class IrHttp(models.AbstractModel):
request.dispatcher.pre_dispatch(rule, args)
# Replace uid placeholder by the current request.env.uid
for key, val in list(args.items()):
if isinstance(val, models.BaseModel) and isinstance(val._uid, RequestUID):
args[key] = val.with_user(request.env.uid)
# verify the default language set in the context is valid,
# otherwise fallback on the company lang, english or the first
# lang installed
env = request.env if request.env.uid else request.env['base'].with_user(SUPERUSER_ID).env
request.update_context(lang=get_lang(env)._get_cached('code'))
request.update_context(lang=get_lang(env).code)
for key, val in list(args.items()):
if not isinstance(val, models.BaseModel):
continue
# Replace uid and lang placeholder by the current request.env.uid and request.env.lang
args[key] = val.with_env(request.env)
try:
# explicitly crash now, instead of crashing later
args[key].check_access_rights('read')
args[key].check_access_rule('read')
args[key].check_access('read')
except (odoo.exceptions.AccessError, odoo.exceptions.MissingError) as e:
# custom behavior in case a record is not accessible / has been removed
if handle_error := rule.endpoint.routing.get('handle_params_access_error'):
@ -240,7 +352,7 @@ class IrHttp(models.AbstractModel):
model = request.env['ir.attachment']
attach = model.sudo()._get_serve_attachment(request.httprequest.path)
if attach and (attach.store_fname or attach.db_datas):
return Stream.from_attachment(attach).get_response()
return attach._to_http_stream().get_response()
@classmethod
def _redirect(cls, location, code=303):
@ -273,9 +385,7 @@ class IrHttp(models.AbstractModel):
def _gc_sessions(self):
if os.getenv("ODOO_SKIP_GC_SESSIONS"):
return
ICP = self.env["ir.config_parameter"]
max_lifetime = int(ICP.get_param('sessions.max_inactivity_seconds', http.SESSION_LIFETIME))
http.root.session_store.vacuum(max_lifetime=max_lifetime)
http.root.session_store.vacuum(max_lifetime=http.get_session_max_inactivity(self.env))
@api.model
def get_translations_for_webclient(self, modules, lang):
@ -283,21 +393,19 @@ class IrHttp(models.AbstractModel):
modules = self.pool._init_modules
if not lang:
lang = self._context.get("lang")
langs = self.env['res.lang']._lang_get(lang)
lang_params = None
if langs:
lang_params = {
"name": langs.name,
"direction": langs.direction,
"date_format": langs.date_format,
"time_format": langs.time_format,
"grouping": langs.grouping,
"decimal_point": langs.decimal_point,
"thousands_sep": langs.thousands_sep,
"week_start": langs.week_start,
}
lang_params['week_start'] = int(lang_params['week_start'])
lang_params['code'] = lang
lang_data = self.env['res.lang']._get_data(code=lang)
lang_params = {
"name": lang_data.name,
"code": lang_data.code,
"direction": lang_data.direction,
"date_format": lang_data.date_format,
"time_format": lang_data.time_format,
"short_time_format": lang_data.short_time_format,
"grouping": lang_data.grouping,
"decimal_point": lang_data.decimal_point,
"thousands_sep": lang_data.thousands_sep,
"week_start": int(lang_data.week_start),
} if lang_data else None
# Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
# done server-side when the language is loaded, so we only need to load the user's lang.
@ -317,11 +425,11 @@ class IrHttp(models.AbstractModel):
'lang': lang,
'multi_lang': len(self.env['res.lang'].sudo().get_installed()) > 1,
}
return hashlib.sha1(json.dumps(translation_cache, sort_keys=True).encode()).hexdigest()
return hashlib.sha1(json.dumps(translation_cache, sort_keys=True, default=json_default).encode()).hexdigest()
@classmethod
def _is_allowed_cookie(cls, cookie_type):
return True
return True if cookie_type == 'required' else bool(request.env.user)
@api.model
def _verify_request_recaptcha_token(self, action):

View file

@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from email.message import EmailMessage
from email.utils import make_msgid
import base64
import datetime
import email
@ -12,19 +9,18 @@ import logging
import re
import smtplib
import ssl
import sys
import threading
from email.message import EmailMessage
from email.utils import make_msgid
from socket import gaierror, timeout
from OpenSSL import crypto as SSLCrypto
from OpenSSL.crypto import Error as SSLCryptoError, FILETYPE_PEM
from OpenSSL.SSL import Error as SSLError
from urllib3.contrib.pyopenssl import PyOpenSSLContext
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools, _, modules
from odoo.exceptions import UserError
from odoo.tools import ustr, pycompat, formataddr, email_normalize, encapsulate_email, email_domain_extract, email_domain_normalize
from odoo.tools import formataddr, email_normalize, encapsulate_email, email_domain_extract, email_domain_normalize, human_size
_logger = logging.getLogger(__name__)
_test_logger = logging.getLogger('odoo.tests')
@ -55,7 +51,7 @@ class SMTPConnection:
SMTP_ATTRIBUTES = [
'auth', 'auth_cram_md5', 'auth_login', 'auth_plain', 'close', 'data', 'docmd', 'ehlo', 'ehlo_or_helo_if_needed',
'expn', 'from_filter', 'getreply', 'has_extn', 'login', 'mail', 'noop', 'putcmd', 'quit', 'rcpt', 'rset',
'send_message', 'sendmail', 'set_debuglevel', 'smtp_from', 'starttls', 'user', 'verify', '_host',
'send_message', 'sendmail', 'set_debuglevel', 'smtp_from', 'starttls', 'user', 'verify', '_host', 'esmtp_features',
]
for name in SMTP_ATTRIBUTES:
setattr(SMTPConnection, name, make_wrap_property(name))
@ -99,7 +95,7 @@ def extract_rfc2822_addresses(text):
"""
if not text:
return []
candidates = address_pattern.findall(ustr(text))
candidates = address_pattern.findall(text)
valid_addresses = []
for c in candidates:
try:
@ -157,10 +153,19 @@ class IrMailServer(models.Model):
smtp_debug = fields.Boolean(string='Debugging', help="If enabled, the full output of SMTP sessions will "
"be written to the server log at DEBUG level "
"(this is very verbose and may include confidential info!)")
max_email_size = fields.Float(string="Max Email Size")
sequence = fields.Integer(string='Priority', default=10, help="When no specific mail server is requested for a mail, the highest priority one "
"is used. Default priority is 10 (smaller number = higher priority)")
active = fields.Boolean(default=True)
_sql_constraints = [
(
'certificate_requires_tls',
"CHECK(smtp_encryption != 'none' OR smtp_authentication != 'certificate')",
"Certificate-based authentication requires a TLS transport"
),
]
@api.depends('smtp_authentication')
def _compute_smtp_authentication_info(self):
for server in self:
@ -217,11 +222,11 @@ class IrMailServer(models.Model):
for line in usage_details_per_server[server])
if is_multiple_server_usage:
raise UserError(
_('You cannot archive these Outgoing Mail Servers (%s) because they are still used in the following case(s):\n%s',
error_server_usage, error_usage_details))
_('You cannot archive these Outgoing Mail Servers (%(server_usage)s) because they are still used in the following case(s):\n%(usage_details)s',
server_usage=error_server_usage, usage_details=error_usage_details))
raise UserError(
_('You cannot archive this Outgoing Mail Server (%s) because it is still used in the following case(s):\n%s',
error_server_usage, error_usage_details))
_('You cannot archive this Outgoing Mail Server (%(server_usage)s) because it is still used in the following case(s):\n%(usage_details)s',
server_usage=error_server_usage, usage_details=error_usage_details))
def _active_usages_compute(self):
"""Compute a dict server id to list of user-friendly outgoing mail servers usage of this record set.
@ -232,6 +237,11 @@ class IrMailServer(models.Model):
"""
return dict()
def _get_max_email_size(self):
if self.max_email_size:
return self.max_email_size
return float(self.env['ir.config_parameter'].sudo().get_param('base.default_max_email_size', '10'))
def _get_test_email_from(self):
self.ensure_one()
email_from = False
@ -252,7 +262,16 @@ class IrMailServer(models.Model):
def _get_test_email_to(self):
return "noreply@odoo.com"
def test_smtp_connection(self):
def test_smtp_connection(self, autodetect_max_email_size=False):
"""Test the connection and if autodetect_max_email_size, set auto-detected max email size.
:param bool autodetect_max_email_size: whether to autodetect the max email size
:return (dict): client action to notify the user of the result of the operation (connection test or
auto-detection successful depending on the autodetect_max_email_size parameter)
:raises UserError: if the connection fails and if autodetect_max_email_size and
the server doesn't support the auto-detection of email max size
"""
for server in self:
smtp = False
try:
@ -274,6 +293,12 @@ class IrMailServer(models.Model):
(code, repl) = smtp.getreply()
if code != 354:
raise UserError(_('The server refused the test connection with error %(repl)s', repl=repl)) # noqa: TRY301
if autodetect_max_email_size:
max_size = smtp.esmtp_features.get('size')
if not max_size:
raise UserError(_('The server "%(server_name)s" doesn\'t return the maximum email size.',
server_name=server.name))
server.max_email_size = float(max_size) / (1024 ** 2)
except (UnicodeError, idna.core.InvalidCodepoint) as e:
raise UserError(_("Invalid server name!\n %s", e)) from e
except (gaierror, timeout) as e:
@ -301,7 +326,12 @@ class IrMailServer(models.Model):
# ignored, just a consequence of the previous exception
pass
message = _("Connection Test Successful!")
if autodetect_max_email_size:
message = _(
'Email maximum size updated (%(details)s).',
details=', '.join(f'{server.name}: {human_size(server.max_email_size * 1024 ** 2)}' for server in self))
else:
message = _('Connection Test Successful!')
return {
'type': 'ir.actions.client',
'tag': 'display_notification',
@ -309,9 +339,14 @@ class IrMailServer(models.Model):
'message': message,
'type': 'success',
'sticky': False,
}
'next': {'type': 'ir.actions.act_window_close'}, # force a form reload
},
}
    def action_retrieve_max_email_size(self):
        """Test the connection and store the server-advertised maximum email
        size (delegates to ``test_smtp_connection`` with auto-detection on).

        :return: client notification action describing the outcome.
        """
        self.ensure_one()
        return self.test_smtp_connection(autodetect_max_email_size=True)
def connect(self, host=None, port=None, user=None, password=None, encryption=None,
smtp_from=None, ssl_certificate=None, ssl_private_key=None, smtp_debug=False, mail_server_id=None,
allow_archived=False):
@ -336,9 +371,8 @@ class IrMailServer(models.Model):
longer raised.
"""
# Do not actually connect while running in test mode
if self._is_test_mode():
if modules.module.current_test:
return
mail_server = smtp_encryption = None
if mail_server_id:
mail_server = self.sudo().browse(mail_server_id)
@ -408,10 +442,11 @@ class IrMailServer(models.Model):
raise UserError(_('Could not load your certificate / private key. \n%s', str(e)))
if not smtp_server:
raise UserError(
(_("Missing SMTP Server") + "\n" +
_("Please define at least one SMTP server, "
"or provide the SMTP parameters explicitly.")))
raise UserError(_(
"Missing SMTP Server\n"
"Please define at least one SMTP server, "
"or provide the SMTP parameters explicitly.",
))
if smtp_encryption == 'ssl':
if 'SMTP_SSL' not in smtplib.__all__:
@ -500,12 +535,11 @@ class IrMailServer(models.Model):
headers = headers or {} # need valid dict later
email_cc = email_cc or []
email_bcc = email_bcc or []
body = body or u''
msg = EmailMessage(policy=email.policy.SMTP)
if not message_id:
if object_id:
message_id = tools.generate_tracking_message_id(object_id)
message_id = tools.mail.generate_tracking_message_id(object_id)
else:
message_id = make_msgid()
msg['Message-Id'] = message_id
@ -522,16 +556,16 @@ class IrMailServer(models.Model):
msg['Bcc'] = email_bcc
msg['Date'] = datetime.datetime.utcnow()
for key, value in headers.items():
msg[pycompat.to_text(ustr(key))] = value
msg[key] = value
email_body = ustr(body)
email_body = body or ''
if subtype == 'html' and not body_alternative:
msg['MIME-Version'] = '1.0'
msg.add_alternative(tools.html2plaintext(email_body), subtype='plain', charset='utf-8')
msg.add_alternative(email_body, subtype=subtype, charset='utf-8')
elif body_alternative:
msg['MIME-Version'] = '1.0'
msg.add_alternative(ustr(body_alternative), subtype=subtype_alternative, charset='utf-8')
msg.add_alternative(body_alternative, subtype=subtype_alternative, charset='utf-8')
msg.add_alternative(email_body, subtype=subtype, charset='utf-8')
else:
msg.set_content(email_body, subtype=subtype, charset='utf-8')
@ -703,27 +737,14 @@ class IrMailServer(models.Model):
smtp_from, smtp_to_list, message = self._prepare_email_message(message, smtp)
# Do not actually send emails in testing mode!
if self._is_test_mode():
_test_logger.info("skip sending email in test mode")
if modules.module.current_test:
_test_logger.debug("skip sending email in test mode")
return message['Message-Id']
try:
message_id = message['Message-Id']
if sys.version_info < (3, 7, 4):
# header folding code is buggy and adds redundant carriage
# returns, it got fixed in 3.7.4 thanks to bpo-34424
message_str = message.as_string()
message_str = re.sub('\r+(?!\n)', '', message_str)
mail_options = []
if any((not is_ascii(addr) for addr in smtp_to_list + [smtp_from])):
# non ascii email found, require SMTPUTF8 extension,
# the relay may reject it
mail_options.append("SMTPUTF8")
smtp.sendmail(smtp_from, smtp_to_list, message_str, mail_options=mail_options)
else:
smtp.send_message(message, smtp_from, smtp_to_list)
smtp.send_message(message, smtp_from, smtp_to_list)
# do not quit() a pre-established smtp_session
if not smtp_session:
@ -731,8 +752,12 @@ class IrMailServer(models.Model):
except smtplib.SMTPServerDisconnected:
raise
except Exception as e:
params = (ustr(smtp_server), e.__class__.__name__, e)
msg = _("Mail delivery failed via SMTP server '%s'.\n%s: %s", *params)
msg = _(
"Mail delivery failed via SMTP server '%(server)s'.\n%(exception_name)s: %(message)s",
server=smtp_server,
exception_name=e.__class__.__name__,
message=e,
)
_logger.info(msg)
raise MailDeliveryException(_("Mail Delivery Failed"), msg)
return message_id
@ -842,11 +867,3 @@ class IrMailServer(models.Model):
else:
self.smtp_port = 25
return result
def _is_test_mode(self):
"""Return True if we are running the tests, so we do not send real emails.
Can be overridden in tests after mocking the SMTP lib to test in depth the
outgoing mail server.
"""
return getattr(threading.current_thread(), 'testing', False) or self.env.registry.in_test_mode()

View file

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import inspect
import itertools
import logging
import random
@ -10,18 +11,29 @@ from collections import defaultdict
from collections.abc import Mapping
from operator import itemgetter
from psycopg2 import sql
from psycopg2.extras import Json
from psycopg2.sql import Identifier, SQL, Placeholder
from odoo import api, fields, models, tools, _, _lt, Command
from odoo import api, fields, models, tools, Command
from odoo.exceptions import AccessError, UserError, ValidationError
from odoo.osv import expression
from odoo.tools import pycompat, unique, OrderedSet, lazy_property
from odoo.tools import format_list, lazy_property, sql, unique, OrderedSet, SQL
from odoo.tools.safe_eval import safe_eval, datetime, dateutil, time
from odoo.tools.translate import _, LazyTranslate
_lt = LazyTranslate(__name__)
_logger = logging.getLogger(__name__)
# Messages are declared in extenso so they are properly exported in translation terms
ACCESS_ERROR_HEADER = {
'read': _lt("You are not allowed to access '%(document_kind)s' (%(document_model)s) records."),
'write': _lt("You are not allowed to modify '%(document_kind)s' (%(document_model)s) records."),
'create': _lt("You are not allowed to create '%(document_kind)s' (%(document_model)s) records."),
'unlink': _lt("You are not allowed to delete '%(document_kind)s' (%(document_model)s) records."),
}
ACCESS_ERROR_GROUPS = _lt("This operation is allowed for the following groups:\n%(groups_list)s")
ACCESS_ERROR_NOGROUP = _lt("No group currently allows this operation.")
ACCESS_ERROR_RESOLUTION = _lt("Contact your administrator to request access if necessary.")
MODULE_UNINSTALL_FLAG = '_force_unlink'
RE_ORDER_FIELDS = re.compile(r'"?(\w+)"?\s*(?:asc|desc)?', flags=re.I)
@ -65,13 +77,6 @@ def selection_xmlid(module, model_name, field_name, value):
return '%s.selection__%s__%s__%s' % (module, xmodel, field_name, xvalue)
# generic INSERT and UPDATE queries
INSERT_QUERY = SQL("INSERT INTO {table} ({cols}) VALUES %s RETURNING id")
UPDATE_QUERY = SQL("UPDATE {table} SET {assignment} WHERE {condition} RETURNING id")
quote = '"{}"'.format
def query_insert(cr, table, rows):
""" Insert rows in a table. ``rows`` is a list of dicts, all with the same
set of keys. Return the ids of the new rows.
@ -79,12 +84,15 @@ def query_insert(cr, table, rows):
if isinstance(rows, Mapping):
rows = [rows]
cols = list(rows[0])
query = INSERT_QUERY.format(
table=Identifier(table),
cols=SQL(",").join(map(Identifier, cols)),
query = SQL(
"INSERT INTO %s (%s)",
SQL.identifier(table),
SQL(",").join(map(SQL.identifier, cols)),
)
assert not query.params
str_query = query.code + " VALUES %s RETURNING id"
params = [tuple(row[col] for col in cols) for row in rows]
cr.execute_values(query, params)
cr.execute_values(str_query, params)
return [row[0] for row in cr.fetchall()]
@ -92,34 +100,40 @@ def query_update(cr, table, values, selectors):
""" Update the table with the given values (dict), and use the columns in
``selectors`` to select the rows to update.
"""
setters = set(values) - set(selectors)
query = UPDATE_QUERY.format(
table=Identifier(table),
assignment=SQL(",").join(
SQL("{} = {}").format(Identifier(s), Placeholder(s))
for s in setters
query = SQL(
"UPDATE %s SET %s WHERE %s RETURNING id",
SQL.identifier(table),
SQL(",").join(
SQL("%s = %s", SQL.identifier(key), val)
for key, val in values.items()
if key not in selectors
),
condition=SQL(" AND ").join(
SQL("{} = {}").format(Identifier(s), Placeholder(s))
for s in selectors
SQL(" AND ").join(
SQL("%s = %s", SQL.identifier(key), values[key])
for key in selectors
),
)
cr.execute(query, values)
cr.execute(query)
return [row[0] for row in cr.fetchall()]
def select_en(model, fnames, where, params):
def select_en(model, fnames, model_names):
""" Select the given columns from the given model's table, with the given WHERE clause.
Translated fields are returned in 'en_US'.
"""
table = quote(model._table)
cols = ", ".join(
f"{quote(fname)}->>'en_US'" if model._fields[fname].translate else quote(fname)
if not model_names:
return []
cols = SQL(", ").join(
SQL("%s->>'en_US'", SQL.identifier(fname)) if model._fields[fname].translate else SQL.identifier(fname)
for fname in fnames
)
query = f"SELECT {cols} FROM {table} WHERE {where}"
model.env.cr.execute(query, params)
return model.env.cr.fetchall()
query = SQL(
"SELECT %s FROM %s WHERE model IN %s",
cols,
SQL.identifier(model._table),
tuple(model_names),
)
return model.env.execute_query(query)
def upsert_en(model, fnames, rows, conflict):
@ -131,24 +145,6 @@ def upsert_en(model, fnames, rows, conflict):
:param conflict: list of column names to put into the ON CONFLICT clause
:return: the ids of the inserted or updated rows
"""
table = quote(model._table)
cols = ", ".join(quote(fname) for fname in fnames)
values = ", ".join("%s" for row in rows)
conf = ", ".join(conflict)
excluded = ", ".join(
(
f"COALESCE({table}.{quote(fname)}, '{{}}'::jsonb) || EXCLUDED.{quote(fname)}"
if model._fields[fname].translate is True
else f"EXCLUDED.{quote(fname)}"
)
for fname in fnames
)
query = f"""
INSERT INTO {table} ({cols}) VALUES {values}
ON CONFLICT ({conf}) DO UPDATE SET ({cols}) = ({excluded})
RETURNING id
"""
# for translated fields, we can actually erase the json value, as
# translations will be reloaded after this
def identity(val):
@ -158,12 +154,34 @@ def upsert_en(model, fnames, rows, conflict):
return Json({'en_US': val}) if val is not None else val
wrappers = [(jsonify if model._fields[fname].translate else identity) for fname in fnames]
params = [
values = [
tuple(func(val) for func, val in zip(wrappers, row))
for row in rows
]
model.env.cr.execute(query, params)
return [row[0] for row in model.env.cr.fetchall()]
comma = SQL(", ").join
query = SQL("""
INSERT INTO %(table)s (%(cols)s) VALUES %(values)s
ON CONFLICT (%(conflict)s) DO UPDATE SET (%(cols)s) = (%(excluded)s)
RETURNING id
""",
table=SQL.identifier(model._table),
cols=comma(SQL.identifier(fname) for fname in fnames),
values=comma(values),
conflict=comma(SQL.identifier(fname) for fname in conflict),
excluded=comma(
(
SQL(
"COALESCE(%s, '{}'::jsonb) || EXCLUDED.%s",
SQL.identifier(model._table, fname),
SQL.identifier(fname),
)
if model._fields[fname].translate is True
else SQL("EXCLUDED.%s", SQL.identifier(fname))
)
for fname in fnames
),
)
return [id_ for id_, in model.env.execute_query(query)]
#
@ -240,13 +258,12 @@ class IrModel(models.Model):
@api.depends()
def _compute_count(self):
cr = self.env.cr
self.count = 0
for model in self:
records = self.env[model.model]
if not records._abstract and records._auto:
cr.execute(sql.SQL('SELECT COUNT(*) FROM {}').format(sql.Identifier(records._table)))
model.count = cr.fetchone()[0]
[[count]] = self.env.execute_query(SQL("SELECT COUNT(*) FROM %s", SQL.identifier(records._table)))
model.count = count
@api.constrains('model')
def _check_model_name(self):
@ -306,11 +323,11 @@ class IrModel(models.Model):
continue
table = current_model._table
kind = tools.table_kind(self._cr, table)
if kind == tools.TableKind.View:
self._cr.execute(sql.SQL('DROP VIEW {}').format(sql.Identifier(table)))
elif kind == tools.TableKind.Regular:
self._cr.execute(sql.SQL('DROP TABLE {} CASCADE').format(sql.Identifier(table)))
kind = sql.table_kind(self._cr, table)
if kind == sql.TableKind.View:
self._cr.execute(SQL('DROP VIEW %s', SQL.identifier(table)))
elif kind == sql.TableKind.Regular:
self._cr.execute(SQL('DROP TABLE %s CASCADE', SQL.identifier(table)))
elif kind is not None:
_logger.warning(
"Unable to drop table %r of model %r: unmanaged or unknown tabe type %r",
@ -325,7 +342,7 @@ class IrModel(models.Model):
# Prevent manual deletion of module tables
for model in self:
if model.state != 'manual':
raise UserError(_("Model %r contains module data and cannot be removed.", model.name))
raise UserError(_("Model %s contains module data and cannot be removed.", model.name))
def unlink(self):
# prevent screwing up fields that depend on these models' fields
@ -417,7 +434,7 @@ class IrModel(models.Model):
model_ids = {}
existing = {}
for row in select_en(self, ['id'] + cols, "model IN %s", [tuple(model_names)]):
for row in select_en(self, ['id'] + cols, model_names):
model_ids[row[1]] = row[0]
existing[row[1]] = row[1:]
@ -450,7 +467,7 @@ class IrModel(models.Model):
models.check_pg_name(model_data["model"].replace(".", "_"))
class CustomModel(models.Model):
_name = pycompat.to_text(model_data['model'])
_name = model_data['model']
_description = model_data['name']
_module = False
_custom = True
@ -486,8 +503,8 @@ class IrModel(models.Model):
for model_data in cr.dictfetchall():
model_class = self._instanciate(model_data)
Model = model_class._build_model(self.pool, cr)
kind = tools.table_kind(cr, Model._table)
if kind not in (tools.TableKind.Regular, None):
kind = sql.table_kind(cr, Model._table)
if kind not in (sql.TableKind.Regular, None):
_logger.info(
"Model %r is backed by table %r which is not a regular table (%r), disabling automatic schema management",
Model._name, Model._table, kind,
@ -540,13 +557,14 @@ class IrModelFields(models.Model):
copied = fields.Boolean(string='Copied',
compute='_compute_copied', store=True, readonly=False,
help="Whether the value is copied when duplicating a record.")
related = fields.Char(string='Related Field', help="The corresponding related field, if any. This must be a dot-separated list of field names.")
related = fields.Char(string='Related Field Definition', help="The corresponding related field, if any. This must be a dot-separated list of field names.")
related_field_id = fields.Many2one('ir.model.fields', compute='_compute_related_field_id',
store=True, string="Related field", ondelete='cascade')
store=True, string="Related Field", ondelete='cascade')
required = fields.Boolean()
readonly = fields.Boolean()
index = fields.Boolean(string='Indexed')
translate = fields.Boolean(string='Translatable', help="Whether values for this field can be translated (enables the translation mechanism for that field)")
company_dependent = fields.Boolean(string='Company Dependent', help="Whether values for this field is company dependent", readonly=True)
size = fields.Integer()
state = fields.Selection([('manual', 'Custom Field'), ('base', 'Base Field')], string='Type', default='manual', required=True, readonly=True, index=True)
on_delete = fields.Selection([('cascade', 'Cascade'), ('set null', 'Set NULL'), ('restrict', 'Restrict')],
@ -662,10 +680,18 @@ class IrModelFields(models.Model):
for index, name in enumerate(names):
field = self._get(model_name, name)
if not field:
raise UserError(_("Unknown field name %r in related field %r", name, self.related))
raise UserError(_(
'Unknown field name "%(field_name)s" in related field "%(related_field)s"',
field_name=name,
related_field=self.related,
))
model_name = field.relation
if index < last and not field.relation:
raise UserError(_("Non-relational field name %r in related field %r", name, self.related))
raise UserError(_(
'Non-relational field name "%(field_name)s" in related field "%(related_field)s"',
field_name=name,
related_field=self.related,
))
return field
@api.constrains('related')
@ -674,9 +700,17 @@ class IrModelFields(models.Model):
if rec.state == 'manual' and rec.related:
field = rec._related_field()
if field.ttype != rec.ttype:
raise ValidationError(_("Related field %r does not have type %r", rec.related, rec.ttype))
raise ValidationError(_(
'Related field "%(related_field)s" does not have type "%(type)s"',
related_field=rec.related,
type=rec.ttype,
))
if field.relation != rec.relation:
raise ValidationError(_("Related field %r does not have comodel %r", rec.related, rec.relation))
raise ValidationError(_(
'Related field "%(related_field)s" does not have comodel "%(comodel)s"',
related_field=rec.related,
comodel=rec.relation,
))
@api.onchange('related')
def _onchange_related(self):
@ -710,7 +744,7 @@ class IrModelFields(models.Model):
continue
for seq in record.depends.split(","):
if not seq.strip():
raise UserError(_("Empty dependency in %r", record.depends))
raise UserError(_("Empty dependency in %s", record.depends))
model = self.env[record.model]
names = seq.strip().split(".")
last = len(names) - 1
@ -719,9 +753,17 @@ class IrModelFields(models.Model):
raise UserError(_("Compute method cannot depend on field 'id'"))
field = model._fields.get(name)
if field is None:
raise UserError(_("Unknown field %r in dependency %r", name, seq.strip()))
raise UserError(_(
'Unknown field “%(field)s” in dependency “%(dependency)s',
field=name,
dependency=seq.strip(),
))
if index < last and not field.relational:
raise UserError(_("Non-relational field %r in dependency %r", name, seq.strip()))
raise UserError(_(
'Non-relational field “%(field)s” in dependency “%(dependency)s',
field=name,
dependency=seq.strip(),
))
model = model[name]
@api.onchange('compute')
@ -746,7 +788,7 @@ class IrModelFields(models.Model):
else:
currency_field = self._get(rec.model, rec.currency_field)
if not currency_field:
raise ValidationError(_("Unknown field name %r in currency_field", rec.currency_field))
raise ValidationError(_("Unknown field specified “%s in currency_field", rec.currency_field))
if currency_field.ttype != 'many2one':
raise ValidationError(_("Currency field does not have type many2one"))
@ -792,7 +834,7 @@ class IrModelFields(models.Model):
return
return {'warning': {
'title': _("Warning"),
'message': _("The table %r if used for other, possibly incompatible fields.", self.relation_table),
'message': _("The table %s” is used by another, possibly incompatible field(s).", self.relation_table),
}}
@api.constrains('required', 'ttype', 'on_delete')
@ -827,10 +869,10 @@ class IrModelFields(models.Model):
is_model = model is not None
if field.store:
# TODO: Refactor this brol in master
if is_model and tools.column_exists(self._cr, model._table, field.name) and \
tools.table_kind(self._cr, model._table) == tools.TableKind.Regular:
self._cr.execute(sql.SQL('ALTER TABLE {} DROP COLUMN {} CASCADE').format(
sql.Identifier(model._table), sql.Identifier(field.name),
if is_model and sql.column_exists(self._cr, model._table, field.name) and \
sql.table_kind(self._cr, model._table) == sql.TableKind.Regular:
self._cr.execute(SQL('ALTER TABLE %s DROP COLUMN %s CASCADE',
SQL.identifier(model._table), SQL.identifier(field.name),
))
if field.state == 'manual' and field.ttype == 'many2many':
rel_name = field.relation_table or (is_model and model._fields[field.name].relation)
@ -845,7 +887,7 @@ class IrModelFields(models.Model):
(tuple(tables_to_drop), tuple(self.ids)))
tables_to_keep = set(row[0] for row in self._cr.fetchall())
for rel_name in tables_to_drop - tables_to_keep:
self._cr.execute(sql.SQL('DROP TABLE {}').format(sql.Identifier(rel_name)))
self._cr.execute(SQL('DROP TABLE %s', SQL.identifier(rel_name)))
return True
@ -888,8 +930,8 @@ class IrModelFields(models.Model):
if not uninstalling:
field, dep = failed_dependencies[0]
raise UserError(_(
"The field '%s' cannot be removed because the field '%s' depends on it.",
field, dep,
"The field '%(field)s' cannot be removed because the field '%(other_field)s' depends on it.",
field=field, other_field=dep,
))
else:
self = self.union(*[
@ -919,9 +961,9 @@ class IrModelFields(models.Model):
except Exception:
if not uninstalling:
raise UserError(_(
"Cannot rename/delete fields that are still present in views:\nFields: %s\nView: %s",
", ".join(str(f) for f in fields),
view.name,
"Cannot rename/delete fields that are still present in views:\nFields: %(fields)s\nView: %(view)s",
fields=format_list(self.env, [str(f) for f in fields]),
view=view.name,
))
else:
# uninstall mode
@ -957,7 +999,7 @@ class IrModelFields(models.Model):
# discard the removed fields from fields to compute
for field in fields:
self.env.all.tocompute.pop(field, None)
self.env.transaction.tocompute.pop(field, None)
model_names = self.mapped('model')
self._drop_column()
@ -1000,7 +1042,7 @@ class IrModelFields(models.Model):
('model', '=', vals['relation']),
('name', '=', vals['relation_field']),
]):
raise UserError(_("Many2one %s on model %s does not exist!", vals['relation_field'], vals['relation']))
raise UserError(_("Many2one %(field)s on model %(model)s does not exist!", field=vals['relation_field'], model=vals['relation']))
if any(model in self.pool for model in models):
# setup models; this re-initializes model in registry
@ -1068,18 +1110,18 @@ class IrModelFields(models.Model):
# rename column in database, and its corresponding index if present
table, oldname, newname, index, stored = column_rename
if stored:
self._cr.execute(
sql.SQL('ALTER TABLE {} RENAME COLUMN {} TO {}').format(
sql.Identifier(table),
sql.Identifier(oldname),
sql.Identifier(newname)
))
self._cr.execute(SQL(
'ALTER TABLE %s RENAME COLUMN %s TO %s',
SQL.identifier(table),
SQL.identifier(oldname),
SQL.identifier(newname)
))
if index:
self._cr.execute(
sql.SQL('ALTER INDEX {} RENAME TO {}').format(
sql.Identifier(f'{table}_{oldname}_index'),
sql.Identifier(f'{table}_{newname}_index'),
))
self._cr.execute(SQL(
'ALTER INDEX %s RENAME TO %s',
SQL.identifier(f'{table}_{oldname}_index'),
SQL.identifier(f'{table}_{newname}_index'),
))
if column_rename or patched_models or translate_only:
# setup models, this will reload all manual fields in registry
@ -1124,6 +1166,7 @@ class IrModelFields(models.Model):
'selectable': bool(field.search or field.store),
'size': getattr(field, 'size', None),
'translate': bool(field.translate),
'company_dependent': bool(field.company_dependent),
'relation_field': field.inverse_name if field.type == 'one2many' else None,
'relation_table': field.relation if field.type == 'many2many' else None,
'column1': field.column1 if field.type == 'many2many' else None,
@ -1168,7 +1211,7 @@ class IrModelFields(models.Model):
field_ids = {}
existing = {}
for row in select_en(self, ['id'] + cols, "model IN %s", [tuple(model_names)]):
for row in select_en(self, ['id'] + cols, model_names):
field_ids[row[1:3]] = row[0]
existing[row[1:3]] = row[1:]
@ -1234,6 +1277,7 @@ class IrModelFields(models.Model):
'required': bool(field_data['required']),
'readonly': bool(field_data['readonly']),
'store': bool(field_data['store']),
'company_dependent': bool(field_data['company_dependent']),
}
if field_data['ttype'] in ('char', 'text', 'html'):
attrs['translate'] = bool(field_data['translate'])
@ -1367,7 +1411,7 @@ class ModelInherit(models.Model):
IrModel = self.env["ir.model"]
get_model_id = IrModel._get_id
module_mapping = defaultdict(list)
module_mapping = defaultdict(OrderedSet)
for model_name in model_names:
get_field_id = self.env["ir.model.fields"]._get_ids(model_name).get
model_id = get_model_id(model_name)
@ -1384,10 +1428,16 @@ class ModelInherit(models.Model):
] + [
(model_id, get_model_id(parent_name), get_field_id(field))
for parent_name, field in cls._inherits.items()
] + [
(model_id, get_model_id(field.comodel_name), get_field_id(field_name))
for (field_name, field) in inspect.getmembers(cls)
if isinstance(field, fields.Many2one)
if field.type == 'many2one' and not field.related and field.delegate
if field_name not in cls._inherits.values()
]
for item in items:
module_mapping[item].append(cls._module)
module_mapping[item].add(cls._module)
if not module_mapping:
return
@ -1480,6 +1530,13 @@ class IrModelSelection(models.Model):
]
if not fields:
return
if invalid_fields := OrderedSet(
field for field in fields
for selection in field.selection
for value_label in selection
if not isinstance(value_label, str)
):
raise ValidationError(_("Fields %s contain a non-str value/label in selection", invalid_fields))
# determine expected and existing rows
IMF = self.env['ir.model.fields']
@ -1682,14 +1739,15 @@ class IrModelSelection(models.Model):
"Could not fulfill ondelete action for field %s.%s, "
"attempting ORM bypass...", records._name, fname,
)
query = sql.SQL("UPDATE {} SET {}=%s WHERE id IN %s").format(
sql.Identifier(records._table),
sql.Identifier(fname),
)
# if this fails then we're shit out of luck and there's nothing
# we can do except fix on a case-by-case basis
value = field.convert_to_column(value, records)
self.env.cr.execute(query, [value, records._ids])
self.env.execute_query(SQL(
"UPDATE %s SET %s=%s WHERE id IN %s",
SQL.identifier(records._table),
SQL.identifier(fname),
field.convert_to_column_insert(value, records),
records._ids,
))
records.invalidate_recordset([fname])
for selection in self:
@ -1727,8 +1785,8 @@ class IrModelSelection(models.Model):
else:
# this shouldn't happen... simply a sanity check
raise ValueError(_(
"The ondelete policy %r is not valid for field %r",
ondelete, selection
'The ondelete policy "%(policy)s" is not valid for field "%(field)s"',
policy=ondelete, field=selection,
))
def _get_records(self):
@ -1770,39 +1828,38 @@ class IrModelConstraint(models.Model):
]
def unlink(self):
self.check_access_rights('unlink')
self.check_access_rule('unlink')
self.check_access('unlink')
ids_set = set(self.ids)
for data in self.sorted(key='id', reverse=True):
name = tools.ustr(data.name)
name = data.name
if data.model.model in self.env:
table = self.env[data.model.model]._table
else:
table = data.model.model.replace('.', '_')
typ = data.type
# double-check we are really going to delete all the owners of this schema element
self._cr.execute("""SELECT id from ir_model_constraint where name=%s""", (data.name,))
self._cr.execute("""SELECT id from ir_model_constraint where name=%s""", [name])
external_ids = set(x[0] for x in self._cr.fetchall())
if external_ids - ids_set:
# as installed modules have defined this element we must not delete it!
continue
typ = data.type
if typ == 'f':
# test if FK exists on this table (it could be on a related m2m table, in which case we ignore it)
self._cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid)
WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""",
('f', name, table))
if self._cr.fetchone():
self._cr.execute(
sql.SQL('ALTER TABLE {} DROP CONSTRAINT {}').format(
sql.Identifier(table),
sql.Identifier(name[:63])
))
self._cr.execute(SQL(
'ALTER TABLE %s DROP CONSTRAINT %s',
SQL.identifier(table),
SQL.identifier(name[:63]),
))
_logger.info('Dropped FK CONSTRAINT %s@%s', name, data.model.model)
if typ == 'u':
hname = tools.make_identifier(name)
hname = sql.make_identifier(name)
# test if constraint exists
# Since type='u' means any "other" constraint, to avoid issues we limit to
# 'c' -> check, 'u' -> unique, 'x' -> exclude constraints, effective leaving
@ -1812,16 +1869,18 @@ class IrModelConstraint(models.Model):
WHERE cs.contype in ('c', 'u', 'x') and cs.conname=%s and cl.relname=%s""",
(hname, table))
if self._cr.fetchone():
self._cr.execute(sql.SQL('ALTER TABLE {} DROP CONSTRAINT {}').format(
sql.Identifier(table), sql.Identifier(hname)))
self._cr.execute(SQL(
'ALTER TABLE %s DROP CONSTRAINT %s',
SQL.identifier(table),
SQL.identifier(hname),
))
_logger.info('Dropped CONSTRAINT %s@%s', name, data.model.model)
return super().unlink()
def copy(self, default=None):
default = dict(default or {})
default['name'] = self.name + '_copy'
return super(IrModelConstraint, self).copy(default)
def copy_data(self, default=None):
vals_list = super().copy_data(default=default)
return [dict(vals, name=constraint.name + '_copy') for constraint, vals in zip(self, vals_list)]
def _reflect_constraint(self, model, conname, type, definition, module, message=None):
""" Reflect the given constraint, and return its corresponding record
@ -1922,23 +1981,23 @@ class IrModelRelation(models.Model):
ids_set = set(self.ids)
to_drop = tools.OrderedSet()
for data in self.sorted(key='id', reverse=True):
name = tools.ustr(data.name)
name = data.name
# double-check we are really going to delete all the owners of this schema element
self._cr.execute("""SELECT id from ir_model_relation where name = %s""", (data.name,))
external_ids = set(x[0] for x in self._cr.fetchall())
if external_ids - ids_set:
self._cr.execute("""SELECT id from ir_model_relation where name = %s""", [name])
external_ids = {x[0] for x in self._cr.fetchall()}
if not external_ids.issubset(ids_set):
# as installed modules have defined this element we must not delete it!
continue
if tools.table_exists(self._cr, name):
if sql.table_exists(self._cr, name):
to_drop.add(name)
self.unlink()
# drop m2m relation tables
for table in to_drop:
self._cr.execute(sql.SQL('DROP TABLE {} CASCADE').format(sql.Identifier(table)))
self._cr.execute(SQL('DROP TABLE %s CASCADE', SQL.identifier(table)))
_logger.info('Dropped table %s', table)
def _reflect_relation(self, model, table, module):
@ -1981,6 +2040,7 @@ class IrModelAccess(models.Model):
def group_names_with_access(self, model_name, access_mode):
""" Return the names of visible groups which have been granted
``access_mode`` on the model ``model_name``.
:rtype: list
"""
assert access_mode in ('read', 'write', 'create', 'unlink'), 'Invalid access mode'
@ -1998,6 +2058,25 @@ class IrModelAccess(models.Model):
""", [lang, lang, model_name])
return [('%s/%s' % x) if x[0] else x[1] for x in self._cr.fetchall()]
@api.model
@tools.ormcache('model_name', 'access_mode')
def _get_access_groups(self, model_name, access_mode='read'):
""" Return the group expression object that represents the users who
have ``access_mode`` to the model ``model_name``.
"""
assert access_mode in ('read', 'write', 'create', 'unlink'), 'Invalid access mode'
model = self.env['ir.model']._get(model_name)
accesses = self.sudo().search([
(f'perm_{access_mode}', '=', True), ('model_id', '=', model.id),
])
group_definitions = self.env['res.groups']._get_group_definitions()
if not accesses:
return group_definitions.empty
if not all(access.group_id for access in accesses): # there is some global access
return group_definitions.universe
return group_definitions.from_ids(accesses.group_id.ids)
# The context parameter is useful when the method translates error messages.
# But as the method raises an exception in that case, the key 'lang' might
# not be really necessary as a cache key, unless the `ormcache_context`
@ -2007,26 +2086,22 @@ class IrModelAccess(models.Model):
def _get_allowed_models(self, mode='read'):
assert mode in ('read', 'write', 'create', 'unlink'), 'Invalid access mode'
group_ids = self.env.user._get_group_ids()
self.flush_model()
self.env.cr.execute(f"""
rows = self.env.execute_query(SQL("""
SELECT m.model
FROM ir_model_access a
JOIN ir_model m ON (m.id = a.model_id)
WHERE a.perm_{mode}
WHERE a.perm_%s
AND a.active
AND (
a.group_id IS NULL OR
-- use subselect fo force a better query plan. See #99695 --
a.group_id IN (
SELECT gu.gid
FROM res_groups_users_rel gu
WHERE gu.uid = %s
)
a.group_id IN %s
)
GROUP BY m.model
""", (self.env.uid,))
""", SQL(mode), tuple(group_ids) or (None,)))
return frozenset(v[0] for v in self.env.cr.fetchall())
return frozenset(v[0] for v in rows)
@api.model
def check(self, model, mode='read', raise_exception=True):
@ -2036,61 +2111,32 @@ class IrModelAccess(models.Model):
assert isinstance(model, str), 'Not a model name: %s' % (model,)
# TransientModel records have no access rights, only an implicit access rule
if model not in self.env:
_logger.error('Missing model %s', model)
has_access = model in self._get_allowed_models(mode)
if not has_access and raise_exception:
groups = '\n'.join('\t- %s' % g for g in self.group_names_with_access(model, mode))
document_kind = self.env['ir.model']._get(model).name or model
msg_heads = {
# Messages are declared in extenso so they are properly exported in translation terms
'read': _lt(
"You are not allowed to access '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
'write': _lt(
"You are not allowed to modify '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
'create': _lt(
"You are not allowed to create '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
'unlink': _lt(
"You are not allowed to delete '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
}
operation_error = msg_heads[mode]
if groups:
group_info = _("This operation is allowed for the following groups:\n%(groups_list)s", groups_list=groups)
else:
group_info = _("No group currently allows this operation.")
resolution_info = _("Contact your administrator to request access if necessary.")
_logger.info('Access Denied by ACLs for operation: %s, uid: %s, model: %s', mode, self._uid, model)
msg = """{operation_error}
{group_info}
{resolution_info}""".format(
operation_error=operation_error,
group_info=group_info,
resolution_info=resolution_info)
raise AccessError(msg) from None
raise self._make_access_error(model, mode) from None
return has_access
def _make_access_error(self, model: str, mode: str):
    """ Return (without raising) the AccessError describing why the current
    user may not perform ``mode`` on ``model``, listing the groups that
    would grant the operation when any exist.
    """
    _logger.info('Access Denied by ACLs for operation: %s, uid: %s, model: %s', mode, self._uid, model)
    document_kind = self.env['ir.model']._get(model).name or model
    header = str(ACCESS_ERROR_HEADER[mode]) % {
        'document_kind': document_kind,
        'document_model': model,
    }
    # bullet list of group names that do have this access, if any
    allowed = [f"\t- {group}" for group in self.group_names_with_access(model, mode)]
    if allowed:
        group_part = str(ACCESS_ERROR_GROUPS) % {'groups_list': "\n".join(allowed)}
    else:
        group_part = str(ACCESS_ERROR_NOGROUP)
    sections = (header, group_part, str(ACCESS_ERROR_RESOLUTION))
    return AccessError("\n\n".join(sections))
@api.model
def call_cache_clearing_methods(self):
@ -2164,10 +2210,12 @@ class IrModelData(models.Model):
def _auto_init(self):
res = super(IrModelData, self)._auto_init()
tools.create_unique_index(self._cr, 'ir_model_data_module_name_uniq_index',
self._table, ['module', 'name'])
tools.create_index(self._cr, 'ir_model_data_model_res_id_index',
self._table, ['model', 'res_id'])
sql.create_unique_index(
self._cr, 'ir_model_data_module_name_uniq_index',
self._table, ['module', 'name'])
sql.create_index(
self._cr, 'ir_model_data_model_res_id_index',
self._table, ['model', 'res_id'])
return res
@api.depends('res_id', 'model', 'complete_name')
@ -2222,23 +2270,35 @@ class IrModelData(models.Model):
if self.env[model].search([('id', '=', res_id)]):
return model, res_id
if raise_on_access_error:
raise AccessError(_('Not enough access rights on the external ID %r', '%s.%s', (module, xml_id)))
raise AccessError(_('Not enough access rights on the external ID "%(module)s.%(xml_id)s"', module=module, xml_id=xml_id))
return model, False
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
self.ensure_one()
rand = "%04x" % random.getrandbits(16)
default = dict(default or {}, name="%s_%s" % (self.name, rand))
return super().copy(default)
def copy_data(self, default=None):
    """ Copy each record's values, keeping the external identifier unique by
    appending a random 16-bit hex suffix to the copied ``name``.
    """
    vals_list = super().copy_data(default=default)
    for record, vals in zip(self, vals_list):
        # 4 hex digits of randomness avoid colliding with the original xmlid
        vals['name'] = "%s_%s" % (record.name, "%04x" % random.getrandbits(16))
    return vals_list
@api.model_create_multi
def create(self, vals_list):
    """ Create xmlid rows; flush the registry 'groups' cache whenever any of
    the new external ids targets a ``res.groups`` record.
    """
    records = super().create(vals_list)
    touches_groups = any(vals.get('model') == 'res.groups' for vals in vals_list)
    if touches_groups:
        self.env.registry.clear_cache('groups')
    return records
def write(self, values):
self.env.registry.clear_cache() # _xmlid_lookup
return super().write(values)
res = super().write(values)
if values.get('model') == 'res.groups':
self.env.registry.clear_cache('groups')
return res
def unlink(self):
    """ Regular unlink method, but make sure to clear the caches.

    Removing an ir.model.data row invalidates the xmlid lookup cache; when a
    deleted row refers to a ``res.groups`` record, the dedicated 'groups'
    cache is cleared as well.
    """
    self.env.registry.clear_cache()  # _xmlid_lookup
    # exists() guards against rows of `self` that were already removed
    if self and any(data.model == 'res.groups' for data in self.exists()):
        self.env.registry.clear_cache('groups')
    return super(IrModelData, self).unlink()
def _lookup_xmlids(self, xml_ids, model):
@ -2311,6 +2371,9 @@ class IrModelData(models.Model):
# update loaded_xmlids
self.pool.loaded_xmlids.update("%s.%s" % row[:2] for row in rows)
if any(row[2] == 'res.groups' for row in rows):
self.env.registry.clear_cache('groups')
# NOTE: this method is overriden in web_studio; if you need to make another
# override, make sure it is compatible with the one that is there.
def _build_insert_xmlids_values(self):
@ -2424,6 +2487,8 @@ class IrModelData(models.Model):
('model', '=', records._name),
('res_id', 'in', records.ids),
])
cloc_exclude_data = ref_data.filtered(lambda imd: imd.module == '__cloc_exclude__')
ref_data -= cloc_exclude_data
records -= records.browse((ref_data - module_data).mapped('res_id'))
if not records:
return
@ -2452,6 +2517,7 @@ class IrModelData(models.Model):
_logger.info('Deleting %s', records)
try:
with self._cr.savepoint():
cloc_exclude_data.unlink()
records.unlink()
except Exception:
if len(records) <= 1:
@ -2599,10 +2665,9 @@ class IrModelData(models.Model):
@api.model
def toggle_noupdate(self, model, res_id):
""" Toggle the noupdate flag on the external id of the record """
record = self.env[model].browse(res_id)
if record.check_access_rights('write'):
for xid in self.search([('model', '=', model), ('res_id', '=', res_id)]):
xid.noupdate = not xid.noupdate
self.env[model].browse(res_id).check_access('write')
for xid in self.search([('model', '=', model), ('res_id', '=', res_id)]):
xid.noupdate = not xid.noupdate
class WizardModelMenu(models.TransientModel):
@ -2618,7 +2683,7 @@ class WizardModelMenu(models.TransientModel):
vals = {
'name': menu.name,
'res_model': model.model,
'view_mode': 'tree,form',
'view_mode': 'list,form',
}
action_id = self.env['ir.actions.act_window'].create(vals)
self.env['ir.ui.menu'].create({

View file

@ -101,7 +101,7 @@ class ModuleCategory(models.Model):
@api.constrains('parent_id')
def _check_parent_not_circular(self):
if not self._check_recursion():
if self._has_cycle():
raise ValidationError(_("Error ! You cannot create recursive categories."))
@ -178,7 +178,7 @@ class Module(models.Model):
for element, _attribute, _link, _pos in html.iterlinks():
if element.get('src') and not '//' in element.get('src') and not 'static/' in element.get('src'):
element.set('src', "/%s/static/description/%s" % (module.name, element.get('src')))
return tools.html_sanitize(lxml.html.tostring(html))
return tools.html_sanitize(lxml.html.tostring(html, encoding='unicode'))
for module in self:
if not module.name:
@ -299,6 +299,7 @@ class Module(models.Model):
sequence = fields.Integer('Sequence', default=100)
dependencies_id = fields.One2many('ir.module.module.dependency', 'module_id',
string='Dependencies', readonly=True)
country_ids = fields.Many2many('res.country', 'module_country', 'module_id', 'country_id')
exclusion_ids = fields.One2many('ir.module.module.exclusion', 'module_id',
string='Exclusions', readonly=True)
auto_install = fields.Boolean('Automatic Installation',
@ -351,18 +352,17 @@ class Module(models.Model):
""" Domain to retrieve the modules that should be loaded by the registry. """
return [('state', '=', 'installed')]
@classmethod
def check_external_dependencies(cls, module_name, newstate='to install'):
terp = cls.get_module_info(module_name)
def check_external_dependencies(self, module_name, newstate='to install'):
terp = self.get_module_info(module_name)
try:
modules.check_manifest_dependencies(terp)
except Exception as e:
if newstate == 'to install':
msg = _('Unable to install module "%s" because an external dependency is not met: %s', module_name, e.args[0])
msg = _('Unable to install module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
elif newstate == 'to upgrade':
msg = _('Unable to upgrade module "%s" because an external dependency is not met: %s', module_name, e.args[0])
msg = _('Unable to upgrade module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
else:
msg = _('Unable to process module "%s" because an external dependency is not met: %s', module_name, e.args[0])
msg = _('Unable to process module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
raise UserError(msg)
def _state_update(self, newstate, states_to_update, level=100):
@ -381,7 +381,10 @@ class Module(models.Model):
update_mods, ready_mods = self.browse(), self.browse()
for dep in module.dependencies_id:
if dep.state == 'unknown':
raise UserError(_("You try to install module %r that depends on module %r.\nBut the latter module is not available in your system.", module.name, dep.name))
raise UserError(_(
'You try to install module "%(module)s" that depends on module "%(dependency)s".\nBut the latter module is not available in your system.',
module=module.name, dependency=dep.name,
))
if dep.depend_id.state == newstate:
ready_mods += dep.depend_id
else:
@ -401,16 +404,20 @@ class Module(models.Model):
@assert_log_admin_access
def button_install(self):
company_countries = self.env['res.company'].search([]).country_id
# domain to select auto-installable (but not yet installed) modules
auto_domain = [('state', '=', 'uninstalled'), ('auto_install', '=', True)]
# determine whether an auto-install module must be installed:
# - all its dependencies are installed or to be installed,
# - at least one dependency is 'to install'
# - if the module is country specific, at least one company is in one of the countries
install_states = frozenset(('installed', 'to install', 'to upgrade'))
def must_install(module):
states = {dep.state for dep in module.dependencies_id if dep.auto_install_required}
return states <= install_states and 'to install' in states
return states <= install_states and 'to install' in states and (
not module.country_ids or module.country_ids & company_countries
)
modules = self
while modules:
@ -428,7 +435,11 @@ class Module(models.Model):
for module in install_mods:
for exclusion in module.exclusion_ids:
if exclusion.name in install_names:
raise UserError(_('Modules %r and %r are incompatible.', module.shortdesc, exclusion.exclusion_id.shortdesc))
raise UserError(_(
'Modules "%(module)s" and "%(incompatible_module)s" are incompatible.',
module=module.shortdesc,
incompatible_module=exclusion.exclusion_id.shortdesc,
))
# check category exclusions
def closure(module):
@ -448,7 +459,7 @@ class Module(models.Model):
if modules and not any(modules <= closure(module) for module in modules):
labels = dict(self.fields_get(['state'])['state']['selection'])
raise UserError(
_('You are trying to install incompatible modules in category %r:%s', category.name, ''.join(
_('You are trying to install incompatible modules in category "%(category)s":%(module_list)s', category=category.name, module_list=''.join(
f"\n- {module.shortdesc} ({labels[module.state]})"
for module in modules
))
@ -571,7 +582,7 @@ class Module(models.Model):
return {
'type': 'ir.actions.act_url',
'target': 'self',
'url': '/web',
'url': '/odoo',
}
def _button_immediate_function(self, function):
@ -689,7 +700,7 @@ class Module(models.Model):
module = todo[i]
i += 1
if module.state not in ('installed', 'to upgrade'):
raise UserError(_("Can not upgrade module %r. It is not installed.", module.name))
raise UserError(_("Cannot upgrade module “%s”. It is not installed.", module.name))
if self.get_module_info(module.name).get("installable", True):
self.check_external_dependencies(module.name, 'to upgrade')
for dep in Dependency.search([('name', '=', module.name)]):
@ -708,7 +719,7 @@ class Module(models.Model):
continue
for dep in module.dependencies_id:
if dep.state == 'unknown':
raise UserError(_('You try to upgrade the module %s that depends on the module: %s.\nBut this module is not available in your system.', module.name, dep.name))
raise UserError(_('You try to upgrade the module %(module)s that depends on the module: %(dependency)s.\nBut this module is not available in your system.', module=module.name, dependency=dep.name))
if dep.state == 'uninstalled':
to_install += self.search([('name', '=', dep.name)]).ids
@ -794,6 +805,7 @@ class Module(models.Model):
def _update_from_terp(self, terp):
self._update_dependencies(terp.get('depends', []), terp.get('auto_install'))
self._update_countries(terp.get('countries', []))
self._update_exclusions(terp.get('excludes', []))
self._update_category(terp.get('category', 'Uncategorized'))
@ -810,6 +822,16 @@ class Module(models.Model):
self.env['ir.module.module.dependency'].invalidate_model(['auto_install_required'])
self.invalidate_recordset(['dependencies_id'])
def _update_countries(self, countries=()):
    """ Synchronise this module's ``country_ids`` many2many with the country
    codes declared in its manifest.

    :param countries: iterable of country codes; matched case-insensitively
        against ``res.country.code`` (upper-cased before the search)
    """
    existing = set(self.country_ids.ids)
    needed = set(self.env['res.country'].search([('code', 'in', [c.upper() for c in countries])]).ids)
    # NOTE(review): raw SQL on the m2m table bypasses the ORM — presumably
    # deliberate during module update; confirm before refactoring to ORM writes
    for dep in (needed - existing):
        self._cr.execute('INSERT INTO module_country (module_id, country_id) values (%s, %s)', (self.id, dep))
    for dep in (existing - needed):
        self._cr.execute('DELETE FROM module_country WHERE module_id = %s and country_id = %s', (self.id, dep))
    # the SQL above changed rows behind the ORM's back: drop stale caches
    self.invalidate_recordset(['country_ids'])
    self.env['res.company'].invalidate_model(['uninstalled_l10n_module_ids'])
def _update_exclusions(self, excludes=None):
self.env['ir.module.module.exclusion'].flush_model()
existing = set(excl.name for excl in self.exclusion_ids)
@ -883,13 +905,18 @@ class Module(models.Model):
def search_panel_select_range(self, field_name, **kwargs):
if field_name == 'category_id':
enable_counters = kwargs.get('enable_counters', False)
domain = [('parent_id', '=', False), ('child_ids.module_ids', '!=', False)]
domain = [
('parent_id', '=', False),
'|',
('module_ids.application', '!=', False),
('child_ids.module_ids', '!=', False),
]
excluded_xmlids = [
'base.module_category_website_theme',
'base.module_category_theme',
]
if not self.user_has_groups('base.group_no_one'):
if not self.env.user.has_group('base.group_no_one'):
excluded_xmlids.append('base.module_category_hidden')
excluded_category_ids = []
@ -928,18 +955,19 @@ class Module(models.Model):
return super(Module, self).search_panel_select_range(field_name, **kwargs)
@api.model
def _load_module_terms(self, modules, langs, overwrite=False):
def _load_module_terms(self, modules, langs, overwrite=False, imported_module=False):
""" Load PO files of the given modules for the given languages. """
# load i18n files
translation_importer = TranslationImporter(self.env.cr, verbose=False)
for module_name in modules:
modpath = get_module_path(module_name)
modpath = get_module_path(module_name, downloaded=imported_module)
if not modpath:
continue
for lang in langs:
is_lang_imported = False
for po_path in get_po_paths(module_name, lang):
env = self.env if imported_module else None
for po_path in get_po_paths(module_name, lang, env=env):
_logger.info('module %s: loading translation file %s for language %s', module_name, po_path, lang)
translation_importer.load_file(po_path, lang)
is_lang_imported = True
@ -994,6 +1022,27 @@ class ModuleDependency(models.Model):
for dependency in self:
dependency.state = dependency.depend_id.state or 'unknown'
@api.model
def all_dependencies(self, module_names):
    """ Return the transitive dependency map of ``module_names``.

    :param module_names: iterable of module technical names
    :return: dict mapping each reached module name to the list of its direct
        dependency names (modules with no dependencies do not appear as keys)
    """
    # dict used as an ordered set of names still to be queried
    pending = {name: True for name in module_names}
    dep_map = {}

    def _expand(pending, dep_map):
        # one batched read per round: direct dependencies of every pending module
        batch = list(pending.keys())
        rows = self.web_search_read(
            domain=[("module_id.name", "in", batch)],
            specification={"module_id": {"fields": {"name": {}}}, "name": {}},
        )["records"]
        pending.clear()
        for row in rows:
            dependency_name = row["name"]
            module_name = row["module_id"]["name"]
            # queue a dependency only if it was neither resolved already,
            # queued this round, nor part of the batch just queried
            if dependency_name not in dep_map and dependency_name not in pending and dependency_name not in batch:
                pending[dependency_name] = True
            dep_map.setdefault(module_name, []).append(dependency_name)

    # do-while: always query the initial names, then iterate to a fixed point
    _expand(pending, dep_map)
    while pending:
        _expand(pending, dep_map)
    return dep_map
class ModuleExclusion(models.Model):
_name = "ir.module.module.exclusion"

View file

@ -1,475 +0,0 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.osv.expression import TERM_OPERATORS_NEGATION
from odoo.tools import ormcache
TYPE2FIELD = {
'char': 'value_text',
'float': 'value_float',
'boolean': 'value_integer',
'integer': 'value_integer',
'text': 'value_text',
'binary': 'value_binary',
'many2one': 'value_reference',
'date': 'value_datetime',
'datetime': 'value_datetime',
'selection': 'value_text',
'html': 'value_text',
}
TYPE2CLEAN = {
'boolean': bool,
'integer': lambda val: val or False,
'float': lambda val: val or False,
'char': lambda val: val or False,
'text': lambda val: val or False,
'selection': lambda val: val or False,
'binary': lambda val: val or False,
'date': lambda val: val.date() if val else False,
'datetime': lambda val: val or False,
'html': lambda val: val or False,
}
class Property(models.Model):
_name = 'ir.property'
_description = 'Company Property'
_allow_sudo_commands = False
name = fields.Char(index=True)
res_id = fields.Char(string='Resource', index=True, help="If not set, acts as a default value for new resources",)
company_id = fields.Many2one('res.company', string='Company', index=True)
fields_id = fields.Many2one('ir.model.fields', string='Field', ondelete='cascade', required=True)
value_float = fields.Float()
value_integer = fields.Integer()
value_text = fields.Text() # will contain (char, text)
value_binary = fields.Binary(attachment=False)
value_reference = fields.Char()
value_datetime = fields.Datetime()
type = fields.Selection([('char', 'Char'),
('float', 'Float'),
('boolean', 'Boolean'),
('integer', 'Integer'),
('text', 'Text'),
('binary', 'Binary'),
('many2one', 'Many2One'),
('date', 'Date'),
('datetime', 'DateTime'),
('selection', 'Selection'),
('html', 'Html'),
],
required=True,
default='many2one',
index=True)
def init(self):
# Ensure there is at most one active variant for each combination.
query = """
CREATE UNIQUE INDEX IF NOT EXISTS ir_property_unique_index
ON %s (fields_id, COALESCE(company_id, 0), COALESCE(res_id, ''))
"""
self.env.cr.execute(query % self._table)
def _update_values(self, values):
if 'value' not in values:
return values
value = values.pop('value')
prop = None
type_ = values.get('type')
if not type_:
if self:
prop = self[0]
type_ = prop.type
else:
type_ = self._fields['type'].default(self)
field = TYPE2FIELD.get(type_)
if not field:
raise UserError(_('Invalid type'))
if field == 'value_reference':
if not value:
value = False
elif isinstance(value, models.BaseModel):
value = '%s,%d' % (value._name, value.id)
elif isinstance(value, int):
field_id = values.get('fields_id')
if not field_id:
if not prop:
raise ValueError()
field_id = prop.fields_id
else:
field_id = self.env['ir.model.fields'].browse(field_id)
value = '%s,%d' % (field_id.sudo().relation, value)
values[field] = value
return values
def write(self, values):
# if any of the records we're writing on has a res_id=False *or*
# we're writing a res_id=False on any record
default_set = False
values = self._update_values(values)
default_set = (
# turning a record value into a fallback value
values.get('res_id') is False and any(record.res_id for record in self)
) or any(
# changing a fallback value
not record.res_id and any(record[fname] != self._fields[fname].convert_to_record(value, self) for fname, value in values.items())
for record in self
)
r = super().write(values)
if default_set:
# DLE P44: test `test_27_company_dependent`
# Easy solution, need to flush write when changing a property.
# Maybe it would be better to be able to compute all impacted cache value and update those instead
# Then clear_cache must be removed as well.
self.env.flush_all()
self.env.registry.clear_cache()
return r
@api.model_create_multi
def create(self, vals_list):
vals_list = [self._update_values(vals) for vals in vals_list]
created_default = any(not v.get('res_id') for v in vals_list)
r = super(Property, self).create(vals_list)
if created_default:
# DLE P44: test `test_27_company_dependent`
self.env.flush_all()
self.env.registry.clear_cache()
return r
def unlink(self):
default_deleted = any(not p.res_id for p in self)
r = super().unlink()
if default_deleted:
self.env.registry.clear_cache()
return r
def get_by_record(self):
self.ensure_one()
if self.type in ('char', 'text', 'selection'):
return self.value_text
elif self.type == 'float':
return self.value_float
elif self.type == 'boolean':
return bool(self.value_integer)
elif self.type == 'integer':
return self.value_integer
elif self.type == 'binary':
return self.value_binary
elif self.type == 'many2one':
if not self.value_reference:
return False
model, resource_id = self.value_reference.split(',')
return self.env[model].browse(int(resource_id)).exists()
elif self.type == 'datetime':
return self.value_datetime
elif self.type == 'date':
if not self.value_datetime:
return False
return fields.Date.to_string(fields.Datetime.from_string(self.value_datetime))
return False
@api.model
def _set_default(self, name, model, value, company=False):
""" Set the given field's generic value for the given company.
:param name: the field's name
:param model: the field's model name
:param value: the field's value
:param company: the company (record or id)
"""
field_id = self.env['ir.model.fields']._get(model, name).id
company_id = int(company) if company else False
prop = self.sudo().search([
('fields_id', '=', field_id),
('company_id', '=', company_id),
('res_id', '=', False),
])
if prop:
prop.write({'value': value})
else:
prop.create({
'fields_id': field_id,
'company_id': company_id,
'res_id': False,
'name': name,
'value': value,
'type': self.env[model]._fields[name].type,
})
@api.model
def _get(self, name, model, res_id=False):
""" Get the given field's generic value for the record.
:param name: the field's name
:param model: the field's model name
:param res_id: optional resource, format: "<id>" (int) or
"<model>,<id>" (str)
"""
if not res_id:
t, v = self._get_default_property(name, model)
if not v or t != 'many2one':
return v
return self.env[v[0]].browse(v[1])
p = self._get_property(name, model, res_id=res_id)
if p:
return p.get_by_record()
return False
# only cache Property._get(res_id=False) as that's
# sub-optimally.
COMPANY_KEY = "self.env.company.id"
@ormcache(COMPANY_KEY, 'name', 'model')
def _get_default_property(self, name, model):
prop = self._get_property(name, model, res_id=False)
if not prop:
return None, False
v = prop.get_by_record()
if prop.type != 'many2one':
return prop.type, v
return 'many2one', v and (v._name, v.id)
def _get_property(self, name, model, res_id):
domain = self._get_domain(name, model)
if domain is not None:
if res_id and isinstance(res_id, int):
res_id = "%s,%s" % (model, res_id)
domain = [('res_id', '=', res_id)] + domain
#make the search with company_id asc to make sure that properties specific to a company are given first
return self.sudo().search(domain, limit=1, order='company_id')
return self.sudo().browse(())
def _get_domain(self, prop_name, model):
field_id = self.env['ir.model.fields']._get(model, prop_name).id
if not field_id:
return None
company_id = self.env.company.id
return [('fields_id', '=', field_id), ('company_id', 'in', [company_id, False])]
@api.model
def _get_multi(self, name, model, ids):
""" Read the property field `name` for the records of model `model` with
the given `ids`, and return a dictionary mapping `ids` to their
corresponding value.
"""
if not ids:
return {}
field = self.env[model]._fields[name]
field_id = self.env['ir.model.fields']._get(model, name).id
company_id = self.env.company.id or None
if field.type == 'many2one':
comodel = self.env[field.comodel_name]
model_pos = len(model) + 2
value_pos = len(comodel._name) + 2
# retrieve values: both p.res_id and p.value_reference are formatted
# as "<rec._name>,<rec.id>"; the purpose of the LEFT JOIN is to
# return the value id if it exists, NULL otherwise
query = """
SELECT substr(p.res_id, %s)::integer, r.id
FROM ir_property p
LEFT JOIN {} r ON substr(p.value_reference, %s)::integer=r.id
WHERE p.fields_id=%s
AND (p.company_id=%s OR p.company_id IS NULL)
AND (p.res_id IN %s OR p.res_id IS NULL)
ORDER BY p.company_id NULLS FIRST
""".format(comodel._table)
params = [model_pos, value_pos, field_id, company_id]
clean = comodel.browse
elif field.type in TYPE2FIELD:
model_pos = len(model) + 2
# retrieve values: p.res_id is formatted as "<rec._name>,<rec.id>"
query = """
SELECT substr(p.res_id, %s)::integer, p.{}
FROM ir_property p
WHERE p.fields_id=%s
AND (p.company_id=%s OR p.company_id IS NULL)
AND (p.res_id IN %s OR p.res_id IS NULL)
ORDER BY p.company_id NULLS FIRST
""".format(TYPE2FIELD[field.type])
params = [model_pos, field_id, company_id]
clean = TYPE2CLEAN[field.type]
else:
return dict.fromkeys(ids, False)
# retrieve values
self.flush_model()
cr = self.env.cr
result = {}
refs = {"%s,%s" % (model, id) for id in ids}
for sub_refs in cr.split_for_in_conditions(refs):
cr.execute(query, params + [sub_refs])
result.update(cr.fetchall())
# determine all values and format them
default = result.get(None, None)
return {
id: clean(result.get(id, default))
for id in ids
}
@api.model
def _set_multi(self, name, model, values, default_value=None):
""" Assign the property field `name` for the records of model `model`
with `values` (dictionary mapping record ids to their value).
If the value for a given record is the same as the default
value, the property entry will not be stored, to avoid bloating
the database.
If `default_value` is provided, that value will be used instead
of the computed default value, to determine whether the value
for a record should be stored or not.
"""
def clean(value):
return value.id if isinstance(value, models.BaseModel) else value
if not values:
return
if default_value is None:
domain = self._get_domain(name, model)
if domain is None:
raise Exception()
# retrieve the default value for the field
default_value = clean(self._get(name, model))
# retrieve the properties corresponding to the given record ids
field_id = self.env['ir.model.fields']._get(model, name).id
company_id = self.env.company.id
refs = {('%s,%s' % (model, id)): id for id in values}
props = self.sudo().search([
('fields_id', '=', field_id),
('company_id', '=', company_id),
('res_id', 'in', list(refs)),
])
# modify existing properties
for prop in props:
id = refs.pop(prop.res_id)
value = clean(values[id])
if value == default_value:
# avoid prop.unlink(), as it clears the record cache that can
# contain the value of other properties to set on record!
self._cr.execute("DELETE FROM ir_property WHERE id=%s", [prop.id])
elif value != clean(prop.get_by_record()):
prop.write({'value': value})
# create new properties for records that do not have one yet
vals_list = []
for ref, id in refs.items():
value = clean(values[id])
if value != default_value:
vals_list.append({
'fields_id': field_id,
'company_id': company_id,
'res_id': ref,
'name': name,
'value': value,
'type': self.env[model]._fields[name].type,
})
self.sudo().create(vals_list)
@api.model
def search_multi(self, name, model, operator, value):
""" Return a domain for the records that match the given condition. """
default_matches = False
negate = False
# For "is set" and "is not set", same logic for all types
if operator == 'in' and False in value:
operator = 'not in'
negate = True
elif operator == 'not in' and False not in value:
operator = 'in'
negate = True
elif operator in ('!=', 'not like', 'not ilike') and value:
operator = TERM_OPERATORS_NEGATION[operator]
negate = True
elif operator == '=' and not value:
operator = '!='
negate = True
field = self.env[model]._fields[name]
if field.type == 'many2one':
def makeref(value):
return value and f'{field.comodel_name},{value}'
if operator in ('=', '!=', '<=', '<', '>', '>='):
value = makeref(value)
elif operator in ('in', 'not in'):
value = [makeref(v) for v in value]
elif operator in ('=like', '=ilike', 'like', 'not like', 'ilike', 'not ilike'):
# most probably inefficient... but correct
target = self.env[field.comodel_name]
target_names = target.name_search(value, operator=operator, limit=None)
target_ids = [n[0] for n in target_names]
operator, value = 'in', [makeref(v) for v in target_ids]
elif operator in ('any', 'not any'):
if operator == 'not any':
negate = True
operator = 'in'
value = list(map(makeref, self.env[field.comodel_name]._search(value)))
elif field.type in ('integer', 'float'):
# No record is created in ir.property if the field's type is float or integer with a value
# equal to 0. Then to match with the records that are linked to a property field equal to 0,
# the negation of the operator must be taken to compute the goods and the domain returned
# to match the searched records is just the opposite.
value = float(value) if field.type == 'float' else int(value)
if operator == '>=' and value <= 0:
operator = '<'
negate = True
elif operator == '>' and value < 0:
operator = '<='
negate = True
elif operator == '<=' and value >= 0:
operator = '>'
negate = True
elif operator == '<' and value > 0:
operator = '>='
negate = True
elif field.type == 'boolean':
# the value must be mapped to an integer value
value = int(value)
# retrieve the properties that match the condition
domain = self._get_domain(name, model)
if domain is None:
raise Exception()
props = self.search(domain + [(TYPE2FIELD[field.type], operator, value)])
# retrieve the records corresponding to the properties that match
good_ids = []
for prop in props:
if prop.res_id:
__, res_id = prop.res_id.split(',')
good_ids.append(int(res_id))
else:
default_matches = True
if default_matches:
# exclude all records with a property that does not match
props = self.search(domain + [('res_id', '!=', False)])
all_ids = {int(res_id.split(',')[1]) for res_id in props.mapped('res_id')}
bad_ids = list(all_ids - set(good_ids))
if negate:
return [('id', 'in', bad_ids)]
else:
return [('id', 'not in', bad_ids)]
elif negate:
return [('id', 'not in', good_ids)]
else:
return [('id', 'in', good_ids)]

View file

@ -24,7 +24,7 @@ which executes its directive but doesn't generate any output in and of
itself.
To create new XML template, please see :doc:`QWeb Templates documentation
<https://www.odoo.com/documentation/17.0/developer/reference/frontend/qweb.html>`
<https://www.odoo.com/documentation/master/developer/reference/frontend/qweb.html>`
Rendering process
=================
@ -190,7 +190,7 @@ Only validate the **input**, the compilation if inside the ``t-if`` directive.
**Values**: name of the allowed odoo user group, or preceded by ``!`` for
prohibited groups
The generated code uses ``user_has_groups`` Odoo method.
The generated code uses ``has_group`` Odoo method from ``res.users`` model.
``t-foreach``
~~~~~~~~~~~~~
@ -380,6 +380,7 @@ import traceback
import warnings
import werkzeug
import psycopg2.errors
from markupsafe import Markup, escape
from collections.abc import Sized, Mapping
from itertools import count, chain
@ -773,6 +774,8 @@ class IrQWeb(models.AbstractModel):
except Exception as e:
if isinstance(e, TransactionRollbackError):
raise
if isinstance(e, ReadOnlySqlTransaction):
raise
raise QWebException("Error while render the template",
self, template, ref={compile_context['ref']!r}, code=code) from e
""", 0)]
@ -967,6 +970,7 @@ class IrQWeb(models.AbstractModel):
'QWebException': QWebException,
'Exception': Exception,
'TransactionRollbackError': TransactionRollbackError, # for SerializationFailure in assets
'ReadOnlySqlTransaction': psycopg2.errors.ReadOnlySqlTransaction,
'ValueError': ValueError,
'UserError': UserError,
'AccessDenied': AccessDenied,
@ -1246,7 +1250,15 @@ class IrQWeb(models.AbstractModel):
""" Generates a text value (an instance of text_type) from an arbitrary
source.
"""
return pycompat.to_text(expr)
if expr is None or expr is False:
return ''
if isinstance(expr, str):
return expr
elif isinstance(expr, bytes):
return expr.decode()
else:
return str(expr)
# order
@ -1845,14 +1857,14 @@ class IrQWeb(models.AbstractModel):
"""Compile `t-groups` expressions into a python code as a list of
strings.
The code will contain the condition `if self.user_has_groups(groups)`
The code will contain the condition `if self.env.user.has_groups(groups)`
part that wrap the rest of the compiled code of this element.
"""
groups = el.attrib.pop('t-groups', el.attrib.pop('groups', None))
strip = self._rstrip_text(compile_context)
code = self._flush_text(compile_context, level)
code.append(indent_code(f"if self.user_has_groups({groups!r}):", level))
code.append(indent_code(f"if self.env.user.has_groups({groups!r}):", level))
if strip and el.tag.lower() != 't':
self._append_text(strip, compile_context)
code.extend([
@ -2428,7 +2440,7 @@ class IrQWeb(models.AbstractModel):
inherit_branding = (
self.env.context['inherit_branding']
if 'inherit_branding' in self.env.context
else self.env.context.get('inherit_branding_auto') and record.check_access_rights('write', False))
else self.env.context.get('inherit_branding_auto') and record.has_access('write'))
field_options['inherit_branding'] = inherit_branding
translate = self.env.context.get('edit_translations') and values.get('translatable') and field.translate
field_options['translate'] = translate
@ -2483,7 +2495,7 @@ class IrQWeb(models.AbstractModel):
If debug=assets, the assets will be regenerated when a file which composes them has been modified.
Else, the assets will be generated only once and then stored in cache.
"""
rtl = self.env['res.lang'].sudo()._lang_get_direction(self.env.context.get('lang') or self.env.user.lang) == 'rtl'
rtl = self.env['res.lang'].sudo()._get_data(code=(self.env.lang or self.env.user.lang)).direction == 'rtl'
assets_params = self.env['ir.asset']._get_asset_params() # website_id
debug_assets = debug and 'assets' in debug

View file

@ -8,35 +8,36 @@ from io import BytesIO
import babel
import babel.dates
from markupsafe import Markup, escape
from markupsafe import Markup, escape, escape_silent
from PIL import Image
from lxml import etree, html
from odoo import api, fields, models, _, _lt, tools
from odoo.tools import posix_to_ldml, float_utils, format_date, format_duration, pycompat
from odoo import api, fields, models, tools
from odoo.tools import posix_to_ldml, float_is_zero, float_utils, format_date, format_duration
from odoo.tools.mail import safe_attrs
from odoo.tools.misc import get_lang, babel_locale_parse
from odoo.tools.mimetypes import guess_mimetype
from odoo.tools.translate import _, LazyTranslate
_lt = LazyTranslate(__name__)
_logger = logging.getLogger(__name__)
def nl2br(string):
""" Converts newlines to HTML linebreaks in ``string``. returns
the unicode result
:param str string:
:rtype: unicode
def nl2br(string: str) -> Markup:
""" Converts newlines to HTML linebreaks in ``string`` after HTML-escaping
it.
"""
return pycompat.to_text(string).replace('\n', Markup('<br>\n'))
return escape_silent(string).replace('\n', Markup('<br>\n'))
def nl2br_enclose(string, enclosure_tag='div'):
def nl2br_enclose(string: str, enclosure_tag: str = 'div') -> Markup:
""" Like nl2br, but returns enclosed Markup allowing to better manipulate
trusted and untrusted content. New lines added by use are trusted, other
content is escaped. """
converted = nl2br(escape(string))
return Markup(f'<{enclosure_tag}>{converted}</{enclosure_tag}>')
return Markup('<{enclosure_tag}>{converted}</{enclosure_tag}>').format(
enclosure_tag=enclosure_tag,
converted=nl2br(string),
)
#--------------------------------------------------------------------
# QWeb Fields converters
@ -115,7 +116,10 @@ class FieldConverter(models.AbstractModel):
Converts a single value to its HTML version/output
:rtype: unicode
"""
return escape(pycompat.to_text(value))
if value is None or value is False:
return ''
return escape(value.decode() if isinstance(value, bytes) else value)
@api.model
def record_to_html(self, record, field_name, options):
@ -139,7 +143,7 @@ class FieldConverter(models.AbstractModel):
:returns: Model[res.lang]
"""
return get_lang(self.env)
return self.env['res.lang'].browse(get_lang(self.env).id)
class IntegerConverter(models.AbstractModel):
@ -159,8 +163,8 @@ class IntegerConverter(models.AbstractModel):
@api.model
def value_to_html(self, value, options):
if options.get('format_decimalized_number'):
return tools.format_decimalized_number(value, options.get('precision_digits', 1))
return pycompat.to_text(self.user_lang().format('%d', value, grouping=True).replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}'))
return tools.misc.format_decimalized_number(value, options.get('precision_digits', 1))
return self.user_lang().format('%d', value, grouping=True).replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')
class FloatConverter(models.AbstractModel):
@ -198,7 +202,7 @@ class FloatConverter(models.AbstractModel):
if precision is None:
formatted = re.sub(r'(?:(0|\d+?)0+)$', r'\1', formatted)
return pycompat.to_text(formatted)
return formatted
@api.model
def record_to_html(self, record, field_name, options):
@ -250,7 +254,6 @@ class DateTimeConverter(models.AbstractModel):
lang = self.user_lang()
locale = babel_locale_parse(lang.code)
format_func = babel.dates.format_datetime
if isinstance(value, str):
value = fields.Datetime.from_string(value)
@ -266,11 +269,11 @@ class DateTimeConverter(models.AbstractModel):
pattern = options['format']
else:
if options.get('time_only'):
strftime_pattern = ("%s" % (lang.time_format))
strftime_pattern = lang.time_format
elif options.get('date_only'):
strftime_pattern = ("%s" % (lang.date_format))
strftime_pattern = lang.date_format
else:
strftime_pattern = ("%s %s" % (lang.date_format, lang.time_format))
strftime_pattern = "%s %s" % (lang.date_format, lang.time_format)
pattern = posix_to_ldml(strftime_pattern, locale=locale)
@ -278,13 +281,11 @@ class DateTimeConverter(models.AbstractModel):
pattern = pattern.replace(":ss", "").replace(":s", "")
if options.get('time_only'):
format_func = babel.dates.format_time
return pycompat.to_text(format_func(value, format=pattern, tzinfo=tzinfo, locale=locale))
if options.get('date_only'):
format_func = babel.dates.format_date
return pycompat.to_text(format_func(value, format=pattern, locale=locale))
return pycompat.to_text(format_func(value, format=pattern, tzinfo=tzinfo, locale=locale))
return babel.dates.format_time(value, format=pattern, tzinfo=tzinfo, locale=locale)
elif options.get('date_only'):
return babel.dates.format_date(value, format=pattern, locale=locale)
else:
return babel.dates.format_datetime(value, format=pattern, tzinfo=tzinfo, locale=locale)
class TextConverter(models.AbstractModel):
@ -297,7 +298,7 @@ class TextConverter(models.AbstractModel):
"""
Escapes the value and converts newlines to br. This is bullshit.
"""
return nl2br(escape(value)) if value else ''
return nl2br(value) if value else ''
class SelectionConverter(models.AbstractModel):
@ -320,7 +321,7 @@ class SelectionConverter(models.AbstractModel):
def value_to_html(self, value, options):
if not value:
return ''
return escape(pycompat.to_text(options['selection'][value]) or '')
return escape(options['selection'][value] or '')
@api.model
def record_to_html(self, record, field_name, options):
@ -341,7 +342,7 @@ class ManyToOneConverter(models.AbstractModel):
value = value.sudo().display_name
if not value:
return False
return nl2br(escape(value))
return nl2br(value)
class ManyToManyConverter(models.AbstractModel):
@ -354,7 +355,7 @@ class ManyToManyConverter(models.AbstractModel):
if not value:
return False
text = ', '.join(value.sudo().mapped('display_name'))
return nl2br(escape(text))
return nl2br(text)
class HTMLConverter(models.AbstractModel):
@ -397,18 +398,20 @@ class ImageConverter(models.AbstractModel):
except binascii.Error:
raise ValueError("Invalid image content") from None
if img_b64 and guess_mimetype(img_b64, '') == 'image/webp':
mimetype = guess_mimetype(img_b64, '') if img_b64 else None
if mimetype == 'image/webp':
return self.env["ir.qweb"]._get_converted_image_data_uri(value)
elif mimetype != "image/svg+xml":
try:
image = Image.open(BytesIO(img_b64))
image.verify()
mimetype = Image.MIME[image.format]
except OSError as exc:
raise ValueError("Non-image binary fields can not be converted to HTML") from exc
except Exception as exc: # noqa: BLE001
raise ValueError("Invalid image content") from exc
try:
image = Image.open(BytesIO(img_b64))
image.verify()
except IOError:
raise ValueError("Non-image binary fields can not be converted to HTML") from None
except: # image.verify() throws "suitable exceptions", I have no idea what they are
raise ValueError("Invalid image content") from None
return "data:%s;base64,%s" % (Image.MIME[image.format], value.decode('ascii'))
return "data:%s;base64,%s" % (mimetype, value.decode('ascii'))
@api.model
def value_to_html(self, value, options):
@ -478,9 +481,12 @@ class MonetaryConverter(models.AbstractModel):
company = self.env.company
value = options['from_currency']._convert(value, display_currency, company, date)
if float_is_zero(value, precision_digits=display_currency.decimal_places):
value = 0.0
lang = self.user_lang()
formatted_amount = lang.format(fmt, display_currency.round(value),
grouping=True, monetary=True).replace(r' ', '\N{NO-BREAK SPACE}').replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')
formatted_amount = lang.format(fmt, display_currency.round(value), grouping=True)\
.replace(r' ', '\N{NO-BREAK SPACE}').replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')
pre = post = ''
if display_currency.position == 'before':
@ -700,7 +706,7 @@ class RelativeDatetimeConverter(models.AbstractModel):
# value should be a naive datetime in UTC. So is fields.Datetime.now()
reference = fields.Datetime.from_string(options['now'])
return pycompat.to_text(babel.dates.format_timedelta(value - reference, add_direction=True, locale=locale))
return babel.dates.format_timedelta(value - reference, add_direction=True, locale=locale)
@api.model
def record_to_html(self, record, field_name, options):
@ -735,6 +741,8 @@ class BarcodeConverter(models.AbstractModel):
def value_to_html(self, value, options=None):
if not value:
return ''
if not bool(re.match(r'^[\x00-\x7F]+$', value)):
return nl2br(value)
barcode_symbology = options.get('symbology', 'Code128')
barcode = self.env['ir.actions.report'].barcode(
barcode_symbology,

View file

@ -5,7 +5,7 @@ import logging
from odoo import api, fields, models, tools, _
from odoo.exceptions import AccessError, ValidationError
from odoo.osv import expression
from odoo.tools import config
from odoo.tools import config, SQL
from odoo.tools.safe_eval import safe_eval, time
_logger = logging.getLogger(__name__)
@ -119,16 +119,17 @@ class IrRule(models.Model):
if self.env.su:
return self.browse(())
query = """ SELECT r.id FROM ir_rule r JOIN ir_model m ON (r.model_id=m.id)
WHERE m.model=%s AND r.active AND r.perm_{mode}
AND (r.id IN (SELECT rule_group_id FROM rule_group_rel rg
JOIN res_groups_users_rel gu ON (rg.group_id=gu.gid)
WHERE gu.uid=%s)
OR r.global)
ORDER BY r.id
""".format(mode=mode)
self._cr.execute(query, (model_name, self._uid))
return self.browse(row[0] for row in self._cr.fetchall())
sql = SQL("""
SELECT r.id FROM ir_rule r
JOIN ir_model m ON (r.model_id=m.id)
WHERE m.model = %s AND r.active AND r.perm_%s
AND (r.global OR r.id IN (
SELECT rule_group_id FROM rule_group_rel rg
WHERE rg.group_id IN %s
))
ORDER BY r.id
""", model_name, SQL(mode), tuple(self.env.user._get_group_ids()) or (None,))
return self.browse(v for v, in self.env.execute_query(sql))
@api.model
@tools.conditional(
@ -213,16 +214,11 @@ class IrRule(models.Model):
}
user_description = f"{self.env.user.name} (id={self.env.user.id})"
operation_error = _("Uh-oh! Looks like you have stumbled upon some top-secret records.\n\n" \
"Sorry, %s doesn't have '%s' access to:", user_description, operations[operation])
failing_model = _("- %s (%s)", description, model)
"Sorry, %(user)s doesn't have '%(operation)s' access to:", user=user_description, operation=operations[operation])
failing_model = _("- %(description)s (%(model)s)", description=description, model=model)
resolution_info = _("If you really, really need access, perhaps you can win over your friendly administrator with a batch of freshly baked cookies.")
if not self.user_has_groups('base.group_no_one') or not self.env.user.has_group('base.group_user'):
records.invalidate_recordset()
return AccessError(f"{operation_error}\n{failing_model}\n\n{resolution_info}")
# This extended AccessError is only displayed in debug mode.
# Note that by default, public and portal users do not have
# the group "base.group_no_one", even if debug mode is enabled,
# so it is relatively safe here to include the list of rules and record names.
@ -238,19 +234,33 @@ class IrRule(models.Model):
return f'{description}, {rec.display_name} ({model}: {rec.id}, company={rec.company_id.display_name})'
return f'{description}, {rec.display_name} ({model}: {rec.id})'
failing_records = '\n '.join(f'- {get_record_description(rec)}' for rec in records_sudo)
rules_description = '\n'.join(f'- {rule.name}' for rule in rules)
failing_rules = _("Blame the following rules:\n%s", rules_description)
context = None
if company_related:
failing_rules += "\n\n" + _('Note: this might be a multi-company issue. Switching company may help - in Odoo, not in real life!')
suggested_companies = records_sudo._get_redirect_suggested_company()
if suggested_companies and len(suggested_companies) != 1:
resolution_info += _('\n\nNote: this might be a multi-company issue. Switching company may help - in Odoo, not in real life!')
elif suggested_companies and suggested_companies in self.env.user.company_ids:
context = {'suggested_company': {'id': suggested_companies.id, 'display_name': suggested_companies.display_name}}
resolution_info += _('\n\nThis seems to be a multi-company issue, you might be able to access the record by switching to the company: %s.', suggested_companies.display_name)
elif suggested_companies:
resolution_info += _('\n\nThis seems to be a multi-company issue, but you do not have access to the proper company to access the record anyhow.')
if not self.env.user.has_group('base.group_no_one') or not self.env.user._is_internal():
msg = f"{operation_error}\n{failing_model}\n\n{resolution_info}"
else:
# This extended AccessError is only displayed in debug mode.
failing_records = '\n'.join(f'- {get_record_description(rec)}' for rec in records_sudo)
rules_description = '\n'.join(f'- {rule.name}' for rule in rules)
failing_rules = _("Blame the following rules:\n%s", rules_description)
msg = f"{operation_error}\n{failing_records}\n\n{failing_rules}\n\n{resolution_info}"
# clean up the cache of records prefetched with display_name above
records_sudo.invalidate_recordset()
msg = f"{operation_error}\n{failing_records}\n\n{failing_rules}\n\n{resolution_info}"
return AccessError(msg)
exception = AccessError(msg)
if context:
exception.context = context
return exception
#

View file

@ -3,10 +3,10 @@
from datetime import datetime, timedelta
import logging
import pytz
from psycopg2 import sql
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools import SQL
_logger = logging.getLogger(__name__)
@ -23,10 +23,10 @@ def _drop_sequences(cr, seq_names):
""" Drop the PostreSQL sequences if they exist. """
if not seq_names:
return
names = sql.SQL(',').join(map(sql.Identifier, seq_names))
names = SQL(',').join(map(SQL.identifier, seq_names))
# RESTRICT is the default; it prevents dropping the sequence if an
# object depends on it.
cr.execute(sql.SQL("DROP SEQUENCE IF EXISTS {} RESTRICT").format(names))
cr.execute(SQL("DROP SEQUENCE IF EXISTS %s RESTRICT", names))
def _alter_sequence(cr, seq_name, number_increment=None, number_next=None):
@ -37,15 +37,13 @@ def _alter_sequence(cr, seq_name, number_increment=None, number_next=None):
if not cr.fetchone():
# sequence is not created yet, we're inside create() so ignore it, will be set later
return
statement = sql.SQL("ALTER SEQUENCE") + sql.Identifier(seq_name)
params = []
if number_increment is not None:
statement += sql.SQL("INCREMENT BY") + sql.Placeholder()
params.append(number_increment)
if number_next is not None:
statement += sql.SQL("RESTART WITH") + sql.Placeholder()
params.append(number_next)
cr.execute(statement.join(' '), params)
statement = SQL(
"ALTER SEQUENCE %s%s%s",
SQL.identifier(seq_name),
SQL(" INCREMENT BY %s", number_increment) if number_increment is not None else SQL(),
SQL(" RESTART WITH %s", number_next) if number_next is not None else SQL(),
)
cr.execute(statement)
def _select_nextval(cr, seq_name):
@ -65,19 +63,15 @@ def _predict_nextval(self, seq_id):
"""Predict next value for PostgreSQL sequence without consuming it"""
# Cannot use currval() as it requires prior call to nextval()
seqname = 'ir_sequence_%s' % seq_id
seqtable = sql.Identifier(seqname)
query = sql.SQL("""SELECT last_value,
(SELECT increment_by
FROM pg_sequences
WHERE sequencename = %s),
is_called
FROM {}""")
params = [seqname]
seqtable = SQL.identifier(seqname)
query = SQL("""
SELECT last_value,
(SELECT increment_by FROM pg_sequences WHERE sequencename = %s),
is_called
FROM %s""", seqname, seqtable)
if self.env.cr._cnx.server_version < 100000:
query = sql.SQL("SELECT last_value, increment_by, is_called FROM {}")
params = []
self.env.cr.execute(query.format(seqtable), params)
(last_value, increment_by, is_called) = self.env.cr.fetchone()
query = SQL("SELECT last_value, increment_by, is_called FROM %s", seqtable)
[(last_value, increment_by, is_called)] = self.env.execute_query(query)
if is_called:
return last_value + increment_by
# sequence has just been RESTARTed to return last_value next time
@ -235,7 +229,7 @@ class IrSequence(models.Model):
interpolated_prefix = _interpolate(self.prefix, d)
interpolated_suffix = _interpolate(self.suffix, d)
except (ValueError, TypeError, KeyError):
raise UserError(_('Invalid prefix or suffix for sequence %r', self.name))
raise UserError(_('Invalid prefix or suffix for sequence %s', self.name))
return interpolated_prefix, interpolated_suffix
def get_next_char(self, number_next):
@ -272,7 +266,7 @@ class IrSequence(models.Model):
def next_by_id(self, sequence_date=None):
""" Draw an interpolated string using the specified sequence."""
self.check_access_rights('read')
self.browse().check_access('read')
return self._next(sequence_date=sequence_date)
@api.model
@ -282,7 +276,7 @@ class IrSequence(models.Model):
(multi-company cases), the one from the user's current company will
be used.
"""
self.check_access_rights('read')
self.browse().check_access('read')
company_id = self.env.company.id
seq_ids = self.search([('code', '=', sequence_code), ('company_id', 'in', [company_id, False])], order='company_id')
if not seq_ids:

View file

@ -28,7 +28,7 @@ class IrUiMenu(models.Model):
sequence = fields.Integer(default=10)
child_id = fields.One2many('ir.ui.menu', 'parent_id', string='Child IDs')
parent_id = fields.Many2one('ir.ui.menu', string='Parent Menu', index=True, ondelete="restrict")
parent_path = fields.Char(index=True, unaccent=False)
parent_path = fields.Char(index=True)
groups_id = fields.Many2many('res.groups', 'ir_ui_menu_group_rel',
'menu_id', 'gid', string='Groups',
help="If you have groups, the visibility of this menu will be based on these groups. "\
@ -70,7 +70,7 @@ class IrUiMenu(models.Model):
@api.constrains('parent_id')
def _check_parent_id(self):
if not self._check_recursion():
if self._has_cycle():
raise ValidationError(_('Error! You cannot create recursive menus.'))
@api.model
@ -81,12 +81,12 @@ class IrUiMenu(models.Model):
context = {'ir.ui.menu.full_list': True}
menus = self.with_context(context).search_fetch([], ['action', 'parent_id']).sudo()
groups = self.env.user.groups_id
if not debug:
groups = groups - self.env.ref('base.group_no_one')
# first discard all menus with groups the user does not have
group_ids = set(self.env.user._get_group_ids())
if not debug:
group_ids = group_ids - {self.env['ir.model.data']._xmlid_to_res_id('base.group_no_one', raise_if_not_found=False)}
menus = menus.filtered(
lambda menu: not menu.groups_id or menu.groups_id & groups)
lambda menu: not (menu.groups_id and group_ids.isdisjoint(menu.groups_id._ids)))
# take apart menus that have an action
actions_by_model = defaultdict(set)
@ -199,14 +199,15 @@ class IrUiMenu(models.Model):
return super(IrUiMenu, self).unlink()
def copy(self, default=None):
record = super(IrUiMenu, self).copy(default=default)
match = NUMBER_PARENS.search(record.name)
if match:
next_num = int(match.group(1)) + 1
record.name = NUMBER_PARENS.sub('(%d)' % next_num, record.name)
else:
record.name = record.name + '(1)'
return record
new_menus = super().copy(default=default)
for new_menu in new_menus:
match = NUMBER_PARENS.search(new_menu.name)
if match:
next_num = int(match.group(1)) + 1
new_menu.name = NUMBER_PARENS.sub('(%d)' % next_num, new_menu.name)
else:
new_menu.name = new_menu.name + '(1)'
return new_menus
@api.model
@api.returns('self')
@ -299,7 +300,7 @@ class IrUiMenu(models.Model):
'children', []).append(menu_item['id'])
attachment = mi_attachment_by_res_id.get(menu_item['id'])
if attachment:
menu_item['web_icon_data'] = attachment['datas']
menu_item['web_icon_data'] = attachment['datas'].decode()
menu_item['web_icon_data_mimetype'] = attachment['mimetype']
else:
menu_item['web_icon_data'] = False

View file

@ -187,6 +187,7 @@ class report_paperformat(models.Model):
report_ids = fields.One2many('ir.actions.report', 'paperformat_id', 'Associated reports', help="Explicitly associated reports")
print_page_width = fields.Float('Print page width (mm)', compute='_compute_print_page_size')
print_page_height = fields.Float('Print page height (mm)', compute='_compute_print_page_size')
css_margins = fields.Boolean('Use css margins', default=False)
@api.constrains('format')
def _check_format_or_page(self):

View file

@ -1,11 +1,10 @@
# -*- coding: utf-8 -*-
import re
from collections.abc import Iterable
from odoo import api, fields, models, _
from odoo.osv import expression
from odoo import api, fields, models
from odoo.tools import _, SQL
def sanitize_account_number(acc_number):
if acc_number:
@ -17,6 +16,7 @@ class Bank(models.Model):
_description = 'Bank'
_name = 'res.bank'
_order = 'name'
_rec_names_search = ['name', 'bic']
name = fields.Char(required=True)
street = fields.Char()
@ -25,6 +25,7 @@ class Bank(models.Model):
city = fields.Char()
state = fields.Many2one('res.country.state', 'Fed. State', domain="[('country_id', '=?', country)]")
country = fields.Many2one('res.country')
country_code = fields.Char(related='country.code', string='Country Code')
email = fields.Char()
phone = fields.Char()
active = fields.Boolean(default=True)
@ -37,14 +38,13 @@ class Bank(models.Model):
bank.display_name = name
@api.model
def _name_search(self, name, domain=None, operator='ilike', limit=None, order=None):
domain = domain or []
if name:
name_domain = ['|', ('bic', '=ilike', name + '%'), ('name', operator, name)]
if operator in expression.NEGATIVE_TERM_OPERATORS:
name_domain = ['&', '!'] + name_domain[1:]
domain = domain + name_domain
return self._search(domain, limit=limit, order=order)
def _search_display_name(self, operator, value):
if operator in ('ilike', 'not ilike') and value:
domain = ['|', ('bic', '=ilike', value + '%'), ('name', 'ilike', value)]
if operator == 'not ilike':
domain = ['!', *domain]
return domain
return super()._search_display_name(operator, value)
@api.onchange('country')
def _onchange_country_id(self):
@ -84,6 +84,7 @@ class ResPartnerBank(models.Model):
sequence = fields.Integer(default=10)
currency_id = fields.Many2one('res.currency', string='Currency')
company_id = fields.Many2one('res.company', 'Company', related='partner_id.company_id', store=True, readonly=True)
country_code = fields.Char(related='partner_id.country_code', string="Country Code")
_sql_constraints = [(
'unique_number',
@ -117,6 +118,15 @@ class ResPartnerBank(models.Model):
for acc in self:
acc.display_name = f'{acc.acc_number} - {acc.bank_id.name}' if acc.bank_id else acc.acc_number
def _condition_to_sql(self, alias: str, fname: str, operator: str, value, query) -> SQL:
if fname == 'acc_number':
fname = 'sanitized_acc_number'
if not isinstance(value, str) and isinstance(value, Iterable):
value = [sanitize_account_number(i) for i in value]
else:
value = sanitize_account_number(value)
return super()._condition_to_sql(alias, fname, operator, value, query)
def _sanitize_vals(self, vals):
if 'sanitized_acc_number' in vals: # do not allow to write on sanitized directly
vals['acc_number'] = vals.pop('sanitized_acc_number')
@ -133,17 +143,19 @@ class ResPartnerBank(models.Model):
self._sanitize_vals(vals)
return super().write(vals)
@api.model
def _search(self, domain, offset=0, limit=None, order=None, access_rights_uid=None):
def sanitize(arg):
if isinstance(arg, (tuple, list)) and arg[0] == 'acc_number':
value = arg[2]
if not isinstance(value, str) and isinstance(value, Iterable):
value = [sanitize_account_number(i) for i in value]
else:
value = sanitize_account_number(value)
return ('sanitized_acc_number', arg[1], value)
return arg
def action_archive_bank(self):
"""
Custom archive function because the basic action_archive don't trigger a re-rendering of the page, so
the archived value is still visible in the view.
"""
self.ensure_one()
self.action_archive()
return {'type': 'ir.actions.client', 'tag': 'reload'}
domain = [sanitize(item) for item in domain]
return super()._search(domain, offset, limit, order, access_rights_uid)
def unlink(self):
"""
Instead of deleting a bank account, we want to archive it since we cannot delete bank account that is linked
to any entries
"""
self.action_archive()
return True

View file

@ -3,10 +3,12 @@
import base64
import logging
import threading
import warnings
from odoo import api, fields, models, tools, _, Command, SUPERUSER_ID
from odoo.exceptions import ValidationError, UserError
from odoo.osv import expression
from odoo.tools import html2plaintext, file_open, ormcache
_logger = logging.getLogger(__name__)
@ -16,6 +18,7 @@ class Company(models.Model):
_name = "res.company"
_description = 'Companies'
_order = 'sequence, name'
_inherit = ['format.address.mixin', 'format.vat.label.mixin']
_parent_store = True
def copy(self, default=None):
@ -31,10 +34,10 @@ class Company(models.Model):
name = fields.Char(related='partner_id.name', string='Company Name', required=True, store=True, readonly=False)
active = fields.Boolean(default=True)
sequence = fields.Integer(help='Used to order Companies in the company switcher', default=10)
parent_id = fields.Many2one('res.company', string='Parent Company', index=True)
parent_id = fields.Many2one('res.company', string='Parent Company', index=True, ondelete='restrict')
child_ids = fields.One2many('res.company', 'parent_id', string='Branches')
all_child_ids = fields.One2many('res.company', 'parent_id', context={'active_test': False})
parent_path = fields.Char(index=True, unaccent=False)
parent_path = fields.Char(index=True)
parent_ids = fields.Many2many('res.company', compute='_compute_parent_ids', compute_sudo=True)
root_id = fields.Many2one('res.company', compute='_compute_parent_ids', compute_sudo=True)
partner_id = fields.Many2one('res.partner', string='Partner', required=True)
@ -59,6 +62,8 @@ class Company(models.Model):
)
bank_ids = fields.One2many(related='partner_id.bank_ids', readonly=False)
country_id = fields.Many2one('res.country', compute='_compute_address', inverse='_inverse_country', string="Country")
# Technical field to hide country specific fields in company form view
country_code = fields.Char(related='country_id.code', depends=['country_id'])
email = fields.Char(related='partner_id.email', store=True, readonly=False)
phone = fields.Char(related='partner_id.phone', store=True, readonly=False)
mobile = fields.Char(related='partner_id.mobile', store=True, readonly=False)
@ -67,12 +72,13 @@ class Company(models.Model):
company_registry = fields.Char(related='partner_id.company_registry', string="Company ID", readonly=False)
paperformat_id = fields.Many2one('report.paperformat', 'Paper format', default=lambda self: self.env.ref('base.paperformat_euro', raise_if_not_found=False))
external_report_layout_id = fields.Many2one('ir.ui.view', 'Document Template')
font = fields.Selection([("Lato", "Lato"), ("Roboto", "Roboto"), ("Open_Sans", "Open Sans"), ("Montserrat", "Montserrat"), ("Oswald", "Oswald"), ("Raleway", "Raleway"), ('Tajawal', 'Tajawal')], default="Lato")
font = fields.Selection([("Lato", "Lato"), ("Roboto", "Roboto"), ("Open_Sans", "Open Sans"), ("Montserrat", "Montserrat"), ("Oswald", "Oswald"), ("Raleway", "Raleway"), ('Tajawal', 'Tajawal'), ('Fira_Mono', 'Fira Mono')], default="Lato")
primary_color = fields.Char()
secondary_color = fields.Char()
color = fields.Integer(compute='_compute_color', inverse='_inverse_color')
layout_background = fields.Selection([('Blank', 'Blank'), ('Geometric', 'Geometric'), ('Custom', 'Custom')], default="Blank", required=True)
layout_background = fields.Selection([('Blank', 'Blank'), ('Demo logo', 'Demo logo'), ('Custom', 'Custom')], default="Blank", required=True)
layout_background_image = fields.Binary("Background Image")
uninstalled_l10n_module_ids = fields.Many2many('ir.module.module', compute='_compute_uninstalled_l10n_module_ids')
_sql_constraints = [
('name_uniq', 'unique (name)', 'The company name must be unique!')
]
@ -181,35 +187,79 @@ class Company(models.Model):
if self[fname] != self.parent_id[fname]:
self[fname] = self.parent_id[fname]
@api.depends('country_id')
def _compute_uninstalled_l10n_module_ids(self):
# This will only compute uninstalled modules with auto-install without recursion,
# the rest will eventually be handled by `button_install`
self.env['ir.module.module'].flush_model(['auto_install', 'country_ids', 'dependencies_id'])
self.env['ir.module.module.dependency'].flush_model()
self.env.cr.execute("""
SELECT country.id,
ARRAY_AGG(module.id)
FROM ir_module_module module,
res_country country
WHERE module.auto_install
AND state NOT IN %(install_states)s
AND NOT EXISTS (
SELECT 1
FROM ir_module_module_dependency d
JOIN ir_module_module mdep ON (d.name = mdep.name)
WHERE d.module_id = module.id
AND d.auto_install_required
AND mdep.state NOT IN %(install_states)s
)
AND EXISTS (
SELECT 1
FROM module_country mc
WHERE mc.module_id = module.id
AND mc.country_id = country.id
)
AND country.id = ANY(%(country_ids)s)
GROUP BY country.id
""", {
'country_ids': self.country_id.ids,
'install_states': ('installed', 'to install', 'to upgrade'),
})
mapping = dict(self.env.cr.fetchall())
for company in self:
company.uninstalled_l10n_module_ids = self.env['ir.module.module'].browse(mapping.get(company.country_id.id))
def install_l10n_modules(self):
uninstalled_modules = self.uninstalled_l10n_module_ids
is_ready_and_not_test = (
not tools.config['test_enable']
and (self.env.registry.ready or not self.env.registry._init)
and not getattr(threading.current_thread(), 'testing', False)
)
if uninstalled_modules and is_ready_and_not_test:
return uninstalled_modules.button_immediate_install()
return is_ready_and_not_test
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
    """Return the requested view with every company-root delegated field
    forced readonly on branch companies (``parent_id`` set), since those
    fields must mirror the root company's values.
    """
    def make_delegated_fields_readonly(node):
        # recursively walk the arch; delegated <field> nodes become readonly
        for child in node.iterchildren():
            if child.tag == 'field' and child.get('name') in delegated_fnames:
                child.set('readonly', "parent_id != False")
            else:
                make_delegated_fields_readonly(child)
        return node

    delegated_fnames = set(self._get_company_root_delegated_field_names())
    arch, view = super()._get_view(view_id, view_type, **options)
    # NOTE(review): the original also ran a flat ``arch.iter("field")`` pass
    # applying the exact same mutation — redundant leftover of an older
    # implementation; a single recursive pass suffices.
    arch = make_delegated_fields_readonly(arch)
    return arch, view
@api.model
def _search_display_name(self, operator, value):
    """Build the display-name search domain.

    With ``user_preference`` in the context, restrict the results to the
    companies the current user belongs to (and search as superuser so
    record rules do not hide them).
    """
    # NOTE(review): the original body interleaved the superseded
    # `_name_search` signature and its return statement with this method —
    # merge-residue removed, keeping the `_search_display_name` logic only.
    context = dict(self.env.context)
    newself = self
    constraint = []
    if context.pop('user_preference', None):
        # We browse as superuser. Otherwise, the user would be able to
        # select only the currently visible companies (according to rules,
        # which are probably to allow to see the child companies) even if
        # she belongs to some other companies.
        companies = self.env.user.company_ids
        constraint = [('id', 'in', companies.ids)]
        newself = newself.sudo()
    newself = newself.with_context(context)
    domain = super(Company, newself)._search_display_name(operator, value)
    return expression.AND([domain, constraint])
@api.model
@api.returns('self', lambda value: value.id)
@ -273,6 +323,10 @@ class Company(models.Model):
# Make sure that the selected currencies are enabled
companies.currency_id.sudo().filtered(lambda c: not c.active).active = True
companies_needs_l10n = companies.filtered('country_id')
if companies_needs_l10n:
companies_needs_l10n.install_l10n_modules()
return companies
def cache_invalidation_fields(self):
@ -282,11 +336,6 @@ class Company(models.Model):
'sequence', # user._get_company_ids and other potential cached search
}
@api.ondelete(at_uninstall=False)
def _unlink_if_company_has_no_children(self):
    """Block deletion of any company that still has branches."""
    for company in self:
        if company.child_ids:
            raise UserError(_("Companies that have associated branches cannot be deleted. Consider archiving them instead."))
def unlink(self):
"""
Unlink the companies and clear the cache to make sure that
@ -299,6 +348,12 @@ class Company(models.Model):
def write(self, values):
invalidation_fields = self.cache_invalidation_fields()
asset_invalidation_fields = {'font', 'primary_color', 'secondary_color', 'external_report_layout_id'}
companies_needs_l10n = (
values.get('country_id')
and self.filtered(lambda company: not company.country_id)
or self.browse()
)
if not invalidation_fields.isdisjoint(values):
self.env.registry.clear_cache()
@ -331,6 +386,9 @@ class Company(models.Model):
for fname in sorted(changed):
branches[fname] = company[fname]
if companies_needs_l10n:
companies_needs_l10n.install_l10n_modules()
# invalidate company cache to recompute address based on updated partner
company_address_fields = self._get_company_address_field_names()
company_address_fields_upd = set(company_address_fields) & set(values.keys())
@ -364,22 +422,6 @@ class Company(models.Model):
description = self.env['ir.model.fields']._get("res.company", fname).field_description
raise ValidationError(_("The %s of a subsidiary must be the same as it's root company.", description))
def open_company_edit_report(self):
    """Deprecated since 17.0: open the current company form.

    Kept for backward compatibility only; call
    ``res.config.settings.open_company()`` directly instead.
    """
    warnings.warn("Since 17.0.", DeprecationWarning, 2)
    self.ensure_one()
    return self.env['res.config.settings'].open_company()
def write_company_and_print_report(self):
    """Deprecated since 17.0: trigger the report action named in the
    context on the context's active records.

    :return: the report action, or ``None`` when the context does not carry
        a report name / active records.
    """
    warnings.warn("Since 17.0.", DeprecationWarning, 2)
    ctx = self.env.context
    report_name = ctx.get('default_report_name')
    active_ids = ctx.get('active_ids')
    active_model = ctx.get('active_model')
    if not (report_name and active_ids and active_model):
        return None
    docids = self.env[active_model].browse(active_ids)
    report = self.env['ir.actions.report'].search([('report_name', '=', report_name)], limit=1)
    return report.report_action(docids)
@api.model
def _get_main_company(self):
try:
@ -432,7 +474,7 @@ class Company(models.Model):
'active_test': False,
'default_parent_id': self.id,
},
'views': [[False, 'tree'], [False, 'kanban'], [False, 'form']],
'views': [[False, 'list'], [False, 'kanban'], [False, 'form']],
}
def _get_public_user(self):

View file

@ -1,16 +1,11 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
import logging
import re
from ast import literal_eval
from lxml import etree
from odoo import api, models, _
from odoo.exceptions import AccessError, RedirectWarning, UserError
from odoo.tools import ustr
_logger = logging.getLogger(__name__)
@ -120,195 +115,6 @@ class ResConfigConfigurable(models.TransientModel):
return self.cancel() or self.next()
class ResConfigInstaller(models.TransientModel, ResConfigModuleInstallationMixin):
    """ New-style configuration base specialized for addons selection
    and installation.

    Basic usage
    -----------

    Subclasses can simply define a number of boolean fields. The field names
    should be the names of the addons to install (when selected). Upon action
    execution, selected boolean fields (and those only) will be interpreted as
    addons to install, and batch-installed.

    Additional addons
    -----------------

    It is also possible to require the installation of an additional
    addon set when a specific preset of addons has been marked for
    installation (in the basic usage only, additionals can't depend on
    one another).

    These additionals are defined through the ``_install_if``
    property. This property is a mapping of a collection of addons (by
    name) to a collection of addons (by name) [#]_, and if all the *key*
    addons are selected for installation, then the *value* ones will
    be selected as well. For example::

        _install_if = {
            ('sale','crm'): ['sale_crm'],
        }

    This will install the ``sale_crm`` addon if and only if both the
    ``sale`` and ``crm`` addons are selected for installation.

    You can define as many additionals as you wish, and additionals
    can overlap in key and value. For instance::

        _install_if = {
            ('sale','crm'): ['sale_crm'],
            ('sale','project'): ['sale_service'],
        }

    will install both ``sale_crm`` and ``sale_service`` if all of
    ``sale``, ``crm`` and ``project`` are selected for installation.

    Hook methods
    ------------

    Subclasses might also need to express dependencies more complex
    than that provided by additionals. In this case, it's possible to
    define methods of the form ``_if_%(name)s`` where ``name`` is the
    name of a boolean field. If the field is selected, then the
    corresponding module will be marked for installation *and* the
    hook method will be executed.

    Hook methods take the usual set of parameters (cr, uid, ids,
    context) and can return a collection of additional addons to
    install (if they return anything, otherwise they should not return
    anything, though returning any "falsy" value such as None or an
    empty collection will have the same effect).

    Complete control
    ----------------

    The last hook is to simply overload the ``modules_to_install``
    method, which implements all the mechanisms above. This method
    takes the usual set of parameters (cr, uid, ids, context) and
    returns a ``set`` of addons to install (addons selected by the
    above methods minus addons from the *basic* set which are already
    installed) [#]_ so an overloader can simply manipulate the ``set``
    returned by ``ResConfigInstaller.modules_to_install`` to add or
    remove addons.

    Skipping the installer
    ----------------------

    Unless it is removed from the view, installers have a *skip*
    button which invokes ``action_skip`` (and the ``cancel`` hook from
    ``res.config``). Hooks and additionals *are not run* when skipping
    installation, even for already installed addons.

    Again, setup your hooks accordingly.

    .. [#] note that since a mapping key needs to be hashable, it's
           possible to use a tuple or a frozenset, but not a list or a
           regular set

    .. [#] because the already-installed modules are only pruned at
           the very end of ``modules_to_install``, additionals and
           hooks depending on them *are guaranteed to execute*. Setup
           your hooks accordingly.
    """
    _name = 'res.config.installer'
    _inherit = 'res.config'
    _description = 'Config Installer'

    # mapping {tuple(addon names): iterable(addon names)}: additionals,
    # see the class docstring
    _install_if = {}

    def already_installed(self):
        """ For each module, check if it's already installed and if it
        is return its name

        :returns: a list of the already installed modules in this
                  installer
        :rtype: [str]
        """
        return [m.name for m in self._already_installed()]

    def _already_installed(self):
        """ For each module (boolean fields in a res.config.installer),
        check if it's already installed (either 'to install', 'to upgrade'
        or 'installed') and if it is return the module's record

        :returns: a list of all installed modules in this installer
        :rtype: recordset (collection of Record)
        """
        # every boolean field of the installer is assumed to name an addon
        selectable = [name for name, field in self._fields.items()
                      if field.type == 'boolean']
        return self.env['ir.module.module'].search([('name', 'in', selectable),
                                                    ('state', 'in', ['to install', 'installed', 'to upgrade'])])

    def modules_to_install(self):
        """ selects all modules to install:

        * checked boolean fields
        * return values of hook methods. Hook methods are of the form
          ``_if_%(addon_name)s``, and are called if the corresponding
          addon is marked for installation. They take the arguments
          cr, uid, ids and context, and return an iterable of addon
          names
        * additionals, additionals are setup through the ``_install_if``
          class variable. ``_install_if`` is a dict of {iterable:iterable}
          where key and value are iterables of addon names.

          If all the addons in the key are selected for installation
          (warning: addons added through hooks don't count), then the
          addons in the value are added to the set of modules to install
        * not already installed
        """
        # addons whose boolean field was checked on the wizard record
        base = set(module_name
                   for installer in self.read()
                   for module_name, to_install in installer.items()
                   if self._fields[module_name].type == 'boolean' and to_install)

        # run the per-addon ``_if_<name>`` hooks of the selected addons
        hooks_results = set()
        for module in base:
            hook = getattr(self, '_if_%s' % module, None)
            if hook:
                hooks_results.update(hook() or set())

        # additionals whose requirements are all selected (hooks don't count)
        additionals = set(module
                          for requirements, consequences in self._install_if.items()
                          if base.issuperset(requirements)
                          for module in consequences)

        return (base | hooks_results | additionals) - set(self.already_installed())

    @api.model
    def default_get(self, fields_list):
        ''' If an addon is already installed, check it by default
        '''
        defaults = super(ResConfigInstaller, self).default_get(fields_list)
        return dict(defaults, **dict.fromkeys(self.already_installed(), True))

    @api.model
    def fields_get(self, allfields=None, attributes=None):
        """ If an addon is already installed, set it to readonly as
        res.config.installer doesn't handle uninstallations of already
        installed addons
        """
        fields = super().fields_get(allfields=allfields, attributes=attributes)

        for name in self.already_installed():
            if name not in fields:
                continue
            fields[name].update(
                readonly=True,
                help=ustr(fields[name].get('help', '')) +
                     _('\n\nThis addon is already installed on your system'))

        return fields

    def execute(self):
        """Install every module selected by :meth:`modules_to_install`."""
        to_install = list(self.modules_to_install())
        _logger.info('Selecting addons %s to install', to_install)

        IrModule = self.env['ir.module.module']
        modules = IrModule.search([('name', 'in', to_install)])
        return self._install_modules(modules)
class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin):
""" Base configuration wizard for application settings. It provides support for setting
default values, assigning groups to employee users, and installing modules.

View file

@ -33,6 +33,7 @@ class Country(models.Model):
_name = 'res.country'
_description = 'Country'
_order = 'name'
_rec_names_search = ['name', 'code']
name = fields.Char(
string='Country Name', required=True, translate=True)
@ -82,20 +83,22 @@ class Country(models.Model):
'The code of the country must be unique!')
]
def _name_search(self, name, domain=None, operator='ilike', limit=None, order=None):
    """Search countries by name, trying the ISO code first.

    A two-letter search term is first matched against the country ``code``;
    any code matches are ranked ahead of the plain name matches.
    """
    extra_domain = domain or []
    code_match_ids = []
    if len(name) == 2:
        # two-letter terms are likely ISO country codes: try those first
        code_match_ids = list(self._search([('code', 'ilike', name)] + extra_domain, limit=limit, order=order))
    name_domain = [('name', operator, name)]
    if code_match_ids:
        # don't return the same country twice
        name_domain.append(('id', 'not in', code_match_ids))
    return code_match_ids + list(self._search(name_domain + extra_domain, limit=limit, order=order))
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
    """Search countries by name, matching the ISO code first.

    A positive two-letter search term is first matched against the country
    ``code``; code matches are ranked before plain name matches, and only
    the remaining ``limit`` budget is passed on to the regular name search.
    """
    result = []
    domain = args or []
    # first search by code
    if operator not in expression.NEGATIVE_TERM_OPERATORS and name and len(name) == 2:
        countries = self.search_fetch(expression.AND([domain, [('code', operator, name)]]), ['display_name'], limit=limit)
        # sudo(): presumably to read display_name regardless of field-level
        # access — TODO confirm
        result.extend((country.id, country.display_name) for country in countries.sudo())
        # exclude code matches from the name search to avoid duplicates
        domain = expression.AND([domain, [('id', 'not in', countries.ids)]])
        if limit is not None:
            limit -= len(countries)
            if limit <= 0:
                # the code matches alone fill the requested limit
                return result
    # normal search
    result.extend(super().name_search(name, domain, operator, limit))
    return result
@api.model
@tools.ormcache('code')
@ -116,9 +119,11 @@ class Country(models.Model):
if ('code' in vals or 'phone_code' in vals):
# Intentionally simplified by not clearing the cache in create and unlink.
self.env.registry.clear_cache()
if 'address_view_id' in vals:
if 'address_view_id' in vals or 'vat_label' in vals:
# Changing the address view of the company must invalidate the view cached for res.partner
# because of _view_get_address
# Same goes for vat_label
# because of _get_view override from FormatVATLabelMixin
self.env.registry.clear_cache('templates')
return res
@ -158,6 +163,7 @@ class CountryState(models.Model):
_description = "Country state"
_name = 'res.country.state'
_order = 'code'
_rec_names_search = ['name', 'code']
country_id = fields.Many2one('res.country', string='Country', required=True)
name = fields.Char(string='State Name', required=True,
@ -169,41 +175,38 @@ class CountryState(models.Model):
]
@api.model
def _name_search(self, name, domain=None, operator='ilike', limit=None, order=None):
domain = domain or []
if self.env.context.get('country_id'):
domain = expression.AND([domain, [('country_id', '=', self.env.context.get('country_id'))]])
def name_search(self, name='', args=None, operator='ilike', limit=100):
    """Search states by name, matching the state code first.

    A positive search term is first matched against the state ``code`` with
    a ``=like`` prefix match; code matches are ranked before name matches,
    and only the remaining ``limit`` budget goes to the regular name search.
    """
    result = []
    domain = args or []
    # first search by code (with =like)
    if operator not in expression.NEGATIVE_TERM_OPERATORS and name:
        states = self.search_fetch(expression.AND([domain, [('code', '=like', name)]]), ['display_name'], limit=limit)
        result.extend((state.id, state.display_name) for state in states.sudo())
        # exclude code matches from the name search to avoid duplicates
        domain = expression.AND([domain, [('id', 'not in', states.ids)]])
        if limit is not None:
            limit -= len(states)
            if limit <= 0:
                # the code matches alone fill the requested limit
                return result
    # normal search
    result.extend(super().name_search(name, domain, operator, limit))
    return result
if operator == 'ilike' and not (name or '').strip():
domain1 = []
domain2 = []
else:
domain1 = [('code', '=ilike', name)]
domain2 = [('name', operator, name)]
first_state_ids = []
if domain1:
first_state_ids = list(self._search(
expression.AND([domain1, domain]), limit=limit, order=order,
))
fallback_domain = None
if name and operator in ['ilike', '=']:
fallback_domain = self._get_name_search_domain(name, operator)
if name and operator in ['in', 'any']:
fallback_domain = expression.OR([self._get_name_search_domain(n, '=') for n in name])
return first_state_ids + [
state_id
for state_id in self._search(expression.AND([domain2, domain]),
limit=limit, order=order)
if state_id not in first_state_ids
] or (
list(self._search(expression.AND([fallback_domain, domain]), limit=limit))
if fallback_domain
else []
)
@api.model
def _search_display_name(self, operator, value):
    """Extend display-name search for states.

    Positive searches also match the "Name (Country)" convention produced
    by ``_compute_display_name``; with ``country_id`` in the context the
    results are restricted to that country.
    """
    domain = super()._search_display_name(operator, value)
    positive = value and operator not in expression.NEGATIVE_TERM_OPERATORS
    if positive and operator in ('ilike', '='):
        # also try the "Name (Country)" form for the searched value
        domain = expression.OR([
            domain, self._get_name_search_domain(value, operator),
        ])
    elif positive and operator == 'in':
        domain = expression.OR([
            domain,
            *(self._get_name_search_domain(name, '=') for name in value),
        ])
    country_id = self.env.context.get('country_id')
    if country_id:
        domain = expression.AND([domain, [('country_id', '=', country_id)]])
    return domain
def _get_name_search_domain(self, name, operator):
m = re.fullmatch(r"(?P<name>.+)\((?P<country>.+)\)", name)
@ -213,7 +216,7 @@ class CountryState(models.Model):
'|', ('country_id.name', 'ilike', m['country'].strip()),
('country_id.code', '=', m['country'].strip()),
]
return None
return [expression.FALSE_LEAF]
@api.depends('country_id')
def _compute_display_name(self):

View file

@ -27,6 +27,7 @@ class Currency(models.Model):
# Note: 'code' column was removed as of v6.0, the 'name' should now hold the ISO code.
name = fields.Char(string='Currency', size=3, required=True, help="Currency Code (ISO 4217)")
iso_numeric = fields.Integer(string="Currency numeric code.", help="Currency Numeric Code (ISO 4217).")
full_name = fields.Char(string='Name')
symbol = fields.Char(help="Currency sign, to be used when printing amounts.", required=True)
rate = fields.Float(compute='_compute_current_rate', string='Current Rate', digits=0,
@ -43,8 +44,8 @@ class Currency(models.Model):
position = fields.Selection([('after', 'After Amount'), ('before', 'Before Amount')], default='after',
string='Symbol Position', help="Determines where the currency symbol should be placed after or before the amount.")
date = fields.Date(compute='_compute_date')
currency_unit_label = fields.Char(string="Currency Unit")
currency_subunit_label = fields.Char(string="Currency Subunit")
currency_unit_label = fields.Char(string="Currency Unit", translate=True)
currency_subunit_label = fields.Char(string="Currency Subunit", translate=True)
is_current_company_currency = fields.Boolean(compute='_compute_is_current_company_currency')
_sql_constraints = [
@ -114,46 +115,30 @@ class Currency(models.Model):
return
currencies = self.filtered(lambda c: not c.active)
if self.env['res.company'].search([('currency_id', 'in', currencies.ids)]):
if self.env['res.company'].search_count([('currency_id', 'in', currencies.ids)], limit=1):
raise UserError(_("This currency is set on a company and therefore cannot be deactivated."))
def _get_rates(self, company, date):
    """Return ``{currency_id: rate}`` for the currencies in ``self``.

    For each currency, the rate is the most recent one dated on or before
    ``date`` for ``company`` (company-specific rates take precedence over
    shared ones); failing that, the earliest rate ever defined; failing
    that, ``1.0``.
    """
    # NOTE(review): the original body contained BOTH the legacy raw-SQL
    # implementation and its query-builder replacement back to back
    # (merge residue); only the query-builder version is kept.
    if not self.ids:
        return {}
    self.env['res.currency.rate'].flush_model(['rate', 'currency_id', 'company_id', 'name'])
    currency_query = self.env['res.currency']._where_calc([
        ('id', 'in', self.ids),
    ], active_test=False)
    currency_id = self.env['res.currency']._field_to_sql(currency_query.table, 'id')
    # correlated sub-select: latest rate on/before the given date
    rate_query = self.env['res.currency.rate']._search([
        ('name', '<=', date),
        ('company_id', 'in', (False, company.root_id.id)),
        ('currency_id', '=', currency_id),
    ], order='company_id.id, name DESC', limit=1)
    # fallback sub-select: the very first rate known for the currency
    rate_fallback = self.env['res.currency.rate']._search([
        ('company_id', 'in', (False, company.root_id.id)),
        ('currency_id', '=', currency_id),
    ], order='company_id.id, name ASC', limit=1)
    rate = self.env['res.currency.rate']._field_to_sql(rate_query.table, 'rate')
    return dict(self.env.execute_query(currency_query.select(
        currency_id,
        SQL("COALESCE((%s), (%s), 1.0)", rate_query.select(rate), rate_fallback.select(rate))
    )))
@api.depends_context('company')
def _compute_is_current_company_currency(self):
@ -201,22 +186,23 @@ class Currency(models.Model):
logging.getLogger(__name__).warning("The library 'num2words' is missing, cannot render textual amounts.")
return ""
formatted = "%.{0}f".format(self.decimal_places) % amount
parts = formatted.partition('.')
integer_value = int(parts[0])
fractional_value = int(parts[2] or 0)
integral, _sep, fractional = f"{amount:.{self.decimal_places}f}".partition('.')
integer_value = int(integral)
lang = tools.get_lang(self.env)
amount_words = tools.ustr('{amt_value} {amt_word}').format(
amt_value=_num2words(integer_value, lang=lang.iso_code),
amt_word=self.currency_unit_label,
)
if not self.is_zero(amount - integer_value):
amount_words += ' ' + _('and') + tools.ustr(' {amt_value} {amt_word}').format(
amt_value=_num2words(fractional_value, lang=lang.iso_code),
amt_word=self.currency_subunit_label,
)
return amount_words
if self.is_zero(amount - integer_value):
return _(
'%(integral_amount)s %(currency_unit)s',
integral_amount=_num2words(integer_value, lang=lang.iso_code),
currency_unit=self.currency_unit_label,
)
else:
return _(
'%(integral_amount)s %(currency_unit)s and %(fractional_amount)s %(currency_subunit)s',
integral_amount=_num2words(integer_value, lang=lang.iso_code),
currency_unit=self.currency_unit_label,
fractional_amount=_num2words(int(fractional or 0), lang=lang.iso_code),
currency_subunit=self.currency_subunit_label,
)
def format(self, amount):
"""Return ``amount`` formatted according to ``self``'s rounding rules, symbols and positions.
@ -332,14 +318,14 @@ class Currency(models.Model):
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
arch, view = super()._get_view(view_id, view_type, **options)
if view_type in ('tree', 'form'):
if view_type in ('list', 'form'):
currency_name = (self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name
fields_maps = [
[['company_rate', 'rate'], _('Unit per %s', currency_name)],
[['inverse_company_rate', 'inverse_rate'], _('%s per Unit', currency_name)],
]
for fnames, label in fields_maps:
xpath_expression = '//tree//field[' + " or ".join(f"@name='{f}'" for f in fnames) + "][1]"
xpath_expression = '//list//field[' + " or ".join(f"@name='{f}'" for f in fnames) + "][1]"
node = arch.xpath(xpath_expression)
if node:
node[0].set('string', label)
@ -357,7 +343,7 @@ class CurrencyRate(models.Model):
default=fields.Date.context_today)
rate = fields.Float(
digits=0,
group_operator="avg",
aggregator="avg",
help='The rate of the currency to the currency of rate 1',
string='Technical Rate'
)
@ -365,14 +351,14 @@ class CurrencyRate(models.Model):
digits=0,
compute="_compute_company_rate",
inverse="_inverse_company_rate",
group_operator="avg",
aggregator="avg",
help="The currency of rate 1 to the rate of the currency.",
)
inverse_company_rate = fields.Float(
digits=0,
compute="_compute_inverse_company_rate",
inverse="_inverse_inverse_company_rate",
group_operator="avg",
aggregator="avg",
help="The rate of the currency to the currency of rate 1 ",
)
currency_id = fields.Many2one('res.currency', string='Currency', readonly=True, required=True, ondelete="cascade")
@ -476,8 +462,9 @@ class CurrencyRate(models.Model):
raise ValidationError("Currency rates should only be created for main companies")
@api.model
def _search_display_name(self, operator, value):
    """Allow searching currency rates by (possibly locale-formatted) date.

    The searched value is normalized through ``parse_date`` before being
    matched against the rate's display name.
    """
    # NOTE(review): the superseded one-line `_name_search` override was
    # interleaved here with its replacement (merge residue); the
    # `_search_display_name` version is kept.
    value = parse_date(self.env, value)
    return super()._search_display_name(operator, value)
@api.model
def _get_view_cache_key(self, view_id=None, view_type='form', **options):
@ -489,14 +476,14 @@ class CurrencyRate(models.Model):
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
    """Relabel the rate columns of the list view with the names of the
    company currency and of the currency whose rates are displayed.
    """
    # NOTE(review): the original interleaved the old 'tree'/xpath variant
    # with the new 'list'/find variant of the same relabelling loop (merge
    # residue); only the 'list' version is kept.
    arch, view = super()._get_view(view_id, view_type, **options)
    if view_type == 'list':
        names = {
            'company_currency_name': (self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name,
            'rate_currency_name': self.env['res.currency'].browse(self._context.get('active_id')).name or 'Unit',
        }
        for name, label in [['company_rate', _('%(rate_currency_name)s per %(company_currency_name)s', **names)],
                            ['inverse_company_rate', _('%(company_currency_name)s per %(rate_currency_name)s', **names)]]:
            if (node := arch.find(f"./field[@name='{name}']")) is not None:
                node.set('string', label)
    return arch, view

View file

@ -0,0 +1,203 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from contextlib import nullcontext
from datetime import datetime
import logging
from odoo import api, fields, models, tools
from odoo.http import GeoIP, request, root
from odoo.tools import SQL, OrderedSet, unique
from odoo.tools.translate import _
from .res_users import check_identity
_logger = logging.getLogger(__name__)
class ResDeviceLog(models.Model):
    """Append-only log of the devices (sessions) used to access the database.

    One row is inserted per activity "trace" of a session; the latest
    non-revoked row per device is exposed through the ``res.device`` view.
    """
    _name = 'res.device.log'
    _description = 'Device Log'
    _rec_names_search = ['platform', 'browser']

    # first 42 characters of the session id (see _update_device): this is
    # the part identifying the session file on the filesystem
    session_identifier = fields.Char("Session Identifier", required=True, index='btree')
    platform = fields.Char("Platform")
    browser = fields.Char("Browser")
    ip_address = fields.Char("IP Address")
    country = fields.Char("Country")
    city = fields.Char("City")
    device_type = fields.Selection([('computer', 'Computer'), ('mobile', 'Mobile')], "Device Type")
    user_id = fields.Many2one("res.users", index='btree')
    first_activity = fields.Datetime("First Activity")
    last_activity = fields.Datetime("Last Activity", index='btree')
    revoked = fields.Boolean("Revoked",
        help="""If True, the session file corresponding to this device
        no longer exists on the filesystem.""")
    is_current = fields.Boolean("Current Device", compute="_compute_is_current")
    linked_ip_addresses = fields.Text("Linked IP address", compute="_compute_linked_ip_addresses")

    def init(self):
        """Create a partial composite index over the non-revoked logs,
        supporting the ``res.device`` view and ordering queries."""
        self.env.cr.execute(SQL("""
            CREATE INDEX IF NOT EXISTS res_device_log__composite_idx ON %s
            (user_id, session_identifier, platform, browser, last_activity, id) WHERE revoked = False
        """,
            SQL.identifier(self._table)
        ))

    def _compute_display_name(self):
        # e.g. "Linux Firefox"; missing parts display as a translated "Unknown"
        for device in self:
            platform = device.platform or _("Unknown")
            browser = device.browser or _("Unknown")
            device.display_name = f"{platform.capitalize()} {browser.capitalize()}"

    def _compute_is_current(self):
        # a device is "current" when it belongs to the session of the
        # ongoing HTTP request (False outside of a request context)
        for device in self:
            device.is_current = request and request.session.sid.startswith(device.session_identifier)

    def _compute_linked_ip_addresses(self):
        """Collect, per (session, platform, browser) device group, all the
        IP addresses from which that device was seen."""
        device_group_map = {}
        for *device_info, ip_array in self.env['res.device.log']._read_group(
            domain=[('session_identifier', 'in', self.mapped('session_identifier'))],
            groupby=['session_identifier', 'platform', 'browser'],
            aggregates=['ip_address:array_agg']
        ):
            device_group_map[tuple(device_info)] = ip_array
        for device in self:
            # OrderedSet: de-duplicate while keeping first-seen order
            device.linked_ip_addresses = '\n'.join(
                OrderedSet(device_group_map.get(
                    (device.session_identifier, device.platform, device.browser), []
                ))
            )

    def _order_field_to_sql(self, alias, field_name, direction, nulls, query):
        # 'is_current' is not stored: order it by comparing rows against the
        # session identifier of the ongoing request
        if field_name == 'is_current' and request:
            return SQL("session_identifier = %s DESC", request.session.sid[:42])
        return super()._order_field_to_sql(alias, field_name, direction, nulls, query)

    def _is_mobile(self, platform):
        """Return whether ``platform`` (a user-agent platform string) is a
        known mobile platform."""
        if not platform:
            return False
        mobile_platform = ['android', 'iphone', 'ipad', 'ipod', 'blackberry', 'windows phone', 'webos']
        return platform.lower() in mobile_platform

    @api.model
    def _update_device(self, request):
        """
        Must be called when we want to update the device for the current request.
        Passage through this method must leave a "trace" in the session.

        :param request: Request or WebsocketRequest object
        """
        trace = request.session.update_trace(request)
        if not trace:
            # no new activity to record for this session
            return

        geoip = GeoIP(trace['ip_address'])
        user_id = request.session.uid
        session_identifier = request.session.sid[:42]

        # a readonly cursor cannot take the INSERT below: roll it back and
        # open a dedicated read-write cursor instead
        if self.env.cr.readonly:
            self.env.cr.rollback()
            cursor = self.env.registry.cursor(readonly=False)
        else:
            cursor = nullcontext(self.env.cr)

        with cursor as cr:
            cr.execute(SQL("""
                INSERT INTO res_device_log (session_identifier, platform, browser, ip_address, country, city, device_type, user_id, first_activity, last_activity, revoked)
                VALUES (%(session_identifier)s, %(platform)s, %(browser)s, %(ip_address)s, %(country)s, %(city)s, %(device_type)s, %(user_id)s, %(first_activity)s, %(last_activity)s, %(revoked)s)
            """,
                session_identifier=session_identifier,
                platform=trace['platform'],
                browser=trace['browser'],
                ip_address=trace['ip_address'],
                country=geoip.get('country_name'),
                city=geoip.get('city'),
                device_type='mobile' if self._is_mobile(trace['platform']) else 'computer',
                user_id=user_id,
                # NOTE(review): fromtimestamp() converts to the server's
                # local timezone — presumably the trace timestamps are meant
                # as naive server-local datetimes; confirm in update_trace()
                first_activity=datetime.fromtimestamp(trace['first_activity']),
                last_activity=datetime.fromtimestamp(trace['last_activity']),
                revoked=False,
            ))
            _logger.info("User %d inserts device log (%s)", user_id, session_identifier)

    @api.autovacuum
    def _gc_device_log(self):
        # Keep the last device log
        # (even if the session file no longer exists on the filesystem)
        self.env.cr.execute("""
            DELETE FROM res_device_log log1
            WHERE EXISTS (
                SELECT 1 FROM res_device_log log2
                WHERE
                    log1.session_identifier = log2.session_identifier
                    AND log1.platform = log2.platform
                    AND log1.browser = log2.browser
                    AND log1.ip_address = log2.ip_address
                    AND log1.last_activity < log2.last_activity
            )
        """)
        _logger.info("GC device logs delete %d entries", self.env.cr.rowcount)
class ResDevice(models.Model):
    """SQL view over ``res.device.log`` exposing only the most recent
    non-revoked log entry per (user, session, platform, browser) device."""
    _name = "res.device"
    _inherit = ["res.device.log"]
    _description = "Devices"
    _auto = False  # backed by the SQL view created in init()
    _order = 'last_activity desc'

    @check_identity
    def revoke(self):
        """Revoke the selected devices (after an identity check)."""
        return self._revoke()

    def _revoke(self):
        """Invalidate the sessions of these devices and mark their logs as
        revoked; log the current user out if their own session was among
        them."""
        ResDeviceLog = self.env['res.device.log']

        session_identifiers = list(unique(device.session_identifier for device in self))
        # delete the session files so the sessions can no longer be used
        root.session_store.delete_from_identifiers(session_identifiers)
        revoked_devices = ResDeviceLog.sudo().search([('session_identifier', 'in', session_identifiers)])
        revoked_devices.write({'revoked': True})
        _logger.info("User %d revokes devices (%s)", self.env.uid, ', '.join(session_identifiers))

        must_logout = bool(self.filtered('is_current'))
        if must_logout:
            # the current request's own session was revoked: log out
            request.session.logout()

    @api.model
    def _select(self):
        return "SELECT D.*"

    @api.model
    def _from(self):
        return "FROM res_device_log D"

    @api.model
    def _where(self):
        # keep only the latest non-revoked log per
        # (user, session, platform, browser) tuple, ties broken by id
        return """
            WHERE
                NOT EXISTS (
                    SELECT 1
                    FROM res_device_log D2
                    WHERE
                        D2.user_id = D.user_id
                        AND D2.session_identifier = D.session_identifier
                        AND D2.platform IS NOT DISTINCT FROM D.platform
                        AND D2.browser IS NOT DISTINCT FROM D.browser
                        AND (
                            D2.last_activity > D.last_activity
                            OR (D2.last_activity = D.last_activity AND D2.id > D.id)
                        )
                        AND D2.revoked = False
                )
                AND D.revoked = False
        """

    @property
    def _query(self):
        # full SELECT statement backing the SQL view
        return "%s %s %s" % (self._select(), self._from(), self._where())

    def init(self):
        """(Re)create the SQL view backing this model."""
        tools.drop_view_if_exists(self.env.cr, self._table)
        self.env.cr.execute(SQL("""
            CREATE or REPLACE VIEW %s as (%s)
        """,
            SQL.identifier(self._table),
            SQL(self._query)
        ))

View file

@ -6,15 +6,46 @@ import json
import locale
import logging
import re
from operator import itemgetter
from typing import Any, Literal
from odoo import api, fields, models, tools, _
from odoo.exceptions import UserError, ValidationError
from odoo.tools import OrderedSet
from odoo.tools.misc import ReadonlyDict
_logger = logging.getLogger(__name__)
DEFAULT_DATE_FORMAT = '%m/%d/%Y'
DEFAULT_TIME_FORMAT = '%H:%M:%S'
DEFAULT_SHORT_TIME_FORMAT = '%H:%M'
class LangData(ReadonlyDict):
    """ A ``dict``-like class which can access field value like a ``res.lang`` record.

    Note: This data class cannot store data for fields with the same name as
    ``dict`` methods, like ``dict.keys``.
    """
    __slots__ = ()

    def __bool__(self) -> bool:
        # a dummy LangData (all values False, see LangDataDict.__missing__
        # in this file) has a falsy id and is therefore falsy itself
        return bool(self.id)

    def __getattr__(self, name: str) -> Any:
        # attribute access falls back on item lookup: lang.code == lang['code']
        try:
            return self[name]
        except KeyError:
            raise AttributeError
class LangDataDict(ReadonlyDict):
""" A ``dict`` of :class:`LangData` objects indexed by some key, which returns
a special dummy :class:`LangData` for missing keys.
"""
__slots__ = ()
def __missing__(self, key: Any) -> LangData:
# Build a dummy LangData with every cached field set to False, using an
# arbitrary existing entry as the field-name template.
# NOTE(review): next(iter(...)) raises StopIteration when the dict is
# empty — relies on at least one active language existing; confirm upstream.
some_lang = next(iter(self.values())) # should have at least one active language
return LangData(dict.fromkeys(some_lang, False))
class Lang(models.Model):
@ -23,7 +54,7 @@ class Lang(models.Model):
_order = "active desc,name"
_allow_sudo_commands = False
_disallowed_datetime_patterns = list(tools.DATETIME_FORMATS_MAP)
_disallowed_datetime_patterns = list(tools.misc.DATETIME_FORMATS_MAP)
_disallowed_datetime_patterns.remove('%y') # this one is in fact allowed, just not good practice
name = fields.Char(required=True)
@ -34,6 +65,7 @@ class Lang(models.Model):
direction = fields.Selection([('ltr', 'Left-to-Right'), ('rtl', 'Right-to-Left')], required=True, default='ltr')
date_format = fields.Char(string='Date Format', required=True, default=DEFAULT_DATE_FORMAT)
time_format = fields.Char(string='Time Format', required=True, default=DEFAULT_TIME_FORMAT)
short_time_format = fields.Char(string='Short Time Format', required=True, default=DEFAULT_SHORT_TIME_FORMAT, help="Time Format without seconds")
week_start = fields.Selection([('1', 'Monday'),
('2', 'Tuesday'),
('3', 'Wednesday'),
@ -132,7 +164,7 @@ class Lang(models.Model):
# create the language with locale information
fail = True
iso_lang = tools.get_iso_codes(lang)
for ln in tools.get_locales(lang):
for ln in tools.translate.get_locales(lang):
try:
locale.setlocale(locale.LC_ALL, str(ln))
fail = False
@ -164,7 +196,7 @@ class Lang(models.Model):
# For some locales, nl_langinfo returns a D_FMT/T_FMT that contains
# unsupported '%-' patterns, e.g. for cs_CZ
format = format.replace('%-', '%')
for pattern, replacement in tools.DATETIME_FORMATS_MAP.items():
for pattern, replacement in tools.misc.DATETIME_FORMATS_MAP.items():
format = format.replace(pattern, replacement)
return str(format)
@ -183,7 +215,7 @@ class Lang(models.Model):
try:
return self.create(lang_info)
finally:
tools.resetlocale()
tools.translate.resetlocale()
@api.model
def install_lang(self):
@ -209,68 +241,69 @@ class Lang(models.Model):
partner.write({'lang': lang_code})
return True
@tools.ormcache('code')
def _lang_get_id(self, code):
return self.with_context(active_test=True).search([('code', '=', code)]).id
@tools.ormcache('code')
def _lang_get_direction(self, code):
return self.with_context(active_test=True).search([('code', '=', code)]).direction
@tools.ormcache('url_code')
def _lang_get_code(self, url_code):
return self.with_context(active_test=True).search([('url_code', '=', url_code)]).code or url_code
def _lang_get(self, code):
""" Return the language using this code if it is active """
return self.browse(self._lang_get_id(code))
@tools.ormcache('self.code', 'monetary')
def _data_get(self, monetary=False):
thousands_sep = self.thousands_sep or ''
decimal_point = self.decimal_point
grouping = self.grouping
return grouping, thousands_sep, decimal_point
@api.model
@tools.ormcache()
def get_available(self):
""" Return the available languages as a list of (code, url_code, name,
active) sorted by name.
# ------------------------------------------------------------
# cached methods for **active** languages
# ------------------------------------------------------------
@property
def CACHED_FIELDS(self) -> OrderedSet:
""" Return fields to cache for the active languages
Please promise all these fields don't depend on other models and context
and are not translated.
Warning: Don't add method names of ``dict`` to CACHED_FIELDS for sake of the
implementation of LangData
"""
langs = self.with_context(active_test=False).search([])
return langs.get_sorted()
return OrderedSet(['id', 'name', 'code', 'iso_code', 'url_code', 'active', 'direction', 'date_format',
'time_format', 'short_time_format', 'week_start', 'grouping', 'decimal_point', 'thousands_sep', 'flag_image_url'])
def get_sorted(self):
return sorted([(lang.code, lang.url_code, lang.name, lang.active, lang.flag_image_url) for lang in self], key=itemgetter(2))
def _get_data(self, **kwargs: Any) -> LangData:
""" Get the language data for the given field value in kwargs
For example, get_data(code='en_US') will return the LangData
for the res.lang record whose 'code' field value is 'en_US'
@tools.ormcache('self.id')
def _get_cached_values(self):
self.ensure_one()
return {
'id': self.id,
'code': self.code,
'url_code': self.url_code,
'name': self.name,
}
:param dict kwargs: {field_name: field_value}
field_name is the only key in kwargs and in ``self.CACHED_FIELDS``
Try to reuse the used ``field_name``s: 'id', 'code', 'url_code'
:return: Valid LangData if (field_name, field_value) pair is for an
**active** language. Otherwise, Dummy LangData which will return
``False`` for all ``self.CACHED_FIELDS``
:rtype: LangData
:raise: UserError if field_name is not in ``self.CACHED_FIELDS``
"""
[[field_name, field_value]] = kwargs.items()
return self._get_active_by(field_name)[field_value]
def _get_cached(self, field):
return self._get_cached_values()[field]
def _lang_get(self, code: str):
""" Return the language using this code if it is active """
# Resolve through the cached language data: _get_data returns a dummy
# LangData (id == False) for unknown/inactive codes, so browse() then
# yields an empty recordset instead of raising.
return self.browse(self._get_data(code=code).id)
def _get_code(self, code: str) -> str | Literal[False]:
""" Return the given language code if active, else return ``False`` """
# The dummy LangData for a missing language returns False for every
# cached field, including 'code'.
return self._get_data(code=code).code
@api.model
@tools.ormcache('code')
def _lang_code_to_urlcode(self, code):
for c, urlc, name, *_ in self.get_available():
if c == code:
return urlc
return self._lang_get(code).url_code
@api.readonly
def get_installed(self) -> list[tuple[str, str]]:
""" Return installed languages' (code, name) pairs sorted by name. """
# _get_active_by('code') is ormcached and already ordered by language
# name, so no extra sort or query is needed here.
return [(code, data.name) for code, data in self._get_active_by('code').items()]
@api.model
@tools.ormcache()
def get_installed(self):
""" Return the installed languages as a list of (code, name) sorted by name. """
langs = self.with_context(active_test=True).search([])
return sorted([(lang.code, lang.name) for lang in langs], key=itemgetter(1))
@tools.ormcache('field')
def _get_active_by(self, field: str) -> LangDataDict:
""" Return a LangDataDict mapping active languages' **unique**
**required** ``self.CACHED_FIELDS`` values to their LangData.
Its items are ordered by languages' names
Try to reuse the used ``field``s: 'id', 'code', 'url_code'
"""
# Only fields declared cacheable may be used as index keys.
if field not in self.CACHED_FIELDS:
raise UserError(_('Field "%s" is not cached', field))
if field == 'code':
# Base case: fetch all active languages once (sudo, ordered by name)
# and snapshot the cached fields into immutable LangData records.
langs = self.sudo().with_context(active_test=True).search_fetch([], self.CACHED_FIELDS, order='name')
return LangDataDict({
lang.code: LangData({f: lang[f] for f in self.CACHED_FIELDS})
for lang in langs
})
# Other index fields are derived by re-keying the cached 'code' mapping,
# so a single database query serves every cached index.
return LangDataDict({data[field]: data for data in self._get_active_by('code').values()})
# ------------------------------------------------------------
def toggle_active(self):
super().toggle_active()
@ -303,6 +336,26 @@ class Lang(models.Model):
self.env['ir.default'].discard_values('res.partner', 'lang', lang_codes)
res = super(Lang, self).write(vals)
if vals.get('active'):
# If we activate a lang, set its url_code to the shortest version
# if possible
for long_lang in self.filtered(lambda lang: '_' in lang.url_code):
short_code = long_lang.code.split('_')[0]
short_lang = self.with_context(active_test=False).search([
('url_code', '=', short_code),
], limit=1) # url_code is unique
if (
short_lang
and not short_lang.active
# `code` should always be the long format containing `_` but
# there is a plan to change this in the future for `es_419`.
# This `and` is about not failing if it's the case one day.
and short_lang.code != short_code
):
short_lang.url_code = short_lang.code
long_lang.url_code = short_code
self.env.flush_all()
self.env.registry.clear_cache()
return res
@ -324,16 +377,17 @@ class Lang(models.Model):
def copy_data(self, default=None):
default = dict(default or {})
vals_list = super().copy_data(default=default)
for record, vals in zip(self, vals_list):
if "name" not in default:
vals["name"] = _("%s (copy)", record.name)
if "code" not in default:
vals["code"] = _("%s (copy)", record.code)
if "url_code" not in default:
vals["url_code"] = _("%s (copy)", record.url_code)
return vals_list
if "name" not in default:
default["name"] = _("%s (copy)", self.name)
if "code" not in default:
default["code"] = _("%s (copy)", self.code)
if "url_code" not in default:
default["url_code"] = _("%s (copy)", self.url_code)
return super().copy_data(default=default)
def format(self, percent, value, grouping=False, monetary=False):
def format(self, percent: str, value, grouping: bool = False) -> str:
""" Format() will return the language-specific output for float values"""
self.ensure_one()
if percent[0] != '%':
@ -343,7 +397,10 @@ class Lang(models.Model):
# floats and decimal ints need special action!
if grouping:
lang_grouping, thousands_sep, decimal_point = self._data_get(monetary)
data = self._get_data(id=self.id)
if not data:
raise UserError(_("The language %s is not installed.", self.name))
lang_grouping, thousands_sep, decimal_point = data.grouping, data.thousands_sep or '', data.decimal_point
eval_lang_grouping = ast.literal_eval(lang_grouping)
if percent[-1] in 'eEfFgG':
@ -374,7 +431,6 @@ class Lang(models.Model):
}
}
def split(l, counts):
"""

View file

@ -1,25 +1,29 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from __future__ import annotations
import base64
import collections
import datetime
import hashlib
import pytz
import threading
import re
import warnings
import requests
from collections import defaultdict
from lxml import etree
from random import randint
from werkzeug import urls
from odoo import api, fields, models, tools, SUPERUSER_ID, _, Command
from odoo.osv.expression import get_unaccent_wrapper
from odoo.exceptions import RedirectWarning, UserError, ValidationError
import typing
if typing.TYPE_CHECKING:
from .res_users import UsersView as ResUsers
from .res_bank import ResPartnerBank
from .res_country import Country, CountryState
from .res_company import Company as ResCompany
# Global variables used for the warning fields declared on the res.partner
# in the following modules : sale, purchase, account, stock
WARNING_MESSAGE = [
@ -42,6 +46,21 @@ def _tz_get(self):
return _tzs
class FormatVATLabelMixin(models.AbstractModel):
_name = "format.vat.label.mixin"
_description = "Country Specific VAT Label"
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
# Relabel the "vat" field in views with the country-specific term
# configured on the current company's country (if any).
arch, view = super()._get_view(view_id, view_type, **options)
if vat_label := self.env.company.country_id.vat_label:
for node in arch.iterfind(".//field[@name='vat']"):
node.set("string", vat_label)
# In some module vat field is replaced and so above string change is not working
# — an explicit <label for='vat'> shadows the field's own string,
# so those label nodes are patched as well.
for node in arch.iterfind(".//label[@for='vat']"):
node.set("string", vat_label)
return arch, view
class FormatAddressMixin(models.AbstractModel):
_name = "format.address.mixin"
_description = 'Address Format'
@ -129,17 +148,17 @@ class PartnerCategory(models.Model):
def _get_default_color(self):
return randint(1, 11)
name = fields.Char(string='Tag Name', required=True, translate=True)
color = fields.Integer(string='Color', default=_get_default_color)
parent_id = fields.Many2one('res.partner.category', string='Parent Category', index=True, ondelete='cascade')
child_ids = fields.One2many('res.partner.category', 'parent_id', string='Child Tags')
name = fields.Char('Name', required=True, translate=True)
color = fields.Integer(string='Color', default=_get_default_color, aggregator=False)
parent_id: PartnerCategory = fields.Many2one('res.partner.category', string='Category', index=True, ondelete='cascade')
child_ids: PartnerCategory = fields.One2many('res.partner.category', 'parent_id', string='Child Tags')
active = fields.Boolean(default=True, help="The active field allows you to hide the category without removing it.")
parent_path = fields.Char(index=True, unaccent=False)
partner_ids = fields.Many2many('res.partner', column1='category_id', column2='partner_id', string='Partners', copy=False)
parent_path = fields.Char(index=True)
partner_ids: Partner = fields.Many2many('res.partner', column1='category_id', column2='partner_id', string='Partners', copy=False)
@api.constrains('parent_id')
def _check_parent_id(self):
if not self._check_recursion():
if self._has_cycle():
raise ValidationError(_('You can not create recursive tags.'))
@api.depends('parent_id')
@ -156,14 +175,11 @@ class PartnerCategory(models.Model):
category.display_name = ' / '.join(reversed(names))
@api.model
def _name_search(self, name, domain=None, operator='ilike', limit=None, order=None):
domain = domain or []
if name:
# Be sure name_search is symmetric to display_name
name = name.split(' / ')[-1]
domain = [('name', operator, name)] + domain
return self._search(domain, limit=limit, order=order)
def _search_display_name(self, operator, value):
domain = super()._search_display_name(operator, value)
if operator.endswith('like'):
return [('id', 'child_of', self._search(list(domain)))]
return domain
class PartnerTitle(models.Model):
_name = 'res.partner.title'
@ -176,7 +192,7 @@ class PartnerTitle(models.Model):
class Partner(models.Model):
_description = 'Contact'
_inherit = ['format.address.mixin', 'avatar.mixin']
_inherit = ['format.address.mixin', 'format.vat.label.mixin', 'avatar.mixin']
_name = "res.partner"
_order = "complete_name ASC, id DESC"
_rec_names_search = ['complete_name', 'email', 'ref', 'vat', 'company_registry'] # TODO vat must be sanitized the same way for storing/searching
@ -208,37 +224,38 @@ class Partner(models.Model):
name = fields.Char(index=True, default_export_compatible=True)
complete_name = fields.Char(compute='_compute_complete_name', store=True, index=True)
date = fields.Date(index=True)
title = fields.Many2one('res.partner.title')
parent_id = fields.Many2one('res.partner', string='Related Company', index=True)
title: PartnerTitle = fields.Many2one('res.partner.title')
parent_id: Partner = fields.Many2one('res.partner', string='Related Company', index=True)
parent_name = fields.Char(related='parent_id.name', readonly=True, string='Parent name')
child_ids = fields.One2many('res.partner', 'parent_id', string='Contact', domain=[('active', '=', True)])
child_ids: Partner = fields.One2many('res.partner', 'parent_id', string='Contact', domain=[('active', '=', True)], context={'active_test': False})
ref = fields.Char(string='Reference', index=True)
lang = fields.Selection(_lang_get, string='Language',
help="All the emails and documents sent to this contact will be translated in this language.")
active_lang_count = fields.Integer(compute='_compute_active_lang_count')
tz = fields.Selection(_tz_get, string='Timezone', default=lambda self: self._context.get('tz'),
tz = fields.Selection(_tzs, string='Timezone', default=lambda self: self._context.get('tz'),
help="When printing documents and exporting/importing data, time values are computed according to this timezone.\n"
"If the timezone is not set, UTC (Coordinated Universal Time) is used.\n"
"Anywhere else, time values are computed according to the time offset of your web client.")
tz_offset = fields.Char(compute='_compute_tz_offset', string='Timezone offset')
user_id = fields.Many2one(
user_id: ResUsers = fields.Many2one(
'res.users', string='Salesperson',
compute='_compute_user_id',
precompute=True, # avoid queries post-create
readonly=False, store=True,
help='The internal user in charge of this contact.')
vat = fields.Char(string='Tax ID', index=True, help="The Tax Identification Number. Values here will be validated based on the country format. You can use '/' to indicate that the partner is not subject to tax.")
same_vat_partner_id = fields.Many2one('res.partner', string='Partner with same Tax ID', compute='_compute_same_vat_partner_id', store=False)
same_company_registry_partner_id = fields.Many2one('res.partner', string='Partner with same Company Registry', compute='_compute_same_vat_partner_id', store=False)
company_registry = fields.Char(string="Company ID", compute='_compute_company_registry', store=True, readonly=False,
vat_label = fields.Char(string='Tax ID Label', compute='_compute_vat_label')
same_vat_partner_id: Partner = fields.Many2one('res.partner', string='Partner with same Tax ID', compute='_compute_same_vat_partner_id', store=False)
same_company_registry_partner_id: Partner = fields.Many2one('res.partner', string='Partner with same Company Registry', compute='_compute_same_vat_partner_id', store=False)
company_registry = fields.Char(string="Company ID", compute='_compute_company_registry', store=True, readonly=False, index='btree_not_null',
help="The registry number of the company. Use it if it is different from the Tax ID. It must be unique across all partners of a same country")
bank_ids = fields.One2many('res.partner.bank', 'partner_id', string='Banks')
company_registry_label = fields.Char(string='Company ID Label', compute='_compute_company_registry_label')
bank_ids: ResPartnerBank = fields.One2many('res.partner.bank', 'partner_id', string='Banks')
website = fields.Char('Website Link')
comment = fields.Html(string='Notes')
category_id = fields.Many2many('res.partner.category', column1='partner_id',
category_id: PartnerCategory = fields.Many2many('res.partner.category', column1='partner_id',
column2='category_id', string='Tags', default=_default_category)
active = fields.Boolean(default=True)
employee = fields.Boolean(help="Check this box if this contact is an Employee.")
@ -249,18 +266,14 @@ class Partner(models.Model):
('delivery', 'Delivery Address'),
('other', 'Other Address'),
], string='Address Type',
default='contact',
help="- Contact: Use this to organize the contact details of employees of a given company (e.g. CEO, CFO, ...).\n"
"- Invoice Address: Preferred address for all invoices. Selected by default when you invoice an order that belongs to this company.\n"
"- Delivery Address: Preferred address for all deliveries. Selected by default when you deliver an order that belongs to this company.\n"
"- Other: Other address for the company (e.g. subsidiary, ...)")
default='contact')
# address fields
street = fields.Char()
street2 = fields.Char()
zip = fields.Char(change_default=True)
city = fields.Char()
state_id = fields.Many2one("res.country.state", string='State', ondelete='restrict', domain="[('country_id', '=?', country_id)]")
country_id = fields.Many2one('res.country', string='Country', ondelete='restrict')
state_id: CountryState = fields.Many2one("res.country.state", string='State', ondelete='restrict', domain="[('country_id', '=?', country_id)]")
country_id: Country = fields.Many2one('res.country', string='Country', ondelete='restrict')
country_code = fields.Char(related='country_id.code', string="Country Code")
partner_latitude = fields.Float(string='Geo Latitude', digits=(10, 7))
partner_longitude = fields.Float(string='Geo Longitude', digits=(10, 7))
@ -268,19 +281,19 @@ class Partner(models.Model):
email_formatted = fields.Char(
'Formatted Email', compute='_compute_email_formatted',
help='Format email address "Name <email@domain>"')
phone = fields.Char(unaccent=False)
mobile = fields.Char(unaccent=False)
phone = fields.Char()
mobile = fields.Char()
is_company = fields.Boolean(string='Is a Company', default=False,
help="Check if the contact is a company, otherwise it is a person")
is_public = fields.Boolean(compute='_compute_is_public')
industry_id = fields.Many2one('res.partner.industry', 'Industry')
industry_id: ResPartnerIndustry = fields.Many2one('res.partner.industry', 'Industry')
# company_type is only an interface field, do not use it in business logic
company_type = fields.Selection(string='Company Type',
selection=[('person', 'Individual'), ('company', 'Company')],
compute='_compute_company_type', inverse='_write_company_type')
company_id = fields.Many2one('res.company', 'Company', index=True)
company_id: ResCompany = fields.Many2one('res.company', 'Company', index=True)
color = fields.Integer(string='Color Index', default=0)
user_ids = fields.One2many('res.users', 'partner_id', string='Users', auto_join=True)
user_ids: ResUsers = fields.One2many('res.users', 'partner_id', string='Users', auto_join=True)
partner_share = fields.Boolean(
'Share Partner', compute='_compute_partner_share', store=True,
help="Either customer (not a user), either shared user. Indicated the current partner is a customer without "
@ -288,7 +301,7 @@ class Partner(models.Model):
contact_address = fields.Char(compute='_compute_contact_address', string='Complete Address')
# technical field used for managing commercial fields
commercial_partner_id = fields.Many2one(
commercial_partner_id: Partner = fields.Many2one(
'res.partner', string='Commercial Entity',
compute='_compute_commercial_partner', store=True,
recursive=True, index=True)
@ -298,7 +311,7 @@ class Partner(models.Model):
barcode = fields.Char(help="Use a barcode to identify this contact.", copy=False, company_dependent=True)
# hack to allow using plain browse record in qweb views, and used in ir.qweb.field.contact
self = fields.Many2one(comodel_name=_name, compute='_compute_get_ids')
self: Partner = fields.Many2one(comodel_name='res.partner', compute='_compute_get_ids')
_sql_constraints = [
('check_name', "CHECK( (type='contact' AND name IS NOT NULL) or (type!='contact') )", 'Contacts require a name'),
@ -419,6 +432,10 @@ class Partner(models.Model):
domain += [('id', '!=', partner_id), '!', ('id', 'child_of', partner_id)]
partner.same_company_registry_partner_id = bool(partner.company_registry) and not partner.parent_id and Partner.search(domain, limit=1)
@api.depends_context('company')
def _compute_vat_label(self):
# Company-dependent (non-stored) label: use the country-specific VAT
# label of the current company's country, else the generic "Tax ID".
self.vat_label = self.env.company.country_id.vat_label or _("Tax ID")
@api.depends(lambda self: self._display_address_depends())
def _compute_contact_address(self):
for partner in self:
@ -447,26 +464,40 @@ class Partner(models.Model):
for company in self:
company.company_registry = company.company_registry
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
arch, view = super()._get_view(view_id, view_type, **options)
company = self.env.company
if company.country_id.vat_label:
for node in arch.xpath("//field[@name='vat']"):
node.attrib["string"] = company.country_id.vat_label
return arch, view
@api.depends('country_id')
def _compute_company_registry_label(self):
# Look up a country-specific label; localization modules provide
# entries via _get_company_registry_labels(). Falls back to the
# generic "Company ID".
label_by_country = self._get_company_registry_labels()
for company in self:
country_code = company.country_id.code
company.company_registry_label = label_by_country.get(country_code, _("Company ID"))
def _get_company_registry_labels(self):
# Hook: return {country_code: label}; extended by localization modules.
return {}
@api.constrains('parent_id')
def _check_parent_id(self):
if not self._check_recursion():
if self._has_cycle():
raise ValidationError(_('You cannot create recursive Partner hierarchies.'))
def copy(self, default=None):
self.ensure_one()
chosen_name = default.get('name') if default else ''
new_name = chosen_name or _('%s (copy)', self.name)
default = dict(default or {}, name=new_name)
return super(Partner, self).copy(default)
@api.constrains('company_id')
def _check_partner_company(self):
"""
Check that for every partner which has a company,
if there exists a company linked to that partner,
the company_id set on the partner is that company
"""
# Only company-partners with an explicit company_id can conflict.
partners = self.filtered(lambda p: p.is_company and p.company_id)
# search_fetch pre-loads partner_id so the loop below triggers no
# extra queries.
companies = self.env['res.company'].search_fetch([('partner_id', 'in', partners.ids)], ['partner_id'])
for company in companies:
if company != company.partner_id.company_id:
raise ValidationError(_('The company assigned to this partner does not match the company this partner represents.'))
def copy_data(self, default=None):
"""Return duplication values; suffix each partner's name with "(copy)"
unless an explicit name override is supplied in ``default``."""
default = dict(default or {})
vals_list = super().copy_data(default=default)
if default.get('name'):
return vals_list
return [dict(vals, name=self.env._("%s (copy)", partner.name)) for partner, vals in zip(self, vals_list)]
@api.onchange('parent_id')
def onchange_parent_id(self):
@ -600,7 +631,7 @@ class Partner(models.Model):
def update_address(self, vals):
addr_vals = {key: vals[key] for key in self._address_fields() if key in vals}
if addr_vals:
return super(Partner, self).write(addr_vals)
return super().write(addr_vals)
@api.model
def _commercial_fields(self):
@ -629,35 +660,27 @@ class Partner(models.Model):
self._commercial_sync_to_children()
def _company_dependent_commercial_sync(self):
company_dependent_commercial_field_ids = [
self.env['ir.model.fields']._get(self._name, fname).id
for fname in self._company_dependent_commercial_fields()
]
if company_dependent_commercial_field_ids:
parent_properties = self.env['ir.property'].search([
('fields_id', 'in', company_dependent_commercial_field_ids),
('res_id', '=', f'res.partner,{self.commercial_partner_id.id}'),
# value was already assigned for current company
('company_id', '!=', self.env.company.id),
])
# prevent duplicate keys by removing existing properties from the partner
self.env['ir.property'].search([
('fields_id', 'in', company_dependent_commercial_field_ids),
('res_id', '=', f'res.partner,{self.id}'),
('company_id', '!=', self.env.company.id),
]).unlink()
for prop in parent_properties:
prop.copy({'res_id': f'res.partner,{self.id}'})
if not (fields_to_sync := self._company_dependent_commercial_fields()):
return
def _commercial_sync_to_children(self):
for company_sudo in self.env['res.company'].sudo().search([]):
if company_sudo == self.env.company:
continue # already handled by _commercial_sync_from_company
self_in_company = self.with_company(company_sudo)
self_in_company.write(
self_in_company.commercial_partner_id._update_fields_values(fields_to_sync)
)
def _commercial_sync_to_children(self, fields_to_sync=None):
""" Handle sync of commercial fields to descendants """
commercial_partner = self.commercial_partner_id
sync_vals = commercial_partner._update_fields_values(self._commercial_fields())
if fields_to_sync is None:
fields_to_sync = self._commercial_fields()
sync_vals = commercial_partner._update_fields_values(fields_to_sync)
sync_children = self.child_ids.filtered(lambda c: not c.is_company)
for child in sync_children:
child._commercial_sync_to_children()
child._commercial_sync_to_children(fields_to_sync)
res = sync_children.write(sync_vals)
sync_children._compute_commercial_partner()
return res
def _fields_sync(self, values):
@ -681,13 +704,8 @@ class Partner(models.Model):
return
# 2a. Commercial Fields: sync if commercial entity
if self.commercial_partner_id == self:
commercial_fields = self._commercial_fields()
if any(field in values for field in commercial_fields):
self.sudo()._commercial_sync_to_children()
for child in self.child_ids.filtered(lambda c: not c.is_company):
if child.commercial_partner_id != self.commercial_partner_id:
self.sudo()._commercial_sync_to_children()
break
fields_to_sync = values.keys() & self._commercial_fields()
self.sudo()._commercial_sync_to_children(fields_to_sync)
# 2b. Address fields: sync if address changed
address_fields = self._address_fields()
if any(field in values for field in address_fields):
@ -699,8 +717,12 @@ class Partner(models.Model):
was meant to be company address """
parent = self.parent_id
address_fields = self._address_fields()
if (parent.is_company or not parent.parent_id) and len(parent.child_ids) == 1 and \
any(self[f] for f in address_fields) and not any(parent[f] for f in address_fields):
if (
(parent.is_company or not parent.parent_id)
and any(self[f] for f in address_fields)
and not any(parent[f] for f in address_fields)
and len(parent.child_ids) == 1
):
addr_vals = self._update_fields_values(address_fields)
parent.update_address(addr_vals)
@ -730,7 +752,7 @@ class Partner(models.Model):
self.invalidate_recordset(['user_ids'])
users = self.env['res.users'].sudo().search([('partner_id', 'in', self.ids)])
if users:
if self.env['res.users'].sudo(False).check_access_rights('write', raise_exception=False):
if self.env['res.users'].sudo(False).has_access('write'):
error_msg = _('You cannot archive contacts linked to an active user.\n'
'You first need to archive their associated user.\n\n'
'Linked active users : %(names)s', names=", ".join([u.display_name for u in users]))
@ -762,13 +784,13 @@ class Partner(models.Model):
partner.child_ids.write({'company_id': company_id})
result = True
# To write in SUPERUSER on field is_company and avoid access rights problems.
if 'is_company' in vals and self.user_has_groups('base.group_partner_manager') and not self.env.su:
if 'is_company' in vals and not self.env.su and self.env.user.has_group('base.group_partner_manager'):
result = super(Partner, self.sudo()).write({'is_company': vals.get('is_company')})
del vals['is_company']
result = result and super(Partner, self).write(vals)
result = result and super().write(vals)
for partner in self:
if any(u._is_internal() for u in partner.user_ids if u != self.env.user):
self.env['res.users'].check_access_rights('write')
self.env['res.users'].check_access('write')
partner._fields_sync(vals)
return result
@ -781,7 +803,7 @@ class Partner(models.Model):
vals['website'] = self._clean_website(vals['website'])
if vals.get('parent_id'):
vals['company_name'] = False
partners = super(Partner, self).create(vals_list)
partners = super().create(vals_list)
if self.env.context.get('_partners_skip_fields_sync'):
return partners
@ -799,7 +821,7 @@ class Partner(models.Model):
users = self.env['res.users'].sudo().search([('partner_id', 'in', self.ids)])
if not users:
return # no linked user, operation is allowed
if self.env['res.users'].sudo(False).check_access_rights('write', raise_exception=False):
if self.env['res.users'].sudo(False).has_access('write'):
error_msg = _('You cannot delete contacts linked to an active user.\n'
'You should rather archive them after archiving their associated user.\n\n'
'Linked active users : %(names)s', names=", ".join([u.display_name for u in users]))
@ -872,18 +894,6 @@ class Partner(models.Model):
'target': 'current',
}
def open_parent(self):
""" Utility method used to add an "Open Parent" button in partner views """
self.ensure_one()
address_form_id = self.env.ref('base.view_partner_address_form').id
return {'type': 'ir.actions.act_window',
'res_model': 'res.partner',
'view_mode': 'form',
'views': [(address_form_id, 'form')],
'res_id': self.parent_id.id,
'target': 'new',
}
@api.depends('complete_name', 'email', 'vat', 'state_id', 'country_id', 'commercial_company_name')
@api.depends_context('show_address', 'partner_show_db_id', 'address_inline', 'show_email', 'show_vat', 'lang')
def _compute_display_name(self):
@ -945,9 +955,10 @@ class Partner(models.Model):
if not parsed_email_normalized and assert_valid_email:
raise ValueError(_('A valid email is required for find_or_create to work properly.'))
partners = self.search([('email', '=ilike', parsed_email_normalized)], limit=1)
if partners:
return partners
if parsed_email_normalized:
partners = self.search([('email', '=ilike', parsed_email_normalized)], limit=1)
if partners:
return partners
create_values = {self._rec_name: parsed_name or parsed_email_normalized}
if parsed_email_normalized: # keep default_email in context
@ -967,10 +978,6 @@ class Partner(models.Model):
return False
return base64.b64encode(res.content)
def _email_send(self, email_from, subject, body, on_error=None):
warnings.warn("Partner._email_send has not done anything but raise errors since 15.0", stacklevel=2, category=DeprecationWarning)
return True
def address_get(self, adr_pref=None):
""" Find contacts/addresses of the right type(s) by doing a depth-first-search
through descendants within company boundaries (stop at entities flagged ``is_company``)
@ -1018,12 +1025,6 @@ class Partner(models.Model):
)
return super().view_header_get(view_id, view_type)
@api.model
@api.returns('self')
def main_partner(self):
''' Return the main partner '''
return self.env.ref('base.main_partner')
@api.model
def _get_default_address_format(self):
    """Return the fallback %-style template used to render a partner address."""
    address_template = "%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s"
    return address_template

View file

@@ -37,7 +37,7 @@ class ResUsersDeletion(models.Model):
user_deletion.user_id_int = user_deletion.user_id.id
@api.model
def _gc_portal_users(self, batch_size=10):
def _gc_portal_users(self, batch_size=50):
"""Remove the portal users that asked to deactivate their account.
(see <res.users>::_deactivate_portal_user)
@@ -53,6 +53,8 @@ class ResUsersDeletion(models.Model):
done_requests.state = "done"
todo_requests = delete_requests - done_requests
cron_done, cron_remaining = len(done_requests), len(todo_requests)
self.env['ir.cron']._notify_progress(done=cron_done, remaining=cron_remaining)
batch_requests = todo_requests[:batch_size]
auto_commit = not getattr(threading.current_thread(), "testing", False)
@@ -74,7 +76,9 @@ class ResUsersDeletion(models.Model):
user.id, user_name, requester_name, e)
delete_request.state = "fail"
# make sure we never rollback the work we've done, this can take a long time
cron_done, cron_remaining = cron_done + 1, cron_remaining - 1
if auto_commit:
self.env['ir.cron']._notify_progress(done=cron_done, remaining=cron_remaining)
self.env.cr.commit()
if delete_request.state == "fail":
continue
@@ -92,5 +96,4 @@ class ResUsersDeletion(models.Model):
# make sure we never rollback the work we've done, this can take a long time
if auto_commit:
self.env.cr.commit()
if len(todo_requests) > batch_size:
self.env.ref("base.ir_cron_res_users_deletion")._trigger()
self.env['ir.cron']._notify_progress(done=cron_done, remaining=cron_remaining)

View file

@@ -15,6 +15,11 @@ class ResUsersSettings(models.Model):
('unique_user_id', 'UNIQUE(user_id)', 'One user should only have one user settings.')
]
@api.model
def _get_fields_blacklist(self):
    """Return the names of fields excluded from settings formatting.

    Hook meant to be overridden; the base implementation excludes nothing.
    """
    return list()
@api.model
def _find_or_create_for_user(self, user):
settings = user.sudo().res_users_settings_ids
@ -24,8 +29,11 @@ class ResUsersSettings(models.Model):
def _res_users_settings_format(self, fields_to_format=None):
    """Return the formatted settings values for this (single) record.

    :param fields_to_format: optional list of field names to format; any
        blacklisted name (see :meth:`_get_fields_blacklist`) is dropped.
        When falsy, every non-automatic field (plus ``id``) that is not
        blacklisted is formatted.
    :return: result of :meth:`_format_settings` on the retained field names
    """
    # NOTE(review): the previous text carried a stale diff-merge leftover that
    # pre-filled fields_to_format with all fields before the blacklist was
    # fetched, making the `else` branch unreachable; reconciled to the
    # blacklist-aware version.
    self.ensure_one()
    fields_blacklist = self._get_fields_blacklist()
    if fields_to_format:
        # Keep only the requested fields that are not blacklisted.
        fields_to_format = [field for field in fields_to_format if field not in fields_blacklist]
    else:
        # Default: every non-automatic (or 'id') field not blacklisted.
        fields_to_format = [
            name for name, field in self._fields.items()
            if name == 'id' or (not field.automatic and name not in fields_blacklist)
        ]
    res = self._format_settings(fields_to_format)
    return res