19.0 vanilla

This commit is contained in:
Ernad Husremovic 2026-03-09 09:30:27 +01:00
parent d1963a3c3a
commit 2d3ee4855a
7430 changed files with 2687981 additions and 2965473 deletions

View file

@ -512,7 +512,7 @@ class IrActionsServerHistory(models.Model):
self.display_name = False
for history in self.filtered('create_date'):
locale = get_lang(self.env).code
tzinfo = pytz.timezone(self.env.user.tz)
tzinfo = self.env.tz
datetime = history.create_date.replace(microsecond=0)
datetime = pytz.utc.localize(datetime, is_dst=False)
datetime = datetime.astimezone(tzinfo) if tzinfo else datetime

View file

@ -22,7 +22,7 @@ from lxml import etree
from markupsafe import Markup
from odoo import api, fields, models, modules, tools, _
from odoo.exceptions import UserError, AccessError, RedirectWarning
from odoo.exceptions import UserError, AccessError, RedirectWarning, ValidationError
from odoo.fields import Domain
from odoo.service import security
from odoo.http import request, root
@ -80,6 +80,7 @@ class WkhtmlInfo(typing.NamedTuple):
dpi_zoom_ratio: bool
bin: str
version: str
is_patched_qt: bool
wkhtmltoimage_bin: str
wkhtmltoimage_version: tuple[str, ...] | None
@ -89,6 +90,7 @@ def _wkhtml() -> WkhtmlInfo:
state = 'install'
bin_path = 'wkhtmltopdf'
version = ''
is_patched_qt = False
dpi_zoom_ratio = False
try:
bin_path = find_in_path('wkhtmltopdf')
@ -101,6 +103,8 @@ def _wkhtml() -> WkhtmlInfo:
_logger.info('Will use the Wkhtmltopdf binary at %s', bin_path)
out, _err = process.communicate()
version = out.decode('ascii')
if '(with patched qt)' in version:
is_patched_qt = True
match = re.search(r'([0-9.]+)', version)
if match:
version = match.group(0)
@ -144,6 +148,7 @@ def _wkhtml() -> WkhtmlInfo:
dpi_zoom_ratio=dpi_zoom_ratio,
bin=bin_path,
version=version,
is_patched_qt=is_patched_qt,
wkhtmltoimage_bin=image_bin_path,
wkhtmltoimage_version=wkhtmltoimage_version,
)
@ -327,7 +332,7 @@ class IrActionsReport(models.Model):
command_args.extend(['--page-width', str(paperformat_id.page_width) + 'mm'])
command_args.extend(['--page-height', str(paperformat_id.page_height) + 'mm'])
if specific_paperformat_args and specific_paperformat_args.get('data-report-margin-top'):
if specific_paperformat_args and 'data-report-margin-top' in specific_paperformat_args:
command_args.extend(['--margin-top', str(specific_paperformat_args['data-report-margin-top'])])
else:
command_args.extend(['--margin-top', str(paperformat_id.margin_top)])
@ -346,14 +351,14 @@ class IrActionsReport(models.Model):
if _wkhtml().dpi_zoom_ratio:
command_args.extend(['--zoom', str(96.0 / dpi)])
if specific_paperformat_args and specific_paperformat_args.get('data-report-header-spacing'):
if specific_paperformat_args and 'data-report-header-spacing' in specific_paperformat_args:
command_args.extend(['--header-spacing', str(specific_paperformat_args['data-report-header-spacing'])])
elif paperformat_id.header_spacing:
command_args.extend(['--header-spacing', str(paperformat_id.header_spacing)])
command_args.extend(['--margin-left', str(paperformat_id.margin_left)])
if specific_paperformat_args and specific_paperformat_args.get('data-report-margin-bottom'):
if specific_paperformat_args and 'data-report-margin-bottom' in specific_paperformat_args:
command_args.extend(['--margin-bottom', str(specific_paperformat_args['data-report-margin-bottom'])])
else:
command_args.extend(['--margin-bottom', str(paperformat_id.margin_bottom)])
@ -457,15 +462,15 @@ class IrActionsReport(models.Model):
return bodies, res_ids, header, footer, specific_paperformat_args
def _run_wkhtmltoimage(self, bodies, width, height, image_format="jpg"):
def _run_wkhtmltoimage(self, bodies, width, height, image_format="jpg") -> list[bytes | None]:
"""
:bodies str: valid html documents as strings
:param width int: width in pixels
:param height int: height in pixels
:param image_format union['jpg', 'png']: format of the image
:return list[bytes|None]:
:param str bodies: valid html documents as strings
:param int width: width in pixels
:param int height: height in pixels
:param image_format: format of the image
:type image_format: typing.Literal['jpg', 'png']
"""
if (modules.module.current_test or tools.config['test_enable']) and not self.env.context.get('force_image_rendering'):
if modules.module.current_test:
return [None] * len(bodies)
wkhtmltoimage_version = _wkhtml().wkhtmltoimage_version
if not wkhtmltoimage_version or wkhtmltoimage_version < parse_version('0.12.0'):
@ -617,8 +622,7 @@ class IrActionsReport(models.Model):
pass
case 1:
if body_idx:
wk_version = _wkhtml().version
if '(with patched qt)' not in wk_version:
if not _wkhtml().is_patched_qt:
if modules.module.current_test:
raise unittest.SkipTest("Unable to convert multiple documents via wkhtmltopdf using unpatched QT")
raise UserError(_("Tried to convert multiple documents in wkhtmltopdf using unpatched QT"))
@ -798,7 +802,10 @@ class IrActionsReport(models.Model):
handle_error(error=e, error_stream=stream)
result_stream = io.BytesIO()
streams.append(result_stream)
writer.write(result_stream)
try:
writer.write(result_stream)
except PdfReadError:
raise UserError(_("Odoo is unable to merge the generated PDFs."))
return result_stream
def _render_qweb_pdf_prepare_streams(self, report_ref, data, res_ids=None):
@ -1198,28 +1205,10 @@ class IrActionsReport(models.Model):
@api.model
def _prepare_local_attachments(self, attachments):
attachments_with_data = self.env['ir.attachment']
for attachment in attachments:
if not attachment._is_remote_source():
attachments_with_data |= attachment
elif (stream := attachment._to_http_stream()) and stream.url:
# call `_to_http_stream()` in case the attachment is an url or cloud storage attachment
if attachment._is_remote_source():
try:
response = requests.get(stream.url, timeout=10)
response.raise_for_status()
attachment_data = response.content
if not attachment_data:
_logger.warning("Attachment %s at with URL %s retrieved successfully, but no content was found.", attachment.id, attachment.url)
continue
attachments_with_data |= self.env['ir.attachment'].new({
'db_datas': attachment_data,
'name': attachment.name,
'mimetype': attachment.mimetype,
'res_model': attachment.res_model,
'res_id': attachment.res_id
})
except requests.exceptions.RequestException as e:
_logger.error("Request for attachment %s with URL %s failed: %s", attachment.id, attachment.url, e)
else:
_logger.error("Unexpected edge case: Is not being considered as a local or remote attachment, attachment ID:%s will be skipped.", attachment.id)
return attachments_with_data
attachment._migrate_remote_to_local()
except (ValidationError, requests.exceptions.RequestException) as e:
_logger.error("Failed to migrate attachment %s to local: %s", attachment.id, e)
return attachments.filtered(lambda a: not a._is_remote_source())

View file

@ -166,11 +166,13 @@ class IrAsset(models.Model):
:param bundle: name of the bundle from which to fetch the file paths
:param addons: list of addon names as strings
:param css: boolean: whether or not to include style files
:param js: boolean: whether or not to include script files
:param xml: boolean: whether or not to include template files
:param asset_paths: the AssetPath object to fill
:param seen: a list of bundles already checked to avoid circularity
:param assets_params: Keyword arguments:
* css: bool: whether or not to include style files
* js: bool: whether or not to include script files
* xml: bool: whether or not to include template files
"""
if bundle in seen:
raise Exception("Circular assets bundle declaration: %s" % " > ".join(seen + [bundle]))
@ -343,7 +345,8 @@ class IrAsset(models.Model):
if addon_manifest:
if addon not in installed:
# Assert that the path is in the installed addons
raise Exception(f"Unallowed to fetch files from addon {addon} for file {path_def}")
raise Exception(f"""Unallowed to fetch files from addon {addon} for file {path_def}. """
f"""Addon {addon} is not installed""")
addons_path = addon_manifest.addons_path
full_path = os.path.normpath(os.path.join(addons_path, *path_parts))
# forbid escape from the current addon

View file

@ -7,22 +7,35 @@ import hashlib
import logging
import mimetypes
import os
import psycopg2
import re
import uuid
import warnings
import werkzeug
from collections import defaultdict
from collections.abc import Collection
from odoo import api, fields, models, _
from odoo.exceptions import AccessError, MissingError, ValidationError, UserError
import psycopg2
import werkzeug
from odoo import _, api, fields, models
from odoo.exceptions import AccessError, MissingError, UserError, ValidationError
from odoo.fields import Domain
from odoo.http import Stream, root, request
from odoo.tools import config, consteq, human_size, image, split_every, str2bool, OrderedSet
from odoo.http import Stream, request, root
from odoo.tools import (
OrderedSet,
config,
consteq,
human_size,
image,
split_every,
str2bool,
)
from odoo.tools.constants import PREFETCH_MAX
from odoo.tools.mimetypes import guess_mimetype, fix_filename_extension, _olecf_mimetypes
from odoo.tools.mimetypes import (
MIMETYPE_HEAD_SIZE,
_olecf_mimetypes,
fix_filename_extension,
guess_mimetype,
)
from odoo.tools.misc import limited_field_access_token
_logger = logging.getLogger(__name__)
@ -257,6 +270,14 @@ class IrAttachment(models.Model):
else:
attach.raw = attach.db_datas
def _get_pdf_raw(self):
self.ensure_one()
if self.type != 'binary':
return False
if self.mimetype != 'application/pdf':
return False
return self.raw
def _inverse_raw(self):
self._set_attachment_data(lambda a: a.raw or b'')
@ -378,7 +399,10 @@ class IrAttachment(models.Model):
nw, nh = map(int, max_resolution.split('x'))
if w > nw or h > nh:
img = img.resize(nw, nh)
quality = int(ICP('base.image_autoresize_quality', 80))
if _subtype == 'jpeg': # Do not affect PNGs color palette
quality = int(ICP('base.image_autoresize_quality', 80))
else:
quality = 0
image_data = img.image_quality(quality=quality)
if is_raw:
values['raw'] = image_data
@ -407,19 +431,15 @@ class IrAttachment(models.Model):
return values
@api.model
def _index(self, bin_data, file_type, checksum=None):
def _index(self, bin_data: bytes, file_type: str, checksum=None) -> str | None:
""" compute the index content of the given binary data.
This is a python implementation of the unix command 'strings'.
:param bin_data : datas in binary form
:return index_content : string containing all the printable character of the binary data
This is a python implementation of the unix command 'strings'.
"""
index_content = False
if file_type:
index_content = file_type.split('/')[0]
if index_content == 'text': # compute index_content only for text type
words = re.findall(b"[\x20-\x7E]{4,}", bin_data)
index_content = b"\n".join(words).decode('ascii')
return index_content
# compute index_content only for text type
if file_type and file_type.startswith('text/'):
words = re.findall(rb"[\x20-\x7E]{4,}", bin_data)
return b"\n".join(words).decode('ascii')
return None
@api.model
def get_serving_groups(self):
@ -595,6 +615,7 @@ class IrAttachment(models.Model):
if (
not self.env.context.get('skip_res_field_check')
and not any(d.field_expr in ('id', 'res_field') for d in domain.iter_conditions())
and not bypass_access
):
disable_binary_fields_attachments = True
domain &= Domain('res_field', '=', False)
@ -734,17 +755,20 @@ class IrAttachment(models.Model):
checksum_raw_map = {}
for values in vals_list:
values = self._check_contents(values)
raw, datas = values.pop('raw', None), values.pop('datas', None)
if raw or datas:
# needs to be popped in all cases to bypass `_inverse_datas`
datas = values.pop('datas', None)
if raw := values.get('raw'):
if isinstance(raw, str):
# b64decode handles str input but raw needs explicit encoding
raw = raw.encode()
elif not raw:
raw = base64.b64decode(datas or b'')
values['raw'] = raw.encode()
elif datas:
values['raw'] = base64.b64decode(datas)
else:
values['raw'] = b''
values = self._check_contents(values)
if raw := values.pop('raw'):
values.update(self._get_datas_related_values(raw, values['mimetype']))
if raw:
checksum_raw_map[values['checksum']] = raw
checksum_raw_map[values['checksum']] = raw
# 'check()' only uses res_model and res_id from values, and make an exists.
# We can group the values by model, res_id to make only one query when
@ -858,7 +882,7 @@ class IrAttachment(models.Model):
mimetype = file.content_type
filename = file.filename
elif mimetype == 'GUESS':
head = file.read(1024)
head = file.read(MIMETYPE_HEAD_SIZE)
file.seek(-len(head), 1) # rewind
mimetype = guess_mimetype(head)
filename = fix_filename_extension(file.filename, mimetype)
@ -943,3 +967,9 @@ class IrAttachment(models.Model):
self.check_access('read')
return True
return super()._can_return_content(field_name, access_token)
def _migrate_remote_to_local(self):
if self.type == 'binary':
return
if self.type == 'url':
raise ValidationError(_("URL attachment (%s) shouldn't be migrated to local.", self.id))

View file

@ -1,17 +1,17 @@
import logging
import werkzeug.http
from datetime import datetime
from mimetypes import guess_extension
import werkzeug.http
from odoo import models
from odoo.exceptions import AccessError, MissingError, UserError
from odoo.exceptions import MissingError, UserError
from odoo.http import Stream, request
from odoo.tools import file_open, replace_exceptions
from odoo.tools.image import image_process, image_guess_size_from_field_name
from odoo.tools.mimetypes import guess_mimetype, get_extension
from odoo.tools.image import image_guess_size_from_field_name, image_process
from odoo.tools.mimetypes import MIMETYPE_HEAD_SIZE, get_extension, guess_mimetype
from odoo.tools.misc import verify_limited_field_access_token
DEFAULT_PLACEHOLDER_PATH = 'web/static/img/placeholder.png'
_logger = logging.getLogger(__name__)
@ -128,10 +128,10 @@ class IrBinary(models.AbstractModel):
stream.mimetype = mimetype
elif not stream.mimetype:
if stream.type == 'data':
head = stream.data[:1024]
head = stream.data[:MIMETYPE_HEAD_SIZE]
else:
with open(stream.path, 'rb') as file:
head = file.read(1024)
head = file.read(MIMETYPE_HEAD_SIZE)
stream.mimetype = guess_mimetype(head, default=default_mimetype)
if filename:

View file

@ -1,17 +1,21 @@
from __future__ import annotations
import contextvars
import copy
import logging
import os
import threading
import time
import os
import psycopg2
import psycopg2.errors
import typing
from datetime import datetime, timedelta, timezone
import psycopg2
import psycopg2.errors
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, sql_db
from odoo.exceptions import LockError, UserError
from odoo.http import serialize_exception
from odoo.modules import Manifest
from odoo.modules.registry import Registry
from odoo.tools import SQL
@ -19,6 +23,7 @@ from odoo.tools.constants import GC_UNLINK_LIMIT
if typing.TYPE_CHECKING:
from collections.abc import Iterable
from odoo.sql_db import BaseCursor
_logger = logging.getLogger(__name__)
@ -59,6 +64,30 @@ class CompletionStatus: # inherit from enum.StrEnum in 3.11
FAILED = 'failed'
class ListLogHandler(logging.Handler):
def __init__(self, logger, level=logging.NOTSET):
super().__init__(level)
self.logger = logger
self.list_log_handler = contextvars.ContextVar('list_log_handler', default=None)
def emit(self, record):
logs = self.list_log_handler.get(None)
if logs is None:
return
record = copy.copy(record)
logs.append(record)
def __enter__(self):
# set a list in the current context
logs = []
self.list_log_handler.set(logs)
self.logger.addHandler(self)
return logs
def __exit__(self, *exc):
self.logger.removeHandler(self)
class IrCron(models.Model):
""" Model describing cron jobs (also called actions or tasks).
"""
@ -135,7 +164,23 @@ class IrCron(models.Model):
job = self._acquire_one_job(cron_cr, self.id, include_not_ready=True)
if not job:
raise UserError(self.env._("Job '%s' already executing", self.name))
self._process_job(cron_cr, job)
with ListLogHandler(_logger, logging.ERROR) as capture:
self._process_job(cron_cr, job)
if log_record := next((lr for lr in capture if getattr(lr, 'exc_info', None)), None):
_exc_type, exception, _traceback = log_record.exc_info
e = RuntimeError()
e.__cause__ = exception
error = {
'code': 0, # we don't care of this code
'message': "Odoo Server Error",
'data': serialize_exception(e),
}
return {
'type': 'ir.actions.client',
'tag': 'display_exception',
'params': error,
}
return True
@staticmethod
@ -187,7 +232,7 @@ class IrCron(models.Model):
continue
_logger.debug("job %s acquired", job_id)
# take into account overridings of _process_job() on that database
registry = Registry(db_name)
registry = Registry(db_name).check_signaling()
registry[IrCron._name]._process_job(cron_cr, job)
cron_cr.commit()
_logger.debug("job %s updated and released", job_id)
@ -447,39 +492,65 @@ class IrCron(models.Model):
# stop after MIN_RUNS_PER_JOB runs and MIN_TIME_PER_JOB seconds, or
# upon full completion or failure
while (
while status is None and (
loop_count < MIN_RUNS_PER_JOB
or time.monotonic() < env.context['cron_end_time']
):
cron, progress = cron._add_progress(timed_out_counter=timed_out_counter)
job_cr.commit()
success = False
try:
# signaling check and commit is done inside `_callback`
cron._callback(job['cron_name'], job['ir_actions_server_id'])
success = True
except Exception: # noqa: BLE001
_logger.exception('Job %r (%s) server action #%s failed',
job['cron_name'], job['id'], job['ir_actions_server_id'])
if progress.done and progress.remaining:
# we do not consider it a failure if some progress has
# been committed
status = CompletionStatus.PARTIALLY_DONE
else:
status = CompletionStatus.FAILED
else:
if not progress.remaining:
# assume the server action doesn't use the progress API
# and that there is nothing left to process
status = CompletionStatus.FULLY_DONE
else:
status = CompletionStatus.PARTIALLY_DONE
if not progress.done:
break
if status == CompletionStatus.FULLY_DONE and progress.deactivate:
job['active'] = False
finally:
done, remaining = progress.done, progress.remaining
match (success, done, remaining):
case (False, d, r) if d and r:
# The cron action failed but was nonetheless able
# to commit some progress.
# Hopefully this failure is temporary.
pass
case (False, _, _):
# The cron action failed, and was unable to commit
# any progress this time. Consider it failed even
# if it progressed in a previous loop iteration.
status = CompletionStatus.FAILED
case (True, _, 0):
# The cron action completed. Either it doesn't use
# the progress API, either it reported no remaining
# stuff to process.
status = CompletionStatus.FULLY_DONE
if progress.deactivate:
job['active'] = False
case (True, 0, _) if loop_count == 0:
# The cron action was able to determine there are
# remaining records to process, but couldn't
# process any of them.
# Hopefully this condition is temporary.
status = CompletionStatus.PARTIALLY_DONE
_logger.warning("Job %r (%s) processed no record",
job['cron_name'], job['id'])
case (True, 0, _):
# The cron action was able to determine there are
# remaining records to process, did process some
# records in a previous loop iteration, but
# processed none this time.
status = CompletionStatus.PARTIALLY_DONE
case (True, _, _):
# The cron action was able to process some but not
# all records. Loop.
pass
loop_count += 1
progress.timed_out_counter = 0
timed_out_counter = 0
@ -488,9 +559,7 @@ class IrCron(models.Model):
_logger.debug('Job %r (%s) processed %s records, %s records remaining',
job['cron_name'], job['id'], done, remaining)
if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED):
break
status = status or CompletionStatus.PARTIALLY_DONE
_logger.info(
'Job %r (%s) %s (#loop %s; done %s; remaining %s; duration %.2fs)',
job['cron_name'], job['id'], status,

View file

@ -26,13 +26,20 @@ class IrDefault(models.Model):
condition = fields.Char('Condition', help="If set, applies the default upon condition.")
json_value = fields.Char('Default Value (JSON format)', required=True)
@api.constrains('json_value')
@api.constrains('json_value', 'field_id')
def _check_json_format(self):
for record in self:
model_name = record.sudo().field_id.model_id.model
model = self.env[model_name]
field = model._fields[record.field_id.name]
try:
json.loads(record.json_value)
value = json.loads(record.json_value)
field.convert_to_cache(value, model)
except json.JSONDecodeError:
raise ValidationError(self.env._('Invalid JSON format in Default Value field.'))
except Exception: # noqa: BLE001
raise ValidationError(self.env._("Invalid value in Default Value field. Expected type '%(field_type)s' for '%(model_name)s.%(field_name)s'.",
field_type=record.field_id.ttype, model_name=model_name, field_name=record.field_id.name))
@api.model_create_multi
def create(self, vals_list):
@ -102,7 +109,7 @@ class IrDefault(models.Model):
('user_id', '=', user_id),
('company_id', '=', company_id),
('condition', '=', condition),
])
], limit=1)
if default:
# Avoid clearing the cache if nothing changes
if default.json_value != json_value:

View file

@ -158,12 +158,10 @@ class IrHttp(models.AbstractModel):
uni = unicodedata.normalize('NFKD', value)
slugified_segments = []
for slug in re.split('-|_| ', uni):
slug = re.sub(r'([^\w-])+', '', slug)
slug = re.sub(r'--+', '-', slug)
slug = slug.strip('-')
slug = re.sub(r'([^\w])+', '', slug)
if slug:
slugified_segments.append(slug.lower())
slugified_str = '-'.join(slugified_segments)
slugified_str = unicodedata.normalize('NFC', '-'.join(slugified_segments))
return slugified_str[:max_length]
@classmethod

View file

@ -35,7 +35,8 @@ class IrLogging(models.Model):
def init(self):
super(IrLogging, self).init()
self.env.cr.execute("select 1 from information_schema.constraint_column_usage where table_name = 'ir_logging' and constraint_name = 'ir_logging_write_uid_fkey'")
self.env.cr.execute("select 1 from information_schema.constraint_column_usage where table_name = 'ir_logging' and constraint_name = 'ir_logging_write_uid_fkey'"
" and table_schema = current_schema")
if self.env.cr.rowcount:
# DROP CONSTRAINT unconditionally takes an ACCESS EXCLUSIVE lock
# on the table, even "IF EXISTS" is set and not matching; disabling

View file

@ -1,27 +1,52 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import datetime
import email
import email.policy
import functools
import idna
import logging
import re
import smtplib
import ssl
from email.message import EmailMessage
from email.parser import BytesParser
from email.utils import make_msgid
from socket import gaierror, timeout
import idna
import OpenSSL
from OpenSSL import crypto as SSLCrypto
from OpenSSL.crypto import Error as SSLCryptoError, FILETYPE_PEM
from OpenSSL.SSL import Error as SSLError, VERIFY_PEER, VERIFY_FAIL_IF_NO_PEER_CERT
from OpenSSL.crypto import FILETYPE_PEM
from OpenSSL.crypto import Error as SSLCryptoError
from OpenSSL.SSL import VERIFY_FAIL_IF_NO_PEER_CERT, VERIFY_PEER
from OpenSSL.SSL import Error as SSLError
from urllib3.contrib.pyopenssl import PyOpenSSLContext, get_subj_alt_name
from odoo import api, fields, models, tools, _, modules
from odoo import _, api, fields, models, modules, tools
from odoo.exceptions import UserError
from odoo.tools import formataddr, email_normalize, encapsulate_email, email_domain_extract, email_domain_normalize, human_size
from odoo.tools import (
email_domain_extract,
email_domain_normalize,
email_normalize,
encapsulate_email,
formataddr,
human_size,
parse_version,
)
if parse_version(OpenSSL.__version__) >= parse_version('24.3.0'):
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509 import load_pem_x509_certificate
else:
from OpenSSL import crypto as SSLCrypto
from OpenSSL.crypto import FILETYPE_PEM
from OpenSSL.crypto import Error as SSLCryptoError
def load_pem_private_key(pem_key, password):
return SSLCrypto.load_privatekey(FILETYPE_PEM, pem_key)
def load_pem_x509_certificate(pem_cert):
return SSLCrypto.load_certificate(FILETYPE_PEM, pem_cert)
try:
# urllib3 1.26 (ubuntu jammy and up, debian bullseye and up)
@ -423,12 +448,11 @@ class IrMail_Server(models.Model):
)
else: # ssl, starttls
ssl_context.verify_mode = ssl.CERT_NONE
smtp_ssl_certificate = base64.b64decode(mail_server.smtp_ssl_certificate)
certificate = SSLCrypto.load_certificate(FILETYPE_PEM, smtp_ssl_certificate)
smtp_ssl_private_key = base64.b64decode(mail_server.smtp_ssl_private_key)
private_key = SSLCrypto.load_privatekey(FILETYPE_PEM, smtp_ssl_private_key)
ssl_context._ctx.use_certificate(certificate)
ssl_context._ctx.use_privatekey(private_key)
ssl_context._ctx.use_certificate(load_pem_x509_certificate(
base64.b64decode(mail_server.smtp_ssl_certificate)))
ssl_context._ctx.use_privatekey(load_pem_private_key(
base64.b64decode(mail_server.smtp_ssl_private_key),
password=None))
# Check that the private key match the certificate
ssl_context._ctx.check_privatekey()
except SSLCryptoError as e:
@ -607,8 +631,7 @@ class IrMail_Server(models.Model):
for (fname, fcontent, mime) in attachments:
maintype, subtype = mime.split('/') if mime and '/' in mime else ('application', 'octet-stream')
if maintype == 'message' and subtype == 'rfc822':
# Use binary encoding for "message/rfc822" attachments (see RFC 2046 Section 5.2.1)
msg.add_attachment(fcontent, maintype, subtype, filename=fname, cte='binary')
msg.add_attachment(BytesParser().parsebytes(fcontent), filename=fname)
else:
msg.add_attachment(fcontent, maintype, subtype, filename=fname)
return msg

View file

@ -365,6 +365,11 @@ class IrModel(models.Model):
if crons:
crons.unlink()
# delete related ir_model_data
model_data = self.env['ir.model.data'].search([('model', 'in', self.mapped('model'))])
if model_data:
model_data.unlink()
self._drop_table()
res = super().unlink()
@ -625,7 +630,12 @@ class IrModelFields(models.Model):
@api.constrains('domain')
def _check_domain(self):
for field in self:
safe_eval(field.domain or '[]')
try:
safe_eval(field.domain or '[]')
except ValueError as e:
raise ValidationError(
_("An error occurred while evaluating the domain:\n%(error)s", error=e)
) from e
@api.constrains('name')
def _check_name(self):
@ -1015,11 +1025,16 @@ class IrModelFields(models.Model):
if relation and not IrModel._get_id(relation):
raise UserError(_("Model %s does not exist!", vals['relation']))
if vals.get('ttype') == 'one2many' and not self.search_count([
('ttype', '=', 'many2one'),
('model', '=', vals['relation']),
('name', '=', vals['relation_field']),
]):
if (
vals.get('ttype') == 'one2many' and
vals.get("store", True) and
not vals.get("related") and
not self.search_count([
('ttype', '=', 'many2one'),
('model', '=', vals['relation']),
('name', '=', vals['relation_field']),
])
):
raise UserError(_("Many2one %(field)s on model %(model)s does not exist!", field=vals['relation_field'], model=vals['relation']))
if any(model in self.pool for model in models):
@ -1084,6 +1099,11 @@ class IrModelFields(models.Model):
_logger.warning("Deprecated since Odoo 19, ir.model.fields.translate becomes Selection, the value should be a string")
vals['translate'] = 'html_translate' if vals.get('ttype') == 'html' else 'standard'
if column_rename and self.state == 'manual':
# renaming a studio field, remove inherits fields
# we need to set the uninstall flag to allow removing them
(self._prepare_update() - self).with_context(**{MODULE_UNINSTALL_FLAG: True}).unlink()
res = super(IrModelFields, self).write(vals)
self.env.flush_all()
@ -1752,6 +1772,10 @@ class IrModelFieldsSelection(models.Model):
if not field or not field.store or not Model._auto:
continue
# Field changed its type, skip it.
if field.type not in ('selection', 'reference'):
continue
ondelete = (field.ondelete or {}).get(selection.value)
# special case for custom fields
if ondelete is None and field.manual and not field.required:
@ -1760,33 +1784,53 @@ class IrModelFieldsSelection(models.Model):
if ondelete is None:
# nothing to do, the selection does not come from a field extension
continue
elif callable(ondelete):
ondelete(selection._get_records())
elif ondelete == 'set null':
safe_write(selection._get_records(), field.name, False)
elif ondelete == 'set default':
value = field.convert_to_write(field.default(Model), Model)
safe_write(selection._get_records(), field.name, value)
elif ondelete.startswith('set '):
safe_write(selection._get_records(), field.name, ondelete[4:])
elif ondelete == 'cascade':
selection._get_records().unlink()
else:
# this shouldn't happen... simply a sanity check
raise ValueError(_(
'The ondelete policy "%(policy)s" is not valid for field "%(field)s"',
policy=ondelete, field=selection,
))
companies = self.env.companies if self.field_id.company_dependent else [self.env.company]
for company in companies:
# make a company-specific env for the Model and selection
Model = Model.with_company(company.id)
selection = selection.with_company(company.id)
if callable(ondelete):
ondelete(selection._get_records())
elif ondelete == 'set null':
safe_write(selection._get_records(), field.name, False)
elif ondelete == 'set default':
value = field.convert_to_write(field.default(Model), Model)
safe_write(selection._get_records(), field.name, value)
elif ondelete.startswith('set '):
safe_write(selection._get_records(), field.name, ondelete[4:])
elif ondelete == 'cascade':
selection._get_records().unlink()
else:
# this shouldn't happen... simply a sanity check
raise ValueError(_(
'The ondelete policy "%(policy)s" is not valid for field "%(field)s"',
policy=ondelete, field=selection,
))
def _get_records(self):
""" Return the records having 'self' as a value. """
self.ensure_one()
Model = self.env[self.field_id.model]
Model.flush_model([self.field_id.name])
query = 'SELECT id FROM "{table}" WHERE "{field}"=%s'.format(
table=Model._table, field=self.field_id.name,
)
self.env.cr.execute(query, [self.value])
if self.field_id.company_dependent:
# company-dependent fields are stored as jsonb (e.g; {company_id: value})
query = SQL(
"SELECT id FROM %s WHERE %s ->> %s = %s",
SQL.identifier(Model._table),
SQL.identifier(self.field_id.name),
str(self.env.company.id),
self.value,
)
else:
# normal selection fields are stored as general datatype
query = SQL(
"SELECT id FROM %s WHERE %s = %s",
SQL.identifier(Model._table),
SQL.identifier(self.field_id.name),
self.value,
)
self.env.cr.execute(query)
return Model.browse(r[0] for r in self.env.cr.fetchall())
@ -1848,6 +1892,7 @@ class IrModelConstraint(models.Model):
JOIN pg_class cl
ON (cs.conrelid = cl.oid)
WHERE cs.contype IN %s AND cs.conname = %s AND cl.relname = %s
AND cl.relnamespace = current_schema::regnamespace
""", ('c', 'u', 'x') if typ == 'u' else (typ,), hname, table
)):
self.env.execute_query(SQL(
@ -2511,7 +2556,12 @@ class IrModelData(models.Model):
# remove non-model records first, grouped by batches of the same model
for model, items in itertools.groupby(unique(records_items), itemgetter(0)):
delete(self.env[model].browse(item[1] for item in items))
ids = [item[1] for item in items]
# we cannot guarantee that the ir.model.data points to an existing model
if model in self.env:
delete(self.env[model].browse(ids))
else:
_logger.info("Orphan ir.model.data records %s refer to unavailable model '%s'", ids, model)
# Remove copied views. This must happen after removing all records from
# the modules to remove, otherwise ondelete='restrict' may prevent the

View file

@ -13,6 +13,7 @@ from docutils import nodes
from docutils.core import publish_string
from docutils.transforms import Transform, writer_aux
from docutils.writers.html4css1 import Writer
from markupsafe import Markup
import lxml.html
import psycopg2
@ -21,6 +22,7 @@ from odoo import api, fields, models, modules, tools, _
from odoo.addons.base.models.ir_model import MODULE_UNINSTALL_FLAG
from odoo.exceptions import AccessDenied, UserError, ValidationError
from odoo.fields import Domain
from odoo.tools import config
from odoo.tools.parse_version import parse_version
from odoo.tools.misc import topological_sort, get_flag
from odoo.tools.translate import TranslationImporter, get_po_paths, get_datafile_translation_path
@ -122,7 +124,7 @@ class MyFilterMessages(Transform):
nodes_iter = self.document.traverse(nodes.system_message)
for node in nodes_iter:
_logger.warning("docutils' system message present: %s", str(node))
_logger.debug("docutils' system message present: %s", str(node))
node.parent.remove(node)
@ -196,7 +198,14 @@ class IrModuleModule(models.Model):
'xml_declaration': False,
'file_insertion_enabled': False,
}
output = publish_string(source=module.description if not module.application and module.description else '', settings_overrides=overrides, writer=MyWriter())
raw_description = module.description or ''
try:
output = publish_string(source=raw_description, settings_overrides=overrides, writer=MyWriter())
except Exception as e: # noqa: BLE001
_logger.warning("Failed to render module description for %s: %s. Falling back to raw description.", module.name, e)
output = Markup('<pre><code>%s</code></pre>') % raw_description
module.description_html = _apply_description_images(output)
@api.depends('name')
@ -355,7 +364,7 @@ class IrModuleModule(models.Model):
install_package = None
if platform.system() == 'Linux':
distro = platform.freedesktop_os_release()
id_likes = {distro['ID'], *distro.get('ID_LIKE').split()}
id_likes = {distro['ID'], *distro.get('ID_LIKE', '').split()}
if 'debian' in id_likes or 'ubuntu' in id_likes:
if package := manifest['external_dependencies'].get('apt', {}).get(e.dependency):
install_package = f'apt install {package}'
@ -417,7 +426,11 @@ class IrModuleModule(models.Model):
modules._state_update('to install', ['uninstalled'])
# Determine which auto-installable modules must be installed.
modules = self.search(auto_domain).filtered(must_install)
if config.get('skip_auto_install'):
modules = None
else:
modules = self.search(auto_domain).filtered(must_install)
# the modules that are installed/to install/to upgrade
install_mods = self.search([('state', 'in', list(install_states))])
@ -483,12 +496,12 @@ class IrModuleModule(models.Model):
def button_reset_state(self):
# reset the transient state for all modules in case the module operation is stopped in an unexpected way.
self.search([('state', '=', 'to install')]).state = 'uninstalled'
self.search([('state', 'in', ('to update', 'to remove'))]).state = 'installed'
self.search([('state', 'in', ('to upgrade', 'to remove'))]).state = 'installed'
return True
@api.model
def check_module_update(self):
return bool(self.sudo().search_count([('state', 'in', ('to install', 'to update', 'to remove'))], limit=1))
return bool(self.sudo().search_count([('state', 'in', ('to install', 'to upgrade', 'to remove'))], limit=1))
@assert_log_admin_access
def module_uninstall(self):

View file

@ -480,6 +480,10 @@ T_CALL_SLOT = '0'
ETREE_TEMPLATE_REF = count()
# Only allow a javascript scheme if it is followed by [ ][window.]history.back()
MALICIOUS_SCHEMES = re.compile(r'javascript:(?!( ?)((window\.)?)history\.back\(\)$)', re.I).findall
def _id_or_xmlid(ref):
try:
return int(ref)
@ -530,13 +534,14 @@ class QWebError(Exception):
class QWebErrorInfo:
def __init__(self, error: str, ref_name: str | int | None, ref: int | None, path: str | None, element: str | None, source: list[tuple[int | str, str, str]]):
def __init__(self, error: str, ref_name: str | int | None, ref: int | None, path: str | None, element: str | None, source: list[tuple[int | str, str, str]], surrounding: str):
self.error = error
self.template = ref_name
self.ref = ref
self.path = path
self.element = element
self.source = source
self.surrounding = surrounding
def __str__(self):
info = [self.error]
@ -551,6 +556,8 @@ class QWebErrorInfo:
if self.source:
source = '\n '.join(str(v) for v in self.source)
info.append(f'From: {source}')
if self.surrounding:
info.append(f'QWeb generated code:\n{self.surrounding}')
return '\n '.join(info)
@ -846,6 +853,8 @@ class IrQweb(models.AbstractModel):
raise
def _get_error_info(self, error, stack: list[QwebStackFrame], frame: QwebStackFrame) -> QWebErrorInfo:
no_id_ref = 'etree._Element'
path = None
html = None
loaded_codes = self.env.context['__qweb_loaded_codes']
@ -855,7 +864,7 @@ class IrQweb(models.AbstractModel):
options = self.env.context['__qweb_loaded_options'].get(frame.params.view_ref) or {}
ref = options.get('ref') or frame.params.view_ref # The template can have a null reference, for example for a provided etree.
ref_name = options.get('ref_name') or None
code = loaded_codes.get(frame.params.view_ref) or loaded_codes.get(False)
code = loaded_codes.get(frame.params.view_ref) or loaded_codes.get(no_id_ref)
if ref == self.env.context['_qweb_error_path_xml'][0]:
path = self.env.context['_qweb_error_path_xml'][1]
html = self.env.context['_qweb_error_path_xml'][2]
@ -864,23 +873,25 @@ class IrQweb(models.AbstractModel):
options = stack[-2].options or {} # The compilation may have failed before the compilation options were loaded.
ref = options.get('ref')
ref_name = options.get('ref_name')
code = loaded_codes.get(ref) or loaded_codes.get(False)
code = loaded_codes.get(ref) or loaded_codes.get(no_id_ref)
if frame.params.path_xml:
path = frame.params.path_xml[1]
html = frame.params.path_xml[2]
source_file_ref = None if ref == no_id_ref else ref
line_nb = 0
trace = traceback.format_exc()
for error_line in reversed(trace.split('\n')):
if f'File "<{ref}>"' in error_line or (ref is None and 'File "<' in error_line):
if f'File "<{source_file_ref}>"' in error_line or (ref is None and 'File "<' in error_line):
line_function = error_line.split(', line ')[1]
line_nb = int(line_function.split(',')[0])
break
source = [info.params.path_xml for info in stack if info.params.path_xml]
code_lines = (code or '').split('\n')
found = False
for code_line in reversed((code or '').split('\n')[:line_nb]):
for code_line in reversed(code_lines[:line_nb]):
if code_line.startswith('def '):
break
match = re.match(r'\s*# element: (.*) , (.*)', code_line)
@ -900,7 +911,24 @@ class IrQweb(models.AbstractModel):
if path:
source.append((ref, path, html))
return QWebErrorInfo(f'{error.__class__.__name__}: {error}', ref if ref_name is None else ref_name, ref, path, html, source)
surrounding = None
if self.env.context.get('dev_mode') and line_nb:
if html and ' t-if=' in html and ' if ' in '\n'.join(code_lines[line_nb - 2:line_nb - 1]):
line_nb -= 1
previous_lines = '\n'.join(code_lines[max(line_nb - 25, 0):line_nb - 1])
line = code_lines[line_nb - 1]
next_lines = '\n'.join(code_lines[line_nb:line_nb + 5])
indent = re.search(r"^(\s*)", line).group(0)
surrounding = textwrap.indent(
textwrap.dedent(
f"{previous_lines}\n"
f"{indent}########### Line triggering the error ############\n{line}\n"
f"{indent}##################################################\n{next_lines}"
),
' ' * 8
)
return QWebErrorInfo(f'{error.__class__.__name__}: {error}', ref if ref_name is None else ref_name, ref, path, html, source, surrounding)
# assume cache will be invalidated by third party on write to ir.ui.view
def _get_template_cache_keys(self):
@ -1680,7 +1708,7 @@ class IrQweb(models.AbstractModel):
""" Compile a purely static element into a list of string. """
if not el.nsmap:
unqualified_el_tag = el_tag = el.tag
attrib = self._post_processing_att(el.tag, el.attrib)
attrib = self._post_processing_att(el.tag, {**el.attrib, '__is_static_node': True})
else:
# Etree will remove the ns prefixes indirection by inlining the corresponding
# nsmap definition into the tag attribute. Restore the tag and prefix here.
@ -1711,7 +1739,7 @@ class IrQweb(models.AbstractModel):
else:
attrib[name] = value
attrib = self._post_processing_att(el.tag, attrib)
attrib = self._post_processing_att(el.tag, {**attrib, '__is_static_node': True})
# Update the dict of inherited namespaces before continuing the recursion. Note:
# since `compile_context['nsmap']` is a dict (and therefore mutable) and we do **not**
@ -2014,7 +2042,10 @@ class IrQweb(models.AbstractModel):
code.append(indent_code(f"values[{varname!r}] = {self._compile_format(exprf)}", level))
elif 't-valuef.translate' in el.attrib:
exprf = el.attrib.pop('t-valuef.translate')
code.append(indent_code(f"values[{varname!r}] = {self._compile_format(exprf)}", level))
if self.env.context.get('edit_translations'):
code.append(indent_code(f"values[{varname!r}] = Markup({self._compile_format(exprf)})", level))
else:
code.append(indent_code(f"values[{varname!r}] = {self._compile_format(exprf)}", level))
elif varname[0] == '{':
code.append(indent_code(f"values.update({self._compile_expr(varname)})", level))
else:
@ -2549,10 +2580,17 @@ class IrQweb(models.AbstractModel):
# args to values
for key in list(el.attrib):
if key.endswith(('.f', '.translate')):
name = key.removesuffix(".f").removesuffix(".translate")
if key.endswith('.f'):
name = key.removesuffix(".f")
value = el.attrib.pop(key)
code.append(indent_code(f"t_call_values[{name!r}] = {self._compile_format(value)}", level))
elif key.endswith('.translate'):
name = key.removesuffix(".f").removesuffix(".translate")
value = el.attrib.pop(key)
if self.env.context.get('edit_translations'):
code.append(indent_code(f"t_call_values[{name!r}] = Markup({self._compile_format(value)})", level))
else:
code.append(indent_code(f"t_call_values[{name!r}] = {self._compile_format(value)}", level))
elif not key.startswith('t-'):
value = el.attrib.pop(key)
code.append(indent_code(f"t_call_values[{key!r}] = {self._compile_expr(value)}", level))
@ -2668,6 +2706,8 @@ class IrQweb(models.AbstractModel):
@returns dict
"""
if not atts.pop('__is_static_node', False) and (href := atts.get('href')) and MALICIOUS_SCHEMES(str(href)):
atts['href'] = ""
return atts
def _get_field(self, record, field_name, expression, tagName, field_options, values):

View file

@ -3,6 +3,7 @@ import base64
import binascii
from datetime import time
import logging
import math
import re
from io import BytesIO
@ -217,33 +218,39 @@ class IrQwebFieldFloat(models.AbstractModel):
@api.model
def value_to_html(self, value, options):
min_precision = options.get('min_precision')
if 'decimal_precision' in options:
precision = self.env['decimal.precision'].precision_get(options['decimal_precision'])
elif options.get('precision') is None:
int_digits = int(math.log10(abs(value))) + 1 if value != 0 else 1
max_dec_digits = max(15 - int_digits, 0)
# We display maximum 6 decimal digits or the number of significant decimal digits if it's lower
precision = min(6, max_dec_digits)
min_precision = min_precision or 1
else:
precision = options['precision']
if precision is None:
fmt = '%f'
else:
value = float_utils.float_round(value, precision_digits=precision)
fmt = '%.{precision}f'.format(precision=precision)
fmt = f'%.{precision}f'
if min_precision and min_precision < precision:
_, dec_part = float_utils.float_split_str(value, precision)
digits_count = len(dec_part.rstrip('0'))
if digits_count < min_precision:
fmt = f'%.{min_precision}f'
elif digits_count < precision:
fmt = f'%.{digits_count}f'
formatted = self.user_lang().format(fmt, value, grouping=True).replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')
# %f does not strip trailing zeroes. %g does but its precision causes
# it to switch to scientific notation starting at a million *and* to
# strip decimals. So use %f and if no precision was specified manually
# strip trailing 0.
if precision is None:
formatted = re.sub(r'(?:(0|\d+?)0+)$', r'\1', formatted)
return formatted
value = float_utils.float_round(value, precision_digits=precision)
return self.user_lang().format(fmt, value, grouping=True).replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')
@api.model
def record_to_html(self, record, field_name, options):
field = record._fields[field_name]
if 'precision' not in options and 'decimal_precision' not in options:
_, precision = record._fields[field_name].get_digits(record.env) or (None, None)
_, precision = field.get_digits(record.env) or (None, None)
options = dict(options, precision=precision)
if 'min_precision' not in options and hasattr(field, 'get_min_display_digits'):
min_precision = field.get_min_display_digits(record.env)
options = dict(options, min_precision=min_precision)
return super().record_to_html(record, field_name, options)
@ -356,7 +363,7 @@ class IrQwebFieldSelection(models.AbstractModel):
def value_to_html(self, value, options):
if not value:
return ''
return escape(options['selection'][value] or '')
return escape(options['selection'].get(value, value) or '')
@api.model
def record_to_html(self, record, field_name, options):
@ -393,6 +400,19 @@ class IrQwebFieldMany2many(models.AbstractModel):
return nl2br(text)
class IrQwebFieldOne2many(models.AbstractModel):
    _name = 'ir.qweb.field.one2many'
    _description = 'Qweb field one2many'
    _inherit = ['ir.qweb.field']

    @api.model
    def value_to_html(self, value, options):
        """Render a one2many value as a comma-separated list of display names.

        Empty recordsets render as ``False`` (nothing to output). Names are
        read as sudo so the rendering does not depend on record-level access.
        """
        if not value:
            return False
        names = value.sudo().mapped('display_name')
        return nl2br(', '.join(names))
class IrQwebFieldHtml(models.AbstractModel):
_name = 'ir.qweb.field.html'
_description = 'Qweb Field HTML'

View file

@ -32,7 +32,12 @@ def _alter_sequence(cr, seq_name, number_increment=None, number_next=None):
""" Alter a PostreSQL sequence. """
if number_increment == 0:
raise UserError(_("Step must not be zero."))
cr.execute("SELECT relname FROM pg_class WHERE relkind=%s AND relname=%s", ('S', seq_name))
cr.execute(
"SELECT relname FROM pg_class"
" WHERE relkind = %s AND relname = %s"
" AND relnamespace = current_schema::regnamespace",
('S', seq_name)
)
if not cr.fetchone():
# sequence is not created yet, we're inside create() so ignore it, will be set later
return

View file

@ -216,8 +216,8 @@ actual arch.
return re.sub(r'(?P<prefix>[^%])%\((?P<xmlid>.*?)\)[ds]', replacer, arch_fs)
lang = self.env.lang or 'en_US'
env_en = self.with_context(edit_translations=None, lang='en_US').env
env_lang = self.with_context(lang=lang).env
env_en = self.with_context(edit_translations=None, lang='en_US', check_translations=True).env
env_lang = self.with_context(lang=lang, check_translations=True).env
field_arch_db = self._fields['arch_db']
for view in self:
arch_fs = None
@ -246,6 +246,7 @@ actual arch.
def _inverse_arch(self):
for view in self:
self._validate_xml_encoding(view.arch)
data = dict(arch_db=view.arch)
if 'install_filename' in self.env.context:
# we store the relative path to the resource instead of the absolute path, if found
@ -272,6 +273,7 @@ actual arch.
def _inverse_arch_base(self):
for view, view_wo_lang in zip(self, self.with_context(lang=None)):
self._validate_xml_encoding(view.arch_base)
view_wo_lang.arch = view.arch_base
def reset_arch(self, mode='soft'):
@ -320,6 +322,7 @@ actual arch.
@api.depends('arch', 'inherit_id')
def _compute_invalid_locators(self):
def assess_locator(source, spec):
node = None
with suppress(ValidationError): # Syntax error
# If locate_node returns None here:
# Invalid expression: Ok Syntax, but cannot be anchored to the parent view.
@ -439,7 +442,8 @@ actual arch.
combined_arch = view._get_combined_arch()
# check primary view that extends this current view
if view.inherit_id or view.inherit_children_ids:
# keep a way to skip this check to avoid marking too many views as failed during an upgrade
if not self.env.context.get('_skip_primary_extensions_check') and (view.inherit_id or view.inherit_children_ids):
root = view
while root.inherit_id and root.mode != 'primary':
root = root.inherit_id
@ -591,8 +595,6 @@ actual arch.
# delete empty arch_db to avoid triggering _check_xml before _inverse_arch_base is called
del values['arch_db']
if values.get('arch_base'):
self._validate_xml_encoding(values['arch_base'])
if not values.get('type'):
if values.get('inherit_id'):
values['type'] = self.browse(values['inherit_id']).type
@ -609,7 +611,7 @@ actual arch.
"Allowed types are: %(valid_types)s",
view_type=values['type'], valid_types=', '.join(valid_types)
))
except LxmlError:
except (etree.ParseError, ValueError):
# don't raise here, the constraint that runs `self._check_xml` will
# do the job properly.
pass
@ -652,8 +654,6 @@ actual arch.
if 'arch_db' in vals and not self.env.context.get('no_save_prev'):
vals['arch_prev'] = self.arch_db
if vals.get('arch_base'):
self._validate_xml_encoding(vals['arch_base'])
res = super().write(self._compute_defaults(vals))
# Check the xml of the view if it gets re-activated or changed.
@ -692,8 +692,11 @@ actual arch.
:return: id of the default view of False if none found
:rtype: int
"""
domain = [('model', '=', model), ('type', '=', view_type), ('mode', '=', 'primary')]
return self.search(domain, limit=1).id
return self.search(self._get_default_view_domain(model, view_type), limit=1).id
@api.model
def _get_default_view_domain(self, model, view_type):
return Domain([('model', '=', model), ('type', '=', view_type), ('mode', '=', 'primary')])
#------------------------------------------------------
    # Inheritance mechanism
@ -1331,7 +1334,15 @@ actual arch.
# check the read/visibility access
for node in tree.xpath('//*[@__groups_key__]'):
if not has_access(node.attrib.pop('__groups_key__')):
node.getparent().remove(node)
tail = node.tail
parent = node.getparent()
previous = node.getprevious()
parent.remove(node)
if tail:
if previous is not None:
previous.tail = (previous.tail or '') + tail
elif parent is not None:
parent.text = (parent.text or '') + tail
elif node.tag == 't' and not node.attrib:
# Move content of <t groups=""> blocks
# and remove the <t> node.
@ -3197,9 +3208,9 @@ class Base(models.AbstractModel):
:rtype: list
"""
return [
'change_default', 'context', 'currency_field', 'definition_record', 'definition_record_field', 'digits', 'domain', 'aggregator', 'groups',
'help', 'model_field', 'name', 'readonly', 'related', 'relation', 'relation_field', 'required', 'searchable', 'selection', 'size',
'sortable', 'store', 'string', 'translate', 'trim', 'type', 'groupable', 'falsy_value_label'
'change_default', 'context', 'currency_field', 'definition_record', 'definition_record_field', 'digits', 'min_display_digits', 'domain',
'aggregator', 'groups', 'help', 'model_field', 'name', 'readonly', 'related', 'relation', 'relation_field', 'required', 'searchable',
'selection', 'size', 'sortable', 'store', 'string', 'translate', 'trim', 'type', 'groupable', 'falsy_value_label'
]
@api.readonly

View file

@ -3,7 +3,8 @@ import re
from collections.abc import Iterable
from odoo import api, fields, models
from odoo.tools import _, SQL
from odoo.exceptions import UserError
from odoo.tools import _, clean_context
def sanitize_account_number(acc_number):
@ -123,7 +124,7 @@ class ResPartnerBank(models.Model):
for bank in self:
bank.acc_type = self.retrieve_acc_type(bank.acc_number)
@api.depends('partner_id.name')
@api.depends('partner_id')
def _compute_account_holder_name(self):
for bank in self:
bank.acc_holder_name = bank.partner_id.name
@ -144,6 +145,22 @@ class ResPartnerBank(models.Model):
for bank in self:
bank.color = 10 if bank.allow_out_payment else 1
    def _sanitize_vals(self, vals):
        """Normalize account-number values in ``vals`` (in place) before create/write.

        ``sanitized_acc_number`` is derived data: a direct write to it is
        redirected to ``acc_number``, and it is then always recomputed from
        ``acc_number``.
        """
        if 'sanitized_acc_number' in vals: # do not allow to write on sanitized directly
            vals['acc_number'] = vals.pop('sanitized_acc_number')
        if 'acc_number' in vals:
            vals['sanitized_acc_number'] = sanitize_account_number(vals['acc_number'])
    @api.model_create_multi
    def create(self, vals_list):
        """Create bank accounts, sanitizing the account number in each vals dict."""
        for vals in vals_list:
            self._sanitize_vals(vals)
        return super().create(vals_list)
    def write(self, vals):
        """Write on bank accounts, keeping ``sanitized_acc_number`` in sync with ``acc_number``."""
        self._sanitize_vals(vals)
        return super().write(vals)
def action_archive_bank(self):
"""
Custom archive function because the basic action_archive don't trigger a re-rendering of the page, so
@ -160,3 +177,47 @@ class ResPartnerBank(models.Model):
"""
self.action_archive()
return True
    def _user_can_trust(self):
        """Return whether the current user may trust this bank account.

        Base implementation allows it unconditionally; presumably an override
        hook for modules adding stricter rules — confirm against callers.
        """
        self.ensure_one()
        return True
    def _find_or_create_bank_account(self, account_number, partner, company, *, allow_company_account_creation=False, extra_create_vals=None):
        """Find a bank account for the given partner and number; create it if it doesn't exist.

        Corner cases handled:

        - do not try to create the account number when it already exists restricted to
          another company (the search is done as sudo and without the ``active_test``
          filter), because of the unique constraint;
        - do not create an account number for one of the database's own companies,
          unless ``allow_company_account_creation`` is set.

        :param account_number: the bank account number to search for (or to create)
        :param partner: the partner linked to the account number
        :param company: the company the bank account must be accessible from (used for
            filtering the result only)
        :param allow_company_account_creation: disable the protection against creating
            an account for one of our own companies
        :param extra_create_vals: extra values used when creating the account, but not
            written when the account was found and e.g. modified manually beforehand
        """
        # sudo + active_test=False: find the account even when archived or owned by
        # a partner the current user cannot read (avoids hitting the unique constraint).
        bank_account = self.env['res.partner.bank'].sudo().with_context(active_test=False).search([
            ('acc_number', '=', account_number),
            ('partner_id', 'child_of', partner.commercial_partner_id.id),
        ])
        if not bank_account:
            if not allow_company_account_creation and partner.id in self.env['res.company']._get_company_partner_ids():
                raise UserError(_(
                    "Please add your own bank account manually: %(account_number)s (%(partner)s)",
                    account_number=account_number,
                    partner=partner.display_name,
                ))
            # clean_context: do not leak default_* keys into the creation; the new
            # account is created with allow_out_payment disabled.
            bank_account = self.env['res.partner.bank'].with_context(clean_context(self.env.context)).create({
                **(extra_create_vals or {}),
                'acc_number': account_number,
                'partner_id': partner.id,
                'allow_out_payment': False,
            })
        # Keep only active accounts valid for the given company, prefer the one owned
        # directly by `partner`, and drop sudo before returning.
        return bank_account.filtered_domain([
            *self.env['res.partner.bank']._check_company_domain(company),
            ('active', '=', True),
        ]).sorted(lambda b: b.partner_id != partner).sudo(False)[:1]

View file

@ -374,8 +374,12 @@ class ResCompany(models.Model):
('id', 'child_of', company.id),
('id', '!=', company.id),
])
for fname in sorted(changed):
branches[fname] = company[fname]
changed_vals = {
fname: self._fields[fname].convert_to_write(company[fname], branches)
for fname in sorted(changed)
}
branches.write(changed_vals)
if companies_needs_l10n:
companies_needs_l10n.install_l10n_modules()
@ -483,3 +487,7 @@ class ResCompany(models.Model):
'company_id': self.id,
'company_ids': [(6, 0, [self.id])],
})
    @ormcache()
    def _get_company_partner_ids(self):
        """Return a tuple of the partner ids of all companies, archived included.

        Cached with ``ormcache``; used to detect whether a partner belongs to
        one of the database's own companies.
        """
        return tuple(self.env['res.company'].sudo().with_context(active_test=False).search([]).partner_id.ids)

View file

@ -20,6 +20,7 @@ FLAG_MAPPING = {
"RE": "fr",
"MF": "fr",
"UM": "us",
"XI": "uk",
}
NO_FLAG_COUNTRIES = [

View file

@ -1,11 +1,11 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from contextlib import nullcontext
from datetime import datetime
from datetime import datetime, timedelta
import logging
from odoo import api, fields, models, tools
from odoo.http import GeoIP, request, root, STORED_SESSION_BYTES
from odoo.http import GeoIP, get_session_max_inactivity, request, root, STORED_SESSION_BYTES
from odoo.tools import SQL, OrderedSet, unique
from odoo.tools.translate import _
from .res_users import check_identity
@ -137,24 +137,34 @@ class ResDeviceLog(models.Model):
Set the field ``revoked`` to ``True`` for ``res.device.log``
for which the session file no longer exists on the filesystem.
"""
device_logs_by_session_identifier = {}
for session_identifier, device_logs in self.env['res.device.log']._read_group(
domain=[('revoked', '=', False)],
groupby=['session_identifier'],
aggregates=['id:recordset'],
):
device_logs_by_session_identifier[session_identifier] = device_logs
batch_size = 100_000
offset = 0
revoked_session_identifiers = root.session_store.get_missing_session_identifiers(
device_logs_by_session_identifier.keys()
)
device_logs_to_revoke = self.env['res.device.log'].concat(*map(
device_logs_by_session_identifier.get,
revoked_session_identifiers
))
# Initial run may take 5-10 minutes due to many non-revoked sessions,
# marking them enables index use on ``revoked IS NOT TRUE``.
device_logs_to_revoke.sudo().write({'revoked': True})
while True:
candidate_device_log_ids = self.env['res.device.log'].search_fetch(
[
('revoked', '=', False),
('last_activity', '<', datetime.now() - timedelta(seconds=get_session_max_inactivity(self.env))),
],
['session_identifier'],
order='id',
limit=batch_size,
offset=offset,
)
if not candidate_device_log_ids:
break
offset += batch_size
revoked_session_identifiers = root.session_store.get_missing_session_identifiers(
set(candidate_device_log_ids.mapped('session_identifier'))
)
if not revoked_session_identifiers:
continue
to_revoke = candidate_device_log_ids.filtered(
lambda candidate: candidate.session_identifier in revoked_session_identifiers
)
to_revoke.write({'revoked': True})
self.env.cr.commit()
offset -= len(to_revoke)
class ResDevice(models.Model):

View file

@ -319,7 +319,7 @@ class ResGroups(models.Model):
self.view_group_hierarchy = self._get_view_group_hierarchy()
@api.model
@tools.ormcache(cache='groups')
@tools.ormcache('self.env.lang', cache='groups')
def _get_view_group_hierarchy(self):
return {
'groups': {

View file

@ -64,6 +64,9 @@ class ResLang(models.Model):
('%d-%m-%Y', '31-01-%s' % current_year),
('%m-%d-%Y', '01-31-%s' % current_year),
('%Y-%m-%d', '%s-01-31' % current_year),
('%d.%m.%Y', '31.01.%s' % current_year),
('%m.%d.%Y', '01.31.%s' % current_year),
('%Y.%m.%d', '%s.01.31' % current_year),
]
name = fields.Char(required=True)
@ -165,6 +168,16 @@ class ResLang(models.Model):
lang.active = True
return lang
    def _activate_and_install_lang(self, code):
        """Activate the language matching ``code`` when it exists but is archived.

        :param code: code of the language to activate
        :return: the ``res.lang`` record matching ``code`` (possibly empty),
            active after this call when it existed
        """
        # Search with active_test disabled so archived languages are found too.
        lang = self.with_context(active_test=False).search([('code', '=', code)])
        if lang and not lang.active:
            # NOTE(review): translation (re)loading presumably happens inside
            # action_unarchive — confirm; nothing here installs translations directly.
            lang.action_unarchive()
        return lang
def _create_lang(self, lang, lang_name=None):
""" Create the given language and make it active. """
# create the language with locale information

View file

@ -188,6 +188,7 @@ class ResPartner(models.Model):
_order = "complete_name ASC, id DESC"
_rec_names_search = ['complete_name', 'email', 'ref', 'vat', 'company_registry'] # TODO vat must be sanitized the same way for storing/searching
_allow_sudo_commands = False
_check_company_auto = True
_check_company_domain = models.check_company_domain_parent_of
# the partner types that must be added to a partner's complete name, like "Delivery"
@ -865,6 +866,11 @@ class ResPartner(models.Model):
vals['website'] = self._clean_website(vals['website'])
if vals.get('parent_id'):
vals['company_name'] = False
if vals.get('name'):
for partner in self:
for bank in partner.bank_ids:
if bank.acc_holder_name == partner.name:
bank.acc_holder_name = vals['name']
# filter to keep only really updated values -> field synchronize goes through
# partner tree and we should avoid infinite loops in case same value is
@ -896,9 +902,9 @@ class ResPartner(models.Model):
del vals['is_company']
result = result and super().write(vals)
for partner, pre_values in zip(self, pre_values_list, strict=True):
if any(u._is_internal() for u in partner.user_ids if u != self.env.user):
self.env['res.users'].check_access('write')
updated = {fname: fvalue for fname, fvalue in vals.items() if partner[fname] != pre_values[fname]}
if internal_users := partner.user_ids.filtered(lambda u: u._is_internal() and u != self.env.user):
internal_users.check_access('write')
updated = {fname: fvalue for fname, fvalue in vals.items() if partner[fname] != pre_values.get(fname)}
if updated:
partner._fields_sync(updated)
return result
@ -923,6 +929,7 @@ class ResPartner(models.Model):
return partners
for partner, vals in zip(partners, vals_list):
vals = self.env['res.partner']._add_missing_default_values(vals)
partner._fields_sync(vals)
return partners
@ -982,11 +989,7 @@ class ResPartner(models.Model):
def create_company(self):
self.ensure_one()
if self.company_name:
# Create parent company
values = dict(name=self.company_name, is_company=True, vat=self.vat)
values.update(self._convert_fields_to_values(self._address_fields()))
new_company = self.create(values)
if (new_company := self._create_contact_parent_company()):
# Set new company as my parent
self.write({
'parent_id': new_company.id,
@ -994,6 +997,15 @@ class ResPartner(models.Model):
})
return True
def _create_contact_parent_company(self):
self.ensure_one()
if self.company_name:
# Create parent company
values = dict(name=self.company_name, is_company=True, vat=self.vat)
values.update(self._convert_fields_to_values(self._address_fields()))
return self.create(values)
return self.browse()
def open_commercial_entity(self):
""" Utility method used to add an "Open Company" button in partner views """
self.ensure_one()

View file

@ -25,7 +25,7 @@ from odoo.api import SUPERUSER_ID
from odoo.exceptions import AccessDenied, AccessError, UserError, ValidationError
from odoo.fields import Command, Domain
from odoo.http import request, DEFAULT_LANG
from odoo.tools import email_domain_extract, is_html_empty, frozendict, reset_cached_properties, SQL
from odoo.tools import email_domain_extract, is_html_empty, frozendict, reset_cached_properties, str2bool, SQL
_logger = logging.getLogger(__name__)
@ -268,7 +268,7 @@ class ResUsers(models.Model):
def _default_view_group_hierarchy(self):
return self.env['res.groups']._get_view_group_hierarchy()
view_group_hierarchy = fields.Json(string='Technical field for user group setting', store=False, default=_default_view_group_hierarchy)
view_group_hierarchy = fields.Json(string='Technical field for user group setting', store=False, copy=False, default=_default_view_group_hierarchy)
role = fields.Selection([('group_user', 'User'), ('group_system', 'Administrator')], compute='_compute_role', readonly=False, string="Role")
_login_key = models.Constraint("UNIQUE (login)",
@ -454,7 +454,7 @@ class ResUsers(models.Model):
@api.depends('name')
def _compute_signature(self):
for user in self.filtered(lambda user: user.name and is_html_empty(user.signature)):
user.signature = Markup('<p>%s</p>') % user['name']
user.signature = Markup('<div>%s</div>') % user['name']
@api.depends('all_group_ids')
def _compute_share(self):
@ -647,6 +647,7 @@ class ResUsers(models.Model):
@api.ondelete(at_uninstall=True)
def _unlink_except_master_data(self):
portal_user_template = self.env.ref('base.template_portal_user_id', False)
public_user = self.env.ref('base.public_user', False)
if SUPERUSER_ID in self.ids:
raise UserError(_('You can not remove the admin user as it is used internally for resources created by Odoo (updates, module installation, ...)'))
user_admin = self.env.ref('base.user_admin', raise_if_not_found=False)
@ -655,6 +656,8 @@ class ResUsers(models.Model):
self.env.registry.clear_cache()
if portal_user_template and portal_user_template in self:
raise UserError(_('Deleting the template users is not allowed. Deleting this profile will compromise critical functionalities.'))
if public_user and public_user in self:
raise UserError(_("Deleting the public user is not allowed. Deleting this profile will compromise critical functionalities."))
@api.model
def name_search(self, name='', domain=None, operator='ilike', limit=100):
@ -1356,9 +1359,10 @@ class UsersMultiCompany(models.Model):
'base.group_multi_company', raise_if_not_found=False)
if group_multi_company_id:
for user in users:
if len(user.company_ids) <= 1 and group_multi_company_id in user.group_ids.ids:
company_count = len(user.sudo().company_ids)
if company_count <= 1 and group_multi_company_id in user.group_ids.ids:
user.write({'group_ids': [Command.unlink(group_multi_company_id)]})
elif len(user.company_ids) > 1 and group_multi_company_id not in user.group_ids.ids:
elif company_count > 1 and group_multi_company_id not in user.group_ids.ids:
user.write({'group_ids': [Command.link(group_multi_company_id)]})
return users
@ -1370,9 +1374,10 @@ class UsersMultiCompany(models.Model):
'base.group_multi_company', raise_if_not_found=False)
if group_multi_company_id:
for user in self:
if len(user.company_ids) <= 1 and group_multi_company_id in user.group_ids.ids:
company_count = len(user.sudo().company_ids)
if company_count <= 1 and group_multi_company_id in user.group_ids.ids:
user.write({'group_ids': [Command.unlink(group_multi_company_id)]})
elif len(user.company_ids) > 1 and group_multi_company_id not in user.group_ids.ids:
elif company_count > 1 and group_multi_company_id not in user.group_ids.ids:
user.write({'group_ids': [Command.link(group_multi_company_id)]})
return res
@ -1384,9 +1389,10 @@ class UsersMultiCompany(models.Model):
group_multi_company_id = self.env['ir.model.data']._xmlid_to_res_id(
'base.group_multi_company', raise_if_not_found=False)
if group_multi_company_id:
if len(user.company_ids) <= 1 and group_multi_company_id in user.group_ids.ids:
company_count = len(user.sudo().company_ids)
if company_count <= 1 and group_multi_company_id in user.group_ids.ids:
user.update({'group_ids': [Command.unlink(group_multi_company_id)]})
elif len(user.company_ids) > 1 and group_multi_company_id not in user.group_ids.ids:
elif company_count > 1 and group_multi_company_id not in user.group_ids.ids:
user.update({'group_ids': [Command.link(group_multi_company_id)]})
return user
@ -1507,6 +1513,7 @@ KEY_CRYPT_CONTEXT = CryptContext(
# attacks on API keys isn't much of a concern
['pbkdf2_sha512'], pbkdf2_sha512__rounds=6000,
)
DEFAULT_PROGRAMMATIC_API_KEYS_LIMIT = 10 # programmatic API key creation is refused if the user already has at least this amount of API keys
class ResUsersApikeys(models.Model):
@ -1560,6 +1567,7 @@ class ResUsersApikeys(models.Model):
_logger.info("API key(s) removed: scope: <%s> for '%s' (#%s) from %s",
self.mapped('scope'), self.env.user.login, self.env.uid, ip)
self.sudo().unlink()
self.env.registry.clear_cache()
return {'type': 'ir.actions.act_window_close'}
raise AccessError(_("You can not remove API keys unless they're yours or you are a system user"))
@ -1622,6 +1630,89 @@ class ResUsersApikeys(models.Model):
return k
def _ensure_can_manage_keys_programmatically(self):
# Administrators would not be restricted by the ICP check alone,
# as they could temporarily enable the setting via set_param().
# However, this is considered bad practice because it would create a time window
# where anyone could manage API keys programmatically.
# Additionally, the enable / call / restore process involves three distinct calls,
# which is not atomic and prone to errors (e.g., server unavailability during restore),
# potentially leaving the configuration enabled for all users.
# To avoid this, an exception is made for Administrators.
# However, if programmatic API key management were to be enabled by default,
# this exception should be removed, as disabling the feature should be global.
ICP = self.env['ir.config_parameter'].sudo()
programmatic_api_keys_enabled = str2bool(ICP.get_param('base.enable_programmatic_api_keys'), False)
if not (self.env.is_system() or programmatic_api_keys_enabled):
raise UserError(_("Programmatic API keys are not enabled"))
    @api.model
    def generate(self, key, scope, name, expiration_date):
        """
        Generate a new API key with an existing API key.

        The provided `key` must be an existing API key that belongs to the current user.
        Its scope must be compatible with `scope`.
        The `expiration_date` must be allowed for the user's group.
        To renew a key, generate the new one, store it, and then call `revoke` on the previous one.

        :param str key: an existing valid API key of the current user,
            authorizing the creation
        :param str scope: scope of the new key (falls back to 'rpc' when falsy)
        :param str name: human-readable label for the new key
        :param expiration_date: `datetime` or a string accepted by
            `fields.Datetime.from_string`
        :return: the clear-text value of the newly generated key
        :raise UserError: feature disabled, or per-user key limit reached
        :raise AccessDenied: `key` invalid or owned by another user
        """
        self._ensure_can_manage_keys_programmatically()
        # _assert_can_auth throttles/records auth attempts keyed on the public
        # index prefix of the key, so brute-forcing `key` is rate limited.
        with self.env['res.users']._assert_can_auth(user=key[:INDEX_SIZE]):
            if not isinstance(expiration_date, datetime.datetime):
                expiration_date = fields.Datetime.from_string(expiration_date)
            # Only still-valid keys (no expiration, or expiring in the future)
            # count against the per-user limit.
            nb_keys = self.search_count([('user_id', '=', self.env.uid),
                '|', ('expiration_date', '=', False), ('expiration_date', '>=', self.env.cr.now())])
            try:
                ICP = self.env['ir.config_parameter'].sudo()
                nb_keys_limit = int(ICP.get_param('base.programmatic_api_keys_limit', DEFAULT_PROGRAMMATIC_API_KEYS_LIMIT))
            except ValueError:
                # Misconfigured parameter value: warn and fall back to default.
                _logger.warning("Invalid value for 'base.programmatic_api_keys_limit', using default value.")
                nb_keys_limit = DEFAULT_PROGRAMMATIC_API_KEYS_LIMIT
            if nb_keys >= nb_keys_limit:
                raise UserError(_('Limit of %s API keys is reached for programmatic creation', nb_keys_limit))
            # Scope compatibility rules:
            # - A global key can generate credentials for any scope (including global).
            # - A scoped key can only generate credentials for its own scope.
            #
            # This is enforced in _check_credentials by validating scope usage,
            # and the validated scope is then reused when calling _generate.
            uid = self.env['res.users.apikeys']._check_credentials(scope=scope or 'rpc', key=key)
            if not uid or uid != self.env.uid:
                raise AccessDenied(_("The provided API key is invalid or does not belong to the current user."))
            new_key = self._generate(scope, name, expiration_date)
            # Log only the public index prefixes, never the full secrets.
            _logger.info("%s %r generated from %r", self._description, new_key[:INDEX_SIZE], key[:INDEX_SIZE])
            return new_key
    @api.model
    def revoke(self, key):
        """
        Revoke an existing API key.

        If it exists, the `key` will be removed from the server.

        :param str key: the clear-text API key to revoke
        :return: True when a matching key was found and removed
        :raise AccessDenied: when no valid key matches
        """
        self._ensure_can_manage_keys_programmatically()
        assert key, "key required"
        # _assert_can_auth throttles repeated failures for this key prefix,
        # limiting brute-force probing of the key space.
        with self.env['res.users']._assert_can_auth(user=key[:INDEX_SIZE]):
            # Fetch candidate rows by the public index prefix; only keys that
            # have not expired are considered.
            self.env.cr.execute(SQL('''
                SELECT id, key
                FROM %(table)s
                WHERE
                    index = %(index)s
                    AND (
                        expiration_date IS NULL OR
                        expiration_date >= now() at time zone 'utc'
                    )
            ''', table=SQL.identifier(self._table), index=key[:INDEX_SIZE]))
            for key_id, current_key in self.env.cr.fetchall():
                # The stored value is a hash; verify the clear-text key against
                # each candidate to find the actual match.
                if key and KEY_CRYPT_CONTEXT.verify(key, current_key):
                    self.env['res.users.apikeys'].browse(key_id)._remove()
                    return True
            raise AccessDenied(_("The provided API key is invalid."))
@api.autovacuum
def _gc_user_apikeys(self):
self.env.cr.execute(SQL("""