vanilla 17.0

This commit is contained in:
Ernad Husremovic 2025-10-08 10:47:08 +02:00
parent d72e748793
commit a9bcec8e91
1986 changed files with 1613876 additions and 568976 deletions

View file

@ -7,3 +7,6 @@ from . import ir_model
from . import ir_ui_menu
from . import models
from . import base_document_layout
from . import res_config_settings
from . import res_partner
from . import res_users

View file

@ -2,12 +2,13 @@
import markupsafe
import os
from markupsafe import Markup
from math import ceil
from odoo import api, fields, models, tools
from odoo.addons.base.models.ir_qweb_fields import nl2br
from odoo.modules import get_resource_path
from odoo.tools import file_path, html2plaintext, is_html_empty
from odoo.tools import html2plaintext, is_html_empty
from odoo.tools.misc import file_path
try:
import sass as libsass
@ -217,7 +218,7 @@ class BaseDocumentLayout(models.TransientModel):
return False, False
base_w, base_h = image.size
w = int(50 * base_w / base_h)
w = ceil(50 * base_w / base_h)
h = 50
# Converts to RGBA (if already RGBA, this is a noop)
@ -251,14 +252,6 @@ class BaseDocumentLayout(models.TransientModel):
return tools.rgb_to_hex(primary), tools.rgb_to_hex(secondary)
@api.model
def action_open_base_document_layout(self, action_ref=None):
if not action_ref:
action_ref = 'web.action_base_document_layout_configurator'
res = self.env["ir.actions.actions"]._for_xml_id(action_ref)
self.env[res["res_model"]].check_access_rights('write')
return res
def document_layout_save(self):
# meant to be overridden
return self.env.context.get('report_action') or {'type': 'ir.actions.act_window_close'}
@ -306,7 +299,7 @@ class BaseDocumentLayout(models.TransientModel):
precision = 8
output_style = 'expanded'
bootstrap_path = get_resource_path('web', 'static', 'lib', 'bootstrap', 'scss')
bootstrap_path = file_path('web/static/lib/bootstrap/scss')
try:
return libsass.compile(

View file

@ -2,17 +2,14 @@
import hashlib
import json
import logging
import odoo
from odoo import api, http, models
from odoo.http import request
from odoo.tools import file_open, image_process, ustr
from odoo import api, models
from odoo.http import request, DEFAULT_MAX_CONTENT_LENGTH
from odoo.tools import ormcache, ustr
from odoo.tools.misc import str2bool
_logger = logging.getLogger(__name__)
"""
Debug mode is stored in session and should always be a string.
It can be activated with an URL query string `debug=<mode>` where mode
@ -58,6 +55,11 @@ class Http(models.AbstractModel):
super()._pre_dispatch(rule, args)
cls._handle_debug()
@classmethod
def _post_logout(cls):
super()._post_logout()
request.future_response.set_cookie('cids', max_age=0)
def webclient_rendering_context(self):
return {
'menu_data': request.env['ir.ui.menu'].load_menus(request.session.debug),
@ -79,24 +81,27 @@ class Http(models.AbstractModel):
IrConfigSudo = self.env['ir.config_parameter'].sudo()
max_file_upload_size = int(IrConfigSudo.get_param(
'web.max_file_upload_size',
default=128 * 1024 * 1024, # 128MiB
default=DEFAULT_MAX_CONTENT_LENGTH,
))
mods = odoo.conf.server_wide_modules or []
if request.db:
mods = list(request.registry._init_modules) + mods
is_internal_user = user.has_group('base.group_user')
session_info = {
"uid": session_uid,
"is_system": user._is_system() if session_uid else False,
"is_admin": user._is_admin() if session_uid else False,
"is_public": user._is_public(),
"is_internal_user": is_internal_user,
"user_context": user_context,
"db": self.env.cr.dbname,
"user_settings": self.env['res.users.settings']._find_or_create_for_user(user)._res_users_settings_format(),
"server_version": version_info.get('server_version'),
"server_version_info": version_info.get('server_version_info'),
"support_url": "https://www.odoo.com/buy",
"name": user.name,
"username": user.login,
"partner_display_name": user.partner_id.display_name,
"company_id": user.company_id.id if session_uid else None, # YTI TODO: Remove this from the user context
"partner_id": user.partner_id.id if session_uid and user.partner_id else None,
"web.base.url": IrConfigSudo.get_param('web.base.url', default=''),
"active_ids_limit": int(IrConfigSudo.get_param('web.active_ids_limit', default='20000')),
@ -117,7 +122,7 @@ class Http(models.AbstractModel):
}
if request.session.debug:
session_info['bundle_params']['debug'] = request.session.debug
if self.env.user.has_group('base.group_user'):
if is_internal_user:
# the following is only useful in the context of a webclient bootstrapping
# but is still included in some other calls (e.g. '/web/session/authenticate')
# to avoid access errors and unnecessary information, it is only included for users
@ -128,6 +133,9 @@ class Http(models.AbstractModel):
session_info['cache_hashes'].update({
"load_menus": hashlib.sha512(menu_json_utf8).hexdigest()[:64], # sha512/256
})
# We need sudo since a user may not have access to ancestor companies
disallowed_ancestor_companies_sudo = user.company_ids.sudo().parent_ids - user.company_ids
all_companies_in_hierarchy_sudo = disallowed_ancestor_companies_sudo + user.company_ids
session_info.update({
# current_company should be default_company
"user_companies": {
@ -137,8 +145,19 @@ class Http(models.AbstractModel):
'id': comp.id,
'name': comp.name,
'sequence': comp.sequence,
'child_ids': (comp.child_ids & all_companies_in_hierarchy_sudo).ids,
'parent_id': comp.parent_id.id,
} for comp in user.company_ids
},
'disallowed_ancestor_companies': {
comp.id: {
'id': comp.id,
'name': comp.name,
'sequence': comp.sequence,
'child_ids': (comp.child_ids & all_companies_in_hierarchy_sudo).ids,
'parent_id': comp.parent_id.id,
} for comp in disallowed_ancestor_companies_sudo
},
},
"show_effect": True,
"display_switch_company_menu": user.has_group('base.group_multi_company') and len(user.company_ids) > 1,
@ -152,6 +171,7 @@ class Http(models.AbstractModel):
session_info = {
'is_admin': user._is_admin() if session_uid else False,
'is_system': user._is_system() if session_uid else False,
'is_public': user._is_public(),
'is_website_user': user._is_public() if session_uid else False,
'user_id': user.id if session_uid else False,
'is_frontend': True,
@ -159,6 +179,7 @@ class Http(models.AbstractModel):
'profile_collectors': request.session.profile_collectors,
'profile_params': request.session.profile_params,
'show_effect': bool(request.env['ir.config_parameter'].sudo().get_param('base_setup.show_effect')),
'currencies': self.get_currencies(),
'bundle_params': {
'lang': request.session.context['lang'],
},
@ -173,7 +194,11 @@ class Http(models.AbstractModel):
})
return session_info
@ormcache()
def get_currencies(self):
Currency = self.env['res.currency']
currencies = Currency.search([]).read(['symbol', 'position', 'decimal_places'])
return {c['id']: {'symbol': c['symbol'], 'position': c['position'], 'digits': [69,c['decimal_places']]} for c in currencies}
currencies = Currency.search_fetch([], ['symbol', 'position', 'decimal_places'])
return {
c.id: {'symbol': c.symbol, 'position': c.position, 'digits': [69, c.decimal_places]}
for c in currencies
}

View file

@ -19,7 +19,7 @@ class IrModel(models.Model):
accessible_models = []
not_accessible_models = []
for model in models:
if self._check_model_access(model):
if self._is_valid_for_model_selector(model):
accessible_models.append(model)
else:
not_accessible_models.append({"display_name": model, "model": model})
@ -34,9 +34,15 @@ class IrModel(models.Model):
} for model in records]
@api.model
def _check_model_access(self, model):
return (self.env.user._is_internal() and model in self.env
and self.env[model].check_access_rights("read", raise_exception=False))
def _is_valid_for_model_selector(self, model):
model = self.env.get(model)
return (
self.env.user._is_internal()
and model is not None
and model.check_access_rights("read", raise_exception=False)
and not model._transient
and not model._abstract
)
@api.model
def get_available_models(self):
@ -44,5 +50,5 @@ class IrModel(models.Model):
Return the list of models the current user has access to, with their
corresponding display name.
"""
accessible_models = [model for model in self.pool.keys() if self._check_model_access(model)]
accessible_models = [model for model in self.pool if self._is_valid_for_model_selector(model)]
return self._display_name_for(accessible_models)

View file

@ -6,7 +6,7 @@ from collections import OrderedDict
from werkzeug.urls import url_quote
from markupsafe import Markup
from odoo import api, models
from odoo import api, models, fields
from odoo.tools import pycompat
from odoo.tools import html_escape as escape
@ -36,7 +36,7 @@ class Image(models.AbstractModel):
if max_width or max_height:
max_size = '%sx%s' % (max_width, max_height)
sha = hashlib.sha512(str(getattr(record, '__last_update')).encode('utf-8')).hexdigest()[:7]
sha = hashlib.sha512(str(getattr(record, 'write_date', fields.Datetime.now())).encode('utf-8')).hexdigest()[:7]
max_size = '' if max_size is None else '/%s' % max_size
if options.get('filename-field') and options['filename-field'] in record and record[options['filename-field']]:

View file

@ -32,6 +32,7 @@ class IrUiMenu(models.Model):
"actionModel": False,
"webIcon": None,
"webIconData": None,
"webIconDataMimetype": None,
"backgroundImage": menu.get('backgroundImage'),
}
else:
@ -57,6 +58,7 @@ class IrUiMenu(models.Model):
"actionModel": action_model,
"webIcon": menu['web_icon'],
"webIconData": menu['web_icon_data'],
"webIconDataMimetype": menu['web_icon_data_mimetype'],
}
return web_menus

View file

@ -1,14 +1,19 @@
# -*- coding: utf-8 -*-
from typing import Dict, List
import babel.dates
import pytz
from lxml import etree
import base64
import copy
import itertools
import json
import pytz
from odoo import _, _lt, api, fields, models
from odoo.fields import Command
from odoo.models import BaseModel, NewId
from odoo.osv.expression import AND, TRUE_DOMAIN, normalize_domain
from odoo.tools import date_utils, lazy, OrderedSet
from odoo.tools.misc import get_lang
from odoo.tools import date_utils, unique
from odoo.tools.misc import OrderedSet, get_lang
from odoo.exceptions import UserError
from collections import defaultdict
@ -33,37 +38,20 @@ DISPLAY_DATE_FORMATS = {
}
class IrActionsActWindowView(models.Model):
_inherit = 'ir.actions.act_window.view'
view_mode = fields.Selection(selection_add=[
('qweb', 'QWeb')
], ondelete={'qweb': 'cascade'})
class Base(models.AbstractModel):
_inherit = 'base'
@api.model
def web_search_read(self, domain=None, fields=None, offset=0, limit=None, order=None, count_limit=None):
"""
Performs a search_read and a search_count.
def web_search_read(self, domain, specification, offset=0, limit=None, order=None, count_limit=None):
records = self.search_fetch(domain, specification.keys(), offset=offset, limit=limit, order=order)
values_records = records.web_read(specification)
return self._format_web_search_read_results(domain, values_records, offset, limit, count_limit)
:param domain: search domain
:param fields: list of fields to read
:param limit: maximum number of records to read
:param offset: number of records to skip
:param order: columns to sort results
:return: {
'records': array of read records (result of a call to 'search_read')
'length': number of records matching the domain (result of a call to 'search_count')
}
"""
records = self.search_read(domain, fields, offset=offset, limit=limit, order=order)
def _format_web_search_read_results(self, domain, records, offset=0, limit=None, count_limit=None):
if not records:
return {
'length': 0,
'records': []
'records': [],
}
current_length = len(records) + offset
limit_reached = len(records) == limit
@ -75,15 +63,166 @@ class Base(models.AbstractModel):
length = current_length
return {
'length': length,
'records': records
'records': records,
}
def web_save(self, vals, specification: Dict[str, Dict], next_id=None) -> List[Dict]:
if self:
self.write(vals)
else:
self = self.create(vals)
if next_id:
self = self.browse(next_id)
return self.with_context(bin_size=True).web_read(specification)
def web_read(self, specification: Dict[str, Dict]) -> List[Dict]:
fields_to_read = list(specification) or ['id']
if fields_to_read == ['id']:
# if we request to read only the ids, we have them already so we can build the return dictionaries immediately
# this also avoids a call to read on the co-model that might have different access rules
values_list = [{'id': id_} for id_ in self._ids]
else:
values_list: List[Dict] = self.read(fields_to_read, load=None)
if not values_list:
return values_list
def cleanup(vals: Dict) -> Dict:
""" Fixup vals['id'] of a new record. """
if not vals['id']:
vals['id'] = vals['id'].origin or False
return vals
for field_name, field_spec in specification.items():
field = self._fields.get(field_name)
if field is None:
continue
if field.type == 'many2one':
if 'fields' not in field_spec:
for values in values_list:
if isinstance(values[field_name], NewId):
values[field_name] = values[field_name].origin
continue
co_records = self[field_name]
if 'context' in field_spec:
co_records = co_records.with_context(**field_spec['context'])
extra_fields = dict(field_spec['fields'])
extra_fields.pop('display_name', None)
many2one_data = {
vals['id']: cleanup(vals)
for vals in co_records.web_read(extra_fields)
}
if 'display_name' in field_spec['fields']:
for rec in co_records.sudo():
many2one_data[rec.id]['display_name'] = rec.display_name
for values in values_list:
if values[field_name] is False:
continue
vals = many2one_data[values[field_name]]
values[field_name] = vals['id'] and vals
elif field.type in ('one2many', 'many2many'):
if not field_spec:
continue
co_records = self[field_name]
if 'order' in field_spec and field_spec['order']:
co_records = co_records.with_context(active_test=False).search(
[('id', 'in', co_records.ids)], order=field_spec['order'],
).with_context(co_records.env.context) # Reapply previous context
order_key = {
co_record.id: index
for index, co_record in enumerate(co_records)
}
for values in values_list:
# filter out inaccessible corecords in case of "cache pollution"
values[field_name] = [id_ for id_ in values[field_name] if id_ in order_key]
values[field_name] = sorted(values[field_name], key=order_key.__getitem__)
if 'context' in field_spec:
co_records = co_records.with_context(**field_spec['context'])
if 'fields' in field_spec:
if field_spec.get('limit') is not None:
limit = field_spec['limit']
ids_to_read = OrderedSet(
id_
for values in values_list
for id_ in values[field_name][:limit]
)
co_records = co_records.browse(ids_to_read)
x2many_data = {
vals['id']: vals
for vals in co_records.web_read(field_spec['fields'])
}
for values in values_list:
values[field_name] = [x2many_data.get(id_) or {'id': id_} for id_ in values[field_name]]
elif field.type in ('reference', 'many2one_reference'):
if not field_spec:
continue
values_by_id = {
vals['id']: vals
for vals in values_list
}
for record in self:
if not record[field_name]:
continue
if field.type == 'reference':
co_record = record[field_name]
else: # field.type == 'many2one_reference'
co_record = self.env[record[field.model_field]].browse(record[field_name])
if 'context' in field_spec:
co_record = co_record.with_context(**field_spec['context'])
if 'fields' in field_spec:
reference_read = co_record.web_read(field_spec['fields'])
if any(fname != 'id' for fname in field_spec['fields']):
# we can infer that if we can read fields for the co-record, it exists
co_record_exists = bool(reference_read)
else:
co_record_exists = co_record.exists()
else:
# If there are no fields to read (field_spec.get('fields') --> None) and we web_read ids, it will
# not actually read the records so we do not know if they exist.
# This ensures the record actually exists
co_record_exists = co_record.exists()
record_values = values_by_id[record.id]
if not co_record_exists:
record_values[field_name] = False
if field.type == 'many2one_reference':
record_values[field.model_field] = False
continue
if 'fields' in field_spec:
record_values[field_name] = reference_read[0]
if field.type == 'reference':
record_values[field_name]['id'] = {
'id': co_record.id,
'model': co_record._name
}
return values_list
@api.model
def web_read_group(self, domain, fields, groupby, limit=None, offset=0, orderby=False,
lazy=True, expand=False, expand_limit=None, expand_orderby=False):
def web_read_group(self, domain, fields, groupby, limit=None, offset=0, orderby=False, lazy=True):
"""
Returns the result of a read_group (and optionally search for and read records inside each
group), and the total number of groups matching the search domain.
Returns the result of a read_group and the total number of groups matching the search domain.
:param domain: search domain
:param fields: list of fields to read (see ``fields``` param of ``read_group``)
@ -92,29 +231,23 @@ class Base(models.AbstractModel):
:param offset: see ``offset`` param of ``read_group``
:param orderby: see ``orderby`` param of ``read_group``
:param lazy: see ``lazy`` param of ``read_group``
:param expand: if true, and groupby only contains one field, read records inside each group
:param expand_limit: maximum number of records to read in each group
:param expand_orderby: order to apply when reading records in each group
:return: {
'groups': array of read groups
'length': total number of groups
}
"""
groups = self._web_read_group(domain, fields, groupby, limit, offset, orderby, lazy, expand,
expand_limit, expand_orderby)
groups = self._web_read_group(domain, fields, groupby, limit, offset, orderby, lazy)
if not groups:
length = 0
elif limit and len(groups) == limit:
# We need to fetch all groups to know the total number
# this cannot be done all at once to avoid MemoryError
length = limit
chunk_size = 100000
while True:
more = len(self.read_group(domain, ['display_name'], groupby, offset=length, limit=chunk_size, lazy=True))
length += more
if more < chunk_size:
break
annoted_groupby = self._read_group_get_annoted_groupby(groupby, lazy=lazy)
length = limit + len(self._read_group(
domain,
groupby=annoted_groupby.values(),
offset=limit,
))
else:
length = len(groups) + offset
return {
@ -123,23 +256,14 @@ class Base(models.AbstractModel):
}
@api.model
def _web_read_group(self, domain, fields, groupby, limit=None, offset=0, orderby=False,
lazy=True, expand=False, expand_limit=None, expand_orderby=False):
def _web_read_group(self, domain, fields, groupby, limit=None, offset=0, orderby=False, lazy=True):
"""
Performs a read_group and optionally a web_search_read for each group.
See ``web_read_group`` for params description.
:returns: array of groups
"""
groups = self.read_group(domain, fields, groupby, offset=offset, limit=limit,
orderby=orderby, lazy=lazy)
if expand and len(groupby) == 1:
for group in groups:
group['__data'] = self.web_search_read(domain=group['__domain'], fields=fields,
offset=0, limit=expand_limit,
order=expand_orderby)
return groups
@api.model
@ -156,8 +280,9 @@ class Base(models.AbstractModel):
:return a dictionary mapping group_by values to dictionaries mapping
progress bar field values to the related number of records
"""
group_by_fname = group_by.partition(':')[0]
field_type = self._fields[group_by_fname].type
group_by_fullname = group_by.partition(':')[0]
group_by_fieldname = group_by_fullname.split(".")[0] # split on "." in case we group on a property
field_type = self._fields[group_by_fieldname].type
if field_type == 'selection':
selection_labels = dict(self.fields_get()[group_by]['selection'])
@ -185,26 +310,40 @@ class Base(models.AbstractModel):
try:
fname = progress_bar['field']
return self.read_group(domain, [fname], [group_by, fname], lazy=False)
except UserError:
except ValueError:
# possibly failed because of grouping on or aggregating non-stored
# field; fallback on alternative implementation
pass
# Workaround to match read_group's infrastructure
# TO DO in master: harmonize this function and readgroup to allow factorization
group_by_name = group_by.partition(':')[0]
group_by_fullname = group_by.partition(':')[0]
group_by_fieldname = group_by_fullname.split(".")[0] # split on "." in case we group on a property
group_by_modifier = group_by.partition(':')[2] or 'month'
records_values = self.search_read(domain or [], [progress_bar['field'], group_by_name])
field_type = self._fields[group_by_name].type
records_values = self.search_read(domain or [], [progress_bar['field'], group_by_fieldname])
field_type = self._fields[group_by_fieldname].type
for record_values in records_values:
group_by_value = record_values.pop(group_by_name)
group_by_value = record_values.pop(group_by_fieldname)
property_name = group_by_fullname.partition('.')[2]
if field_type == "properties" and group_by_value:
group_by_value = next(
(definition['value'] for definition in group_by_value
if definition['name'] == property_name),
False,
)
# Again, imitating what _read_group_format_result and _read_group_prepare_data do
if group_by_value and field_type in ['date', 'datetime']:
locale = get_lang(self.env).code
group_by_value = date_utils.start_of(fields.Datetime.to_datetime(group_by_value), group_by_modifier)
group_by_value = fields.Datetime.to_datetime(group_by_value)
if group_by_modifier != 'week':
# start_of(v, 'week') does not take into account the locale
# to determine the first day of the week; this part is not
# necessary, since the formatting below handles the locale
# as expected, and outputs correct results
group_by_value = date_utils.start_of(group_by_value, group_by_modifier)
group_by_value = pytz.timezone('UTC').localize(group_by_value)
tz_info = None
if field_type == 'datetime' and self._context.get('tz') in pytz.all_timezones:
@ -225,32 +364,6 @@ class Base(models.AbstractModel):
return records_values
##### qweb view hooks #####
@api.model
def qweb_render_view(self, view_id, domain):
assert view_id
return self.env['ir.qweb']._render(
view_id,
{
'model': self,
'domain': domain,
# not necessarily necessary as env is already part of the
# non-minimal qcontext
'context': self.env.context,
'records': lazy(self.search, domain),
})
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
arch, view = super()._get_view(view_id, view_type, **options)
# avoid leaking the raw (un-rendered) template, also avoids bloating
# the response payload for no reason. Only send the root node,
# to send attributes such as `js_class`.
if view_type == 'qweb':
root = arch
arch = etree.Element('qweb', root.attrib)
return arch, view
@api.model
def _search_panel_field_image(self, field_name, **kwargs):
"""
@ -779,6 +892,232 @@ class Base(models.AbstractModel):
return { 'values': field_range, }
def onchange(self, values: Dict, field_names: List[str], fields_spec: Dict):
"""
Perform an onchange on the given fields, and return the result.
:param values: dictionary mapping field names to values on the form view,
giving the current state of modification
:param field_names: names of the modified fields
:param fields_spec: dictionary specifying the fields in the view,
just like the one used by :meth:`web_read`; it is used to format
the resulting values
When creating a record from scratch, the client should call this with an
empty list as ``field_names``. In that case, the method first adds
default values to ``values``, computes the remaining fields, applies
onchange methods to them, and return all the fields in ``fields_spec``.
The result is a dictionary with two optional keys. The key ``"value"``
is used to return field values that should be modified on the caller.
The corresponding value is a dict mapping field names to their value,
in the format of :meth:`web_read`, except for x2many fields, where the
value is a list of commands to be applied on the caller's field value.
The key ``"warning"`` provides a warning message to the caller. The
corresponding value is a dictionary like::
{
"title": "Be careful!", # subject of message
"message": "Blah blah blah.", # full warning message
"type": "dialog", # how to display the warning
}
"""
# this is for tests using `Form`
self.env.flush_all()
env = self.env
cache = env.cache
first_call = not field_names
if any(fname not in self._fields for fname in field_names):
return {}
if first_call:
field_names = [fname for fname in values if fname != 'id']
missing_names = [fname for fname in fields_spec if fname not in values]
defaults = self.default_get(missing_names)
for field_name in missing_names:
values[field_name] = defaults.get(field_name, False)
if field_name in defaults:
field_names.append(field_name)
# prefetch x2many lines: this speeds up the initial snapshot by avoiding
# computing fields on new records as much as possible, as that can be
# costly and is not necessary at all
self.fetch(fields_spec.keys())
for field_name, field_spec in fields_spec.items():
field = self._fields[field_name]
if field.type not in ('one2many', 'many2many'):
continue
sub_fields_spec = field_spec.get('fields') or {}
if sub_fields_spec and values.get(field_name):
# retrieve all line ids in commands
line_ids = OrderedSet(self[field_name].ids)
for cmd in values[field_name]:
if cmd[0] in (Command.UPDATE, Command.LINK):
line_ids.add(cmd[1])
elif cmd[0] == Command.SET:
line_ids.update(cmd[2])
# prefetch stored fields on lines
lines = self[field_name].browse(line_ids)
lines.fetch(sub_fields_spec.keys())
# copy the cache of lines to their corresponding new records;
# this avoids computing computed stored fields on new_lines
new_lines = lines.browse(map(NewId, line_ids))
for field_name in sub_fields_spec:
field = lines._fields[field_name]
line_values = [
field.convert_to_cache(line[field_name], new_line, validate=False)
for new_line, line in zip(new_lines, lines)
]
cache.update(new_lines, field, line_values)
# Isolate changed values, to handle inconsistent data sent from the
# client side: when a form view contains two one2many fields that
# overlap, the lines that appear in both fields may be sent with
# different data. Consider, for instance:
#
# foo_ids: [line with value=1, ...]
# bar_ids: [line with value=1, ...]
#
# If value=2 is set on 'line' in 'bar_ids', the client sends
#
# foo_ids: [line with value=1, ...]
# bar_ids: [line with value=2, ...]
#
# The idea is to put 'foo_ids' in cache first, so that the snapshot
# contains value=1 for line in 'foo_ids'. The snapshot is then updated
# with the value of `bar_ids`, which will contain value=2 on line.
#
# The issue also occurs with other fields. For instance, an onchange on
# a move line has a value for the field 'move_id' that contains the
# values of the move, among which the one2many that contains the line
# itself, with old values!
#
initial_values = dict(values)
changed_values = {fname: initial_values.pop(fname) for fname in field_names}
# do not force delegate fields to False
for parent_name in self._inherits.values():
if not initial_values.get(parent_name, True):
initial_values.pop(parent_name)
# create a new record with initial values
if self:
# fill in the cache of record with the values of self
cache_values = {fname: self[fname] for fname in fields_spec}
record = self.new(cache_values, origin=self)
# apply initial values on top of the values of self
record._update_cache(initial_values)
else:
# set changed values to null in initial_values; not setting them
# triggers default_get() on the new record when creating snapshot0
initial_values.update(dict.fromkeys(field_names, False))
record = self.new(initial_values, origin=self)
# make parent records match with the form values; this ensures that
# computed fields on parent records have all their dependencies at
# their expected value
for field_name in initial_values:
field = self._fields.get(field_name)
if field and field.inherited:
parent_name, field_name = field.related.split('.', 1)
if parent := record[parent_name]:
parent._update_cache({field_name: record[field_name]})
# make a snapshot based on the initial values of record
snapshot0 = RecordSnapshot(record, fields_spec, fetch=(not first_call))
# store changed values in cache; also trigger recomputations based on
# subfields (e.g., line.a has been modified, line.b is computed stored
# and depends on line.a, but line.b is not in the form view)
record._update_cache(changed_values)
# update snapshot0 with changed values
for field_name in field_names:
snapshot0.fetch(field_name)
# Determine which field(s) should be triggered an onchange. On the first
# call, 'names' only contains fields with a default. If 'self' is a new
# line in a one2many field, 'names' also contains the one2many's inverse
# field, and that field may not be in nametree.
todo = list(unique(itertools.chain(field_names, fields_spec))) if first_call else list(field_names)
done = set()
# mark fields to do as modified to trigger recomputations
protected = [self._fields[fname] for fname in field_names]
with self.env.protecting(protected, record):
record.modified(todo)
for field_name in todo:
field = self._fields[field_name]
if field.inherited:
# modifying an inherited field should modify the parent
# record accordingly; because we don't actually assign the
# modified field on the record, the modification on the
# parent record has to be done explicitly
parent = record[field.related.split('.')[0]]
parent[field_name] = record[field_name]
result = {'warnings': OrderedSet()}
# process names in order
while todo:
# apply field-specific onchange methods
for field_name in todo:
record._apply_onchange_methods(field_name, result)
done.add(field_name)
if not env.context.get('recursive_onchanges', True):
break
# determine which fields to process for the next pass
todo = [
field_name
for field_name in fields_spec
if field_name not in done and snapshot0.has_changed(field_name)
]
# make the snapshot with the final values of record
snapshot1 = RecordSnapshot(record, fields_spec)
# determine values that have changed by comparing snapshots
result['value'] = snapshot1.diff(snapshot0, force=first_call)
# format warnings
warnings = result.pop('warnings')
if len(warnings) == 1:
title, message, type_ = warnings.pop()
if not type_:
type_ = 'dialog'
result['warning'] = dict(title=title, message=message, type=type_)
elif len(warnings) > 1:
# concatenate warning titles and messages
title = _("Warnings")
message = '\n\n'.join([warn_title + '\n\n' + warn_message for warn_title, warn_message, warn_type in warnings])
result['warning'] = dict(title=title, message=message, type='dialog')
return result
def web_override_translations(self, values):
"""
This method is used to override all the modal translations of the given fields
with the provided value for each field.
:param values: dictionary of the translations to apply for each field name
ex: { "field_name": "new_value" }
"""
self.ensure_one()
for field_name in values:
field = self._fields[field_name]
if field.translate is True:
translations = {lang: False for lang, _ in self.env['res.lang'].get_installed()}
translations['en_US'] = values[field_name]
translations[self.env.lang or 'en_US'] = values[field_name]
self.update_field_translations(field_name, translations)
class ResCompany(models.Model):
_inherit = 'res.company'
@ -815,3 +1154,114 @@ class ResCompany(models.Model):
b64_val = self._get_asset_style_b64()
if b64_val != asset_attachment.datas:
asset_attachment.write({'datas': b64_val})
class RecordSnapshot(dict):
    """ A dict with the values of a record, following a prefix tree.

    Maps field names to their values as read on ``record``; x2many fields
    are stored recursively as ``{line_id: RecordSnapshot}`` sub-dicts.
    Used to compute the minimal diff (as x2many commands) between two
    states of a record, typically for onchange-like round trips.
    """
    __slots__ = ['record', 'fields_spec']

    def __init__(self, record: BaseModel, fields_spec: Dict, fetch=True):
        """ Snapshot ``record`` for the fields listed in ``fields_spec``.

        :param record: the record to snapshot
        :param fields_spec: dict of field name -> spec; x2many specs may
            contain 'fields' (sub-spec) and 'context' keys
        :param fetch: if False, create an empty snapshot (values can be
            filled later via :meth:`fetch`)
        """
        # put record in dict to include it when comparing snapshots
        super().__init__()
        self.record = record
        self.fields_spec = fields_spec
        if fetch:
            for name in fields_spec:
                self.fetch(name)

    def __eq__(self, other: 'RecordSnapshot'):
        # equal iff both the record and all captured values match
        return self.record == other.record and super().__eq__(other)

    def fetch(self, field_name):
        """ Set the value of field ``name`` from the record's value. """
        if self.record._fields[field_name].type in ('one2many', 'many2many'):
            # x2many fields are serialized as a dict of line snapshots
            lines = self.record[field_name]
            if 'context' in self.fields_spec[field_name]:
                lines = lines.with_context(**self.fields_spec[field_name]['context'])
            sub_fields_spec = self.fields_spec[field_name].get('fields') or {}
            self[field_name] = {line.id: RecordSnapshot(line, sub_fields_spec) for line in lines}
        else:
            self[field_name] = self.record[field_name]

    def has_changed(self, field_name) -> bool:
        """ Return whether a field on the record has changed. """
        # a field never fetched is conservatively considered changed
        if field_name not in self:
            return True
        if self.record._fields[field_name].type not in ('one2many', 'many2many'):
            return self[field_name] != self.record[field_name]
        # x2many: changed if the set of line ids differs, or any captured
        # sub-field of any line snapshot differs from the current value
        return self[field_name].keys() != set(self.record[field_name]._ids) or any(
            line_snapshot.has_changed(subname)
            for line_snapshot in self[field_name].values()
            for subname in self.fields_spec[field_name].get('fields') or {}
        )

    def diff(self, other: 'RecordSnapshot', force=False):
        """ Return the values in ``self`` that differ from ``other``.

        :param other: the snapshot to compare against (may be ``{}``)
        :param force: if True, include all fields regardless of equality
        :return: dict of field name -> value; x2many values are expressed
            as lists of ORM commands (create/update/link/delete/unlink)
        """
        # determine fields to return
        simple_fields_spec = {}
        x2many_fields_spec = {}
        for field_name, field_spec in self.fields_spec.items():
            if field_name == 'id':
                continue
            if not force and other.get(field_name) == self[field_name]:
                continue
            field = self.record._fields[field_name]
            if field.type in ('one2many', 'many2many'):
                x2many_fields_spec[field_name] = field_spec
            else:
                simple_fields_spec[field_name] = field_spec

        # use web_read() for simple fields
        [result] = self.record.web_read(simple_fields_spec)

        # discard the NewId from the dict
        result.pop('id')

        # for x2many fields: serialize value as commands
        for field_name, field_spec in x2many_fields_spec.items():
            commands = []

            self_value = self[field_name]
            other_value = {} if force else other.get(field_name) or {}
            if any(other_value):
                # other may be a snapshot for a real record, adapt its x2many ids
                other_value = {NewId(id_): snap for id_, snap in other_value.items()}

            # commands for removed lines
            field = self.record._fields[field_name]
            remove = Command.delete if field.type == 'one2many' else Command.unlink
            for id_ in other_value:
                if id_ not in self_value:
                    commands.append(remove(id_.origin or id_.ref or 0))

            # commands for modified or extra lines
            for id_, line_snapshot in self_value.items():
                if not force and id_ in other_value:
                    # existing line: check diff and send update
                    line_diff = line_snapshot.diff(other_value[id_])
                    if line_diff:
                        commands.append(Command.update(id_.origin or id_.ref or 0, line_diff))

                elif not id_.origin:
                    # new line: send diff from scratch
                    line_diff = line_snapshot.diff({})
                    commands.append((Command.CREATE, id_.origin or id_.ref or 0, line_diff))

                else:
                    # link line: send data to client
                    base_line = line_snapshot.record._origin
                    [base_data] = base_line.web_read(field_spec.get('fields') or {})
                    commands.append((Command.LINK, base_line.id, base_data))

                    # check diff and send update
                    base_snapshot = RecordSnapshot(base_line, field_spec.get('fields') or {})
                    line_diff = line_snapshot.diff(base_snapshot)
                    if line_diff:
                        commands.append(Command.update(id_.origin, line_diff))

            if commands:
                result[field_name] = commands

        return result

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ResConfigSettings(models.TransientModel):
    _inherit = 'res.config.settings'

    # Custom web-client application name, persisted through the
    # ``web.web_app_name`` system parameter.
    web_app_name = fields.Char(string='Web App Name', config_parameter='web.web_app_name')

View file

@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from base64 import b64decode
from odoo import models
from odoo.tools.facade import Proxy, ProxyAttr, ProxyFunc
_logger = logging.getLogger(__name__)
# ``vobject`` is an optional dependency: when it is missing, vCard file
# generation is disabled instead of breaking the module at import time.
try:
    import vobject
except ImportError:
    _logger.warning("`vobject` Python module not found, vcard file generation disabled. Consider installing this module if you want to generate vcard files")
    vobject = None

if vobject is not None:
    class VBaseProxy(Proxy):
        # Facade over ``vobject.base.VBase`` (a single vCard content line),
        # exposing only the attributes used by _build_vcard().
        _wrapped__ = vobject.base.VBase
        encoding_param = ProxyAttr()
        type_param = ProxyAttr()
        value = ProxyAttr(None)

    class VCardContentsProxy(Proxy):
        # Facade over a component's ``contents`` dict; ``get`` wraps every
        # returned content line in a VBaseProxy.
        _wrapped__ = dict
        __delitem__ = ProxyFunc()
        __contains__ = ProxyFunc()
        get = ProxyFunc(lambda lines: [VBaseProxy(line) for line in lines])

    class VComponentProxy(Proxy):
        # Facade over ``vobject.base.Component`` (the vCard itself):
        # only add/contents/serialize are reachable through the proxy.
        _wrapped__ = vobject.base.Component
        add = ProxyFunc(VBaseProxy)
        contents = ProxyAttr(VCardContentsProxy)
        serialize = ProxyFunc()
class ResPartner(models.Model):
    _inherit = 'res.partner'

    def _build_vcard(self):
        """ Build the partner's vCard.

        :returns: the vCard wrapped in :class:`VComponentProxy`, or
            ``False`` when the ``vobject`` library is not installed
        """
        if not vobject:
            return False
        vcard = vobject.vCard()
        # Name (N): only the family component is filled, falling back on
        # the complete name when ``name`` is not set
        n = vcard.add('n')
        n.value = vobject.vcard.Name(family=self.name or self.complete_name or '')
        if self.title:
            n.value.prefix = self.title.name
        # Formatted Name (FN)
        fn = vcard.add('fn')
        fn.value = self.name or self.complete_name or ''
        # Address (ADR); region/country only set when known
        adr = vcard.add('adr')
        adr.value = vobject.vcard.Address(street=self.street or '', city=self.city or '', code=self.zip or '')
        if self.state_id:
            adr.value.region = self.state_id.name
        if self.country_id:
            adr.value.country = self.country_id.name
        # Email
        if self.email:
            email = vcard.add('email')
            email.value = self.email
            email.type_param = 'INTERNET'
        # Telephone numbers: 'work' for the landline, 'cell' for mobile
        if self.phone:
            tel = vcard.add('tel')
            tel.type_param = 'work'
            tel.value = self.phone
        if self.mobile:
            tel = vcard.add('tel')
            tel.type_param = 'cell'
            tel.value = self.mobile
        # URL
        if self.website:
            url = vcard.add('url')
            url.value = self.website
        # Organisation
        if self.commercial_company_name:
            org = vcard.add('org')
            org.value = [self.commercial_company_name]
        if self.function:
            function = vcard.add('title')
            function.value = self.function
        # Photo: added unconditionally, base64-decoded from avatar_512.
        # NOTE(review): assumes avatar_512 is always non-empty (default
        # avatar) — confirm for partners without an image.
        photo = vcard.add('photo')
        photo.value = b64decode(self.avatar_512)
        photo.encoding_param = 'B'
        photo.type_param = 'JPG'
        return VComponentProxy(vcard)

    def _get_vcard_file(self):
        """ Return the partner's vCard serialized to ``bytes``, or
        ``False`` when vCard generation is unavailable. """
        vcard = self._build_vcard()
        if vcard:
            return vcard.serialize().encode()
        return False

View file

@ -0,0 +1,30 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
from odoo.osv import expression
class ResUsers(models.Model):
    _inherit = "res.users"

    @api.model
    def _name_search(self, name, domain=None, operator='ilike', limit=None, order=None):
        """Name-search users, surfacing the current user first when matched."""
        matches = super()._name_search(name, domain, operator, limit, order)
        if limit is None:
            # unbounded search: return the query untouched
            return matches
        ids = list(matches)
        if self._uid in ids:
            # move the current user to the front of the results
            if ids.index(self._uid) != 0:
                ids.remove(self._uid)
                ids.insert(0, self._uid)
        elif limit and len(ids) == limit:
            # the result set is full: check whether the current user also
            # matches, and if so promote them at the cost of the last entry
            current_user_match = super()._name_search(
                name,
                expression.AND([domain or [], [('id', '=', self._uid)]]),
                operator,
                limit=1,
            )
            if current_user_match:
                ids.pop()
                ids.insert(0, self._uid)
        return ids