mirror of
https://github.com/bringout/oca-ocb-core.git
synced 2026-04-20 13:12:02 +02:00
Initial commit: Core packages
This commit is contained in:
commit
12c29a983b
9512 changed files with 8379910 additions and 0 deletions
|
|
@ -0,0 +1,48 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from . import assetsbundle
|
||||
|
||||
from . import ir_model
|
||||
from . import ir_sequence
|
||||
from . import ir_ui_menu
|
||||
from . import ir_ui_view
|
||||
from . import ir_asset
|
||||
from . import ir_actions
|
||||
from . import ir_actions_report
|
||||
from . import ir_attachment
|
||||
from . import ir_binary
|
||||
from . import ir_cron
|
||||
from . import ir_filters
|
||||
from . import ir_default
|
||||
from . import ir_exports
|
||||
from . import ir_rule
|
||||
from . import ir_config_parameter
|
||||
from . import ir_autovacuum
|
||||
from . import ir_mail_server
|
||||
from . import ir_fields
|
||||
from . import ir_qweb
|
||||
from . import ir_qweb_fields
|
||||
from . import ir_http
|
||||
from . import ir_logging
|
||||
from . import ir_property
|
||||
from . import ir_module
|
||||
from . import ir_demo
|
||||
from . import ir_demo_failure
|
||||
from . import report_layout
|
||||
from . import report_paperformat
|
||||
|
||||
from . import ir_profile
|
||||
from . import image_mixin
|
||||
from . import avatar_mixin
|
||||
|
||||
from . import res_country
|
||||
from . import res_lang
|
||||
from . import res_partner
|
||||
from . import res_bank
|
||||
from . import res_config
|
||||
from . import res_currency
|
||||
from . import res_company
|
||||
from . import res_users
|
||||
from . import res_users_deletion
|
||||
|
||||
from . import decimal_precision
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
1322
odoo-bringout-oca-ocb-base/odoo/addons/base/models/assetsbundle.py
Normal file
1322
odoo-bringout-oca-ocb-base/odoo/addons/base/models/assetsbundle.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,79 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from base64 import b64encode
|
||||
from hashlib import sha512
|
||||
from odoo import models, fields, api
|
||||
from odoo.tools import html_escape, file_open
|
||||
|
||||
|
||||
def get_hsl_from_seed(seed):
    """Deterministically map *seed* to a CSS ``hsl(h, s%, l%)`` color string.

    The same seed always yields the same color, so generated avatars are
    stable across renders.
    """
    digest = sha512(seed.encode()).hexdigest()
    # First digest byte drives the hue over the full color wheel (degrees).
    hue_byte = int(digest[0:2], 16)
    # Second digest byte drives the saturation (percent).
    sat_byte = int(digest[2:4], 16)
    hue = hue_byte * 360 / 255
    # Saturation confined to the 40..70% band: colorful but not flashy.
    sat = sat_byte * ((70 - 40) / 255) + 40
    # Fixed lightness: neither too bright nor too dark.
    lig = 45
    return f'hsl({hue:.0f}, {sat:.0f}%, {lig:.0f}%)'
|
||||
|
||||
|
||||
class AvatarMixin(models.AbstractModel):
    """Mixin adding auto-generated avatar images on top of ``image.mixin``.

    When no image is set, a colored SVG with the record's initial is
    generated (color seeded from the name and creation date), or a static
    placeholder is used for records without a name or id.
    """
    _name = 'avatar.mixin'
    _inherit = ['image.mixin']
    _description = "Avatar Mixin"
    # Field whose value seeds the generated avatar (initial + color).
    _avatar_name_field = "name"

    # all image fields are base64 encoded and PIL-supported
    avatar_1920 = fields.Image("Avatar", compute="_compute_avatar_1920")
    avatar_1024 = fields.Image("Avatar 1024", compute="_compute_avatar_1024")
    avatar_512 = fields.Image("Avatar 512", compute="_compute_avatar_512")
    avatar_256 = fields.Image("Avatar 256", compute="_compute_avatar_256")
    avatar_128 = fields.Image("Avatar 128", compute="_compute_avatar_128")

    def _compute_avatar(self, avatar_field, image_field):
        """Fill ``avatar_field`` from ``image_field``, falling back to a
        generated SVG (named, saved records) or the static placeholder."""
        for record in self:
            avatar = record[image_field]
            if not avatar:
                if record.id and record[record._avatar_name_field]:
                    avatar = record._avatar_generate_svg()
                else:
                    # New (unsaved) or unnamed record: generic placeholder.
                    avatar = b64encode(record._avatar_get_placeholder())
            record[avatar_field] = avatar

    # The lambda form lets subclasses redefine _avatar_name_field and still
    # get correct recomputation triggers.
    @api.depends(lambda self: [self._avatar_name_field, 'image_1920'])
    def _compute_avatar_1920(self):
        self._compute_avatar('avatar_1920', 'image_1920')

    @api.depends(lambda self: [self._avatar_name_field, 'image_1024'])
    def _compute_avatar_1024(self):
        self._compute_avatar('avatar_1024', 'image_1024')

    @api.depends(lambda self: [self._avatar_name_field, 'image_512'])
    def _compute_avatar_512(self):
        self._compute_avatar('avatar_512', 'image_512')

    @api.depends(lambda self: [self._avatar_name_field, 'image_256'])
    def _compute_avatar_256(self):
        self._compute_avatar('avatar_256', 'image_256')

    @api.depends(lambda self: [self._avatar_name_field, 'image_128'])
    def _compute_avatar_128(self):
        self._compute_avatar('avatar_128', 'image_128')

    def _avatar_generate_svg(self):
        """Return a base64-encoded 180x180 SVG avatar: the record name's
        first character on a background colored from a stable seed."""
        initial = html_escape(self[self._avatar_name_field][0].upper())
        # Seed includes create_date so two records with the same name still
        # get distinct colors.
        bgcolor = get_hsl_from_seed(self[self._avatar_name_field] + str(self.create_date.timestamp() if self.create_date else ""))
        return b64encode((
            "<?xml version='1.0' encoding='UTF-8' ?>"
            "<svg height='180' width='180' xmlns='http://www.w3.org/2000/svg' xmlns:xlink='http://www.w3.org/1999/xlink'>"
            f"<rect fill='{bgcolor}' height='180' width='180'/>"
            f"<text fill='#ffffff' font-size='96' text-anchor='middle' x='90' y='125' font-family='sans-serif'>{initial}</text>"
            "</svg>"
        ).encode())

    def _avatar_get_placeholder_path(self):
        """Hook: path (relative to the addons tree) of the placeholder image."""
        return "base/static/img/avatar_grey.png"

    def _avatar_get_placeholder(self):
        """Return the raw bytes of the placeholder image.

        Fix: close the file handle deterministically instead of leaking it
        until garbage collection.
        """
        with file_open(self._avatar_get_placeholder_path(), 'rb') as image_file:
            return image_file.read()
||||
|
|
@ -0,0 +1,73 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
import odoo.addons
|
||||
|
||||
import logging
|
||||
import sys
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_precision(application):
    """Deprecated backward-compatibility shim.

    Historically fields used ``digits=dp.get_precision('Account')``; the
    modern form is ``digits='Account'`` directly, so this simply warns and
    echoes its argument back.
    """
    _logger.warning("Deprecated call to decimal_precision.get_precision(<application>), use digits=<application> instead")
    return application
|
||||
|
||||
|
||||
class DecimalPrecision(models.Model):
    """Configurable number of decimal digits per usage (e.g. prices, weights).

    ``precision_get`` is the hot read path and is ormcached; every write
    path below clears the caches to keep it consistent.
    """
    _name = 'decimal.precision'
    _description = 'Decimal Precision'

    name = fields.Char('Usage', required=True)
    digits = fields.Integer('Digits', required=True, default=2)

    _sql_constraints = [
        ('name_uniq', 'unique (name)', """Only one value can be defined for each given usage!"""),
    ]

    @api.model
    @tools.ormcache('application')
    def precision_get(self, application):
        """Return the number of digits configured for ``application``.

        Falls back to 2 when no record matches. Cached per application;
        pending ORM changes are flushed first so the raw SQL below sees them.
        """
        self.flush_model(['name', 'digits'])
        # Raw SQL (parameterized) to bypass ORM overhead on this hot path.
        self.env.cr.execute('select digits from decimal_precision where name=%s', (application,))
        res = self.env.cr.fetchone()
        return res[0] if res else 2

    @api.model_create_multi
    def create(self, vals_list):
        # Invalidate the precision_get ormcache.
        res = super(DecimalPrecision, self).create(vals_list)
        self.clear_caches()
        return res

    def write(self, data):
        # Invalidate the precision_get ormcache.
        res = super(DecimalPrecision, self).write(data)
        self.clear_caches()
        return res

    def unlink(self):
        # Invalidate the precision_get ormcache.
        res = super(DecimalPrecision, self).unlink()
        self.clear_caches()
        return res

    @api.onchange('digits')
    def _onchange_digits_warning(self):
        """Warn the user in the UI when lowering a precision, since existing
        stored values are NOT recomputed and balances may drift."""
        if self.digits < self._origin.digits:
            return {
                'warning': {
                    'title': _("Warning for %s", self.name),
                    'message': _(
                        "The precision has been reduced for %s.\n"
                        "Note that existing data WON'T be updated by this change.\n\n"
                        "As decimal precisions impact the whole system, this may cause critical issues.\n"
                        "E.g. reducing the precision could disturb your financial balance.\n\n"
                        "Therefore, changing decimal precisions in a running database is not recommended.",
                        self.name,
                    )
                }
            }
|
||||
|
||||
# compatibility for decimal_precision.get_precision(): expose the module in addons namespace
# Legacy code did `from odoo.addons import decimal_precision as dp` (or even
# the pre-rebranding `openerp.addons` path). Alias this module object under
# those historical import paths so such imports keep resolving.
dp = sys.modules['odoo.addons.base.models.decimal_precision']
odoo.addons.decimal_precision = dp
sys.modules['odoo.addons.decimal_precision'] = dp
sys.modules['openerp.addons.decimal_precision'] = dp
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models, fields
|
||||
|
||||
|
||||
class ImageMixin(models.AbstractModel):
    """Mixin providing a master image plus stored, resized variants.

    Only ``image_1920`` is written by users; the smaller variants are
    related fields resized and stored (as attachments) for performance.
    """
    _name = 'image.mixin'
    _description = "Image Mixin"

    # all image fields are base64 encoded and PIL-supported

    # Master image; uploads are downscaled to at most 1920x1920.
    image_1920 = fields.Image("Image", max_width=1920, max_height=1920)

    # resized fields stored (as attachment) for performance
    image_1024 = fields.Image("Image 1024", related="image_1920", max_width=1024, max_height=1024, store=True)
    image_512 = fields.Image("Image 512", related="image_1920", max_width=512, max_height=512, store=True)
    image_256 = fields.Image("Image 256", related="image_1920", max_width=256, max_height=256, store=True)
    image_128 = fields.Image("Image 128", related="image_1920", max_width=128, max_height=128, store=True)
|
||||
898
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_actions.py
Normal file
898
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_actions.py
Normal file
|
|
@ -0,0 +1,898 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import odoo
|
||||
from odoo import api, fields, models, tools, _, Command
|
||||
from odoo.exceptions import MissingError, ValidationError, AccessError
|
||||
from odoo.tools import frozendict
|
||||
from odoo.tools.safe_eval import safe_eval, test_python_expr
|
||||
from odoo.tools.float_utils import float_compare
|
||||
from odoo.http import request
|
||||
|
||||
import base64
|
||||
from collections import defaultdict
|
||||
import functools
|
||||
import logging
|
||||
import contextlib
|
||||
|
||||
from pytz import timezone
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IrActions(models.Model):
    """Base model of all action types (window, URL, server, report, ...).

    Concrete action models inherit from this one and share the
    ``ir_actions`` table via ``_table``.
    """
    _name = 'ir.actions.actions'
    _description = 'Actions'
    _table = 'ir_actions'
    _order = 'name'
    # NOTE(review): presumably forbids sudo'd x2many Command execution on
    # this model — confirm against the ORM's _allow_sudo_commands contract.
    _allow_sudo_commands = False

    name = fields.Char(string='Action Name', required=True, translate=True)
    type = fields.Char(string='Action Type', required=True)
    xml_id = fields.Char(compute='_compute_xml_id', string="External ID")
    help = fields.Html(string='Action Description',
                       help='Optional help text for the users with a description of the target view, such as its usage and purpose.',
                       translate=True)
    binding_model_id = fields.Many2one('ir.model', ondelete='cascade',
                                       help="Setting a value makes this action available in the sidebar for the given model.")
    binding_type = fields.Selection([('action', 'Action'),
                                     ('report', 'Report')],
                                    required=True, default='action')
    binding_view_types = fields.Char(default='list,form')

    def _compute_xml_id(self):
        """Expose each action's external identifier (module.name), if any."""
        res = self.get_external_id()
        for record in self:
            record.xml_id = res.get(record.id)

    @api.model_create_multi
    def create(self, vals_list):
        res = super(IrActions, self).create(vals_list)
        # self.get_bindings() depends on action records
        self.clear_caches()
        return res

    def write(self, vals):
        res = super(IrActions, self).write(vals)
        # self.get_bindings() depends on action records
        self.clear_caches()
        return res

    def unlink(self):
        """unlink ir.action.todo which are related to actions which will be deleted.
           NOTE: ondelete cascade will not work on ir.actions.actions so we will need to do it manually."""
        todos = self.env['ir.actions.todo'].search([('action_id', 'in', self.ids)])
        todos.unlink()
        res = super(IrActions, self).unlink()
        # self.get_bindings() depends on action records
        self.clear_caches()
        return res

    @api.ondelete(at_uninstall=True)
    def _unlink_check_home_action(self):
        # Detach these actions from any user's home action before deletion,
        # including archived users (active_test=False).
        self.env['res.users'].with_context(active_test=False).search([('action_id', 'in', self.ids)]).sudo().write({'action_id': None})

    @api.model
    def _get_eval_context(self, action=None):
        """ evaluation context to pass to safe_eval """
        return {
            'uid': self._uid,
            'user': self.env.user,
            'time': tools.safe_eval.time,
            'datetime': tools.safe_eval.datetime,
            'dateutil': tools.safe_eval.dateutil,
            'timezone': timezone,
            'float_compare': float_compare,
            'b64encode': base64.b64encode,
            'b64decode': base64.b64decode,
            'Command': Command,
        }

    @api.model
    def get_bindings(self, model_name):
        """ Retrieve the list of actions bound to the given model.

           :return: a dict mapping binding types to a list of dict describing
                    actions, where the latter is given by calling the method
                    ``read`` on the action record.
        """
        # _get_bindings() is cached and user-independent; the per-user
        # filtering (groups, model access) happens here, outside the cache.
        result = {}
        for action_type, all_actions in self._get_bindings(model_name).items():
            actions = []
            for action in all_actions:
                action = dict(action)
                groups = action.pop('groups_id', None)
                if groups and not self.user_has_groups(groups):
                    # the user may not perform this action
                    continue
                res_model = action.pop('res_model', None)
                if res_model and not self.env['ir.model.access'].check(
                    res_model,
                    mode='read',
                    raise_exception=False
                ):
                    # the user won't be able to read records
                    continue
                actions.append(action)
            if actions:
                result[action_type] = actions
        return result

    @tools.ormcache('model_name', 'self.env.lang')
    def _get_bindings(self, model_name):
        """Read (as sudo) all actions bound to ``model_name``, grouped by
        binding type. Cached per model and language; returns immutable
        frozendict/tuple structures so cached values cannot be mutated."""
        cr = self.env.cr

        # discard unauthorized actions, and read action definitions
        result = defaultdict(list)

        # Flush pending ORM writes so the raw SQL sees a consistent state.
        self.env.flush_all()
        cr.execute("""
            SELECT a.id, a.type, a.binding_type
              FROM ir_actions a
              JOIN ir_model m ON a.binding_model_id = m.id
             WHERE m.model = %s
          ORDER BY a.id
        """, [model_name])
        for action_id, action_model, binding_type in cr.fetchall():
            try:
                action = self.env[action_model].sudo().browse(action_id)
                fields = ['name', 'binding_view_types']
                for field in ('groups_id', 'res_model', 'sequence'):
                    if field in action._fields:
                        fields.append(field)
                action = action.read(fields)[0]
                if action.get('groups_id'):
                    # Store groups as comma-separated external ids so
                    # get_bindings() can check them with user_has_groups().
                    groups = self.env['res.groups'].browse(action['groups_id'])
                    action['groups_id'] = ','.join(ext_id for ext_id in groups._ensure_xml_id().values())
                result[binding_type].append(frozendict(action))
            except (MissingError):
                # Stale row pointing at a deleted concrete action: skip it.
                continue

        # sort actions by their sequence if sequence available
        if result.get('action'):
            result['action'] = tuple(sorted(result['action'], key=lambda vals: vals.get('sequence', 0)))
        return frozendict(result)

    @api.model
    def _for_xml_id(self, full_xml_id):
        """ Returns the action content for the provided xml_id

        :param xml_id: the namespace-less id of the action (the @id
                       attribute from the XML file)
        :return: A read() view of the ir.actions.action safe for web use
        """
        record = self.env.ref(full_xml_id)
        # The xml id must reference an action of (a subclass of) this model.
        assert isinstance(self.env[record._name], self.env.registry[self._name])
        return record._get_action_dict()

    def _get_action_dict(self):
        """ Returns the action content for the provided action record.
        """
        self.ensure_one()
        readable_fields = self._get_readable_fields()
        # Read as sudo, then keep only the web-safe whitelisted fields.
        return {
            field: value
            for field, value in self.sudo().read()[0].items()
            if field in readable_fields
        }

    def _get_readable_fields(self):
        """ return the list of fields that are safe to read

        Fetched via /web/action/load or _for_xml_id method
        Only fields used by the web client should included
        Accessing content useful for the server-side must
        be done manually with superuser
        """
        return {
            "binding_model_id", "binding_type", "binding_view_types",
            "display_name", "help", "id", "name", "type", "xml_id",
        }
|
||||
|
||||
|
||||
class IrActionsActWindow(models.Model):
    """Window actions: open a model in one or more views (tree, form, ...)."""
    _name = 'ir.actions.act_window'
    _description = 'Action Window'
    _table = 'ir_act_window'
    _inherit = 'ir.actions.actions'
    _order = 'name'
    _allow_sudo_commands = False

    @api.constrains('res_model', 'binding_model_id')
    def _check_model(self):
        """Reject actions whose target or binding model is not registered."""
        for action in self:
            if action.res_model not in self.env:
                raise ValidationError(_('Invalid model name %r in action definition.', action.res_model))
            if action.binding_model_id and action.binding_model_id.model not in self.env:
                raise ValidationError(_('Invalid model name %r in action definition.', action.binding_model_id.model))

    @api.depends('view_ids.view_mode', 'view_mode', 'view_id.type')
    def _compute_views(self):
        """ Compute an ordered list of the specific view modes that should be
            enabled when displaying the result of this action, along with the
            ID of the specific view to use for each mode, if any were required.

            This function hides the logic of determining the precedence between
            the view_modes string, the view_ids o2m, and the view_id m2o that
            can be set on the action.
        """
        for act in self:
            act.views = [(view.view_id.id, view.view_mode) for view in act.view_ids]
            got_modes = [view.view_mode for view in act.view_ids]
            all_modes = act.view_mode.split(',')
            missing_modes = [mode for mode in all_modes if mode not in got_modes]
            if missing_modes:
                if act.view_id.type in missing_modes:
                    # reorder missing modes to put view_id first if present
                    missing_modes.remove(act.view_id.type)
                    act.views.append((act.view_id.id, act.view_id.type))
                act.views.extend([(False, mode) for mode in missing_modes])

    @api.constrains('view_mode')
    def _check_view_mode(self):
        """view_mode must be a comma-separated list of unique, space-free modes."""
        for rec in self:
            modes = rec.view_mode.split(',')
            if len(modes) != len(set(modes)):
                raise ValidationError(_('The modes in view_mode must not be duplicated: %s', modes))
            if ' ' in modes:
                raise ValidationError(_('No spaces allowed in view_mode: %r', modes))

    @api.depends('res_model', 'search_view_id')
    def _compute_search_view(self):
        """Serialize the search view architecture of the target model."""
        for act in self:
            fvg = self.env[act.res_model].get_view(act.search_view_id.id, 'search')
            act.search_view = str(fvg)

    type = fields.Char(default="ir.actions.act_window")
    view_id = fields.Many2one('ir.ui.view', string='View Ref.', ondelete='set null')
    domain = fields.Char(string='Domain Value',
                         help="Optional domain filtering of the destination data, as a Python expression")
    # NOTE(review): the dict default on a Char field relies on the ORM
    # stringifying it — confirm before changing.
    context = fields.Char(string='Context Value', default={}, required=True,
                          help="Context dictionary as Python expression, empty by default (Default: {})")
    res_id = fields.Integer(string='Record ID', help="Database ID of record to open in form view, when ``view_mode`` is set to 'form' only")
    res_model = fields.Char(string='Destination Model', required=True,
                            help="Model name of the object to open in the view window")
    target = fields.Selection([('current', 'Current Window'), ('new', 'New Window'), ('inline', 'Inline Edit'), ('fullscreen', 'Full Screen'), ('main', 'Main action of Current Window')], default="current", string='Target Window')
    view_mode = fields.Char(required=True, default='tree,form',
                            help="Comma-separated list of allowed view modes, such as 'form', 'tree', 'calendar', etc. (Default: tree,form)")
    usage = fields.Char(string='Action Usage',
                        help="Used to filter menu and home actions from the user form.")
    view_ids = fields.One2many('ir.actions.act_window.view', 'act_window_id', string='No of Views')
    views = fields.Binary(compute='_compute_views',
                          help="This function field computes the ordered list of views that should be enabled "
                               "when displaying the result of an action, federating view mode, views and "
                               "reference view. The result is returned as an ordered list of pairs (view_id,view_mode).")
    limit = fields.Integer(default=80, help='Default limit for the list view')
    groups_id = fields.Many2many('res.groups', 'ir_act_window_group_rel',
                                 'act_id', 'gid', string='Groups')
    search_view_id = fields.Many2one('ir.ui.view', string='Search View Ref.')
    filter = fields.Boolean()
    search_view = fields.Text(compute='_compute_search_view')

    def read(self, fields=None, load='_classic_read'):
        """ call the method get_empty_list_help of the model and set the window action help message
        """
        result = super(IrActionsActWindow, self).read(fields, load=load)
        if not fields or 'help' in fields:
            for values in result:
                model = values.get('res_model')
                if model in self.env:
                    eval_ctx = dict(self.env.context)
                    try:
                        ctx = safe_eval(values.get('context', '{}'), eval_ctx)
                    except Exception:
                        # Fix: narrowed from a bare `except:` so that
                        # KeyboardInterrupt/SystemExit are no longer swallowed;
                        # a malformed context still degrades to {} (best effort).
                        ctx = {}
                    values['help'] = self.with_context(**ctx).env[model].get_empty_list_help(values.get('help', ''))
        return result

    @api.model_create_multi
    def create(self, vals_list):
        # _existing() is cached; invalidate before inserting new rows.
        self.clear_caches()
        for vals in vals_list:
            if not vals.get('name') and vals.get('res_model'):
                # Default the action name to the target model's description.
                vals['name'] = self.env[vals['res_model']]._description
        return super(IrActionsActWindow, self).create(vals_list)

    def unlink(self):
        # _existing() is cached; invalidate before deleting rows.
        self.clear_caches()
        return super(IrActionsActWindow, self).unlink()

    def exists(self):
        """Filter the recordset against the cached set of existing row ids."""
        ids = self._existing()
        existing = self.filtered(lambda rec: rec.id in ids)
        return existing

    @api.model
    @tools.ormcache()
    def _existing(self):
        """Return the cached set of all ids currently in the table.

        The %-interpolated table name is safe here: it comes from the model
        definition, not from user input.
        """
        self._cr.execute("SELECT id FROM %s" % self._table)
        return {row[0] for row in self._cr.fetchall()}

    def _get_readable_fields(self):
        return super()._get_readable_fields() | {
            "context", "domain", "filter", "groups_id", "limit", "res_id",
            "res_model", "search_view", "search_view_id", "target", "view_id",
            "view_mode", "views",
            # `flags` is not a real field of ir.actions.act_window but is used
            # to give the parameters to generate the action
            "flags"
        }
|
||||
|
||||
|
||||
# Selection values for act_window view descriptors: the (technical name,
# label) pairs of view types the web client can render.
VIEW_TYPES = [
    ('tree', 'Tree'),
    ('form', 'Form'),
    ('graph', 'Graph'),
    ('pivot', 'Pivot'),
    ('calendar', 'Calendar'),
    ('gantt', 'Gantt'),
    ('kanban', 'Kanban'),
]
|
||||
|
||||
|
||||
class IrActionsActWindowView(models.Model):
    """Ordered (view mode, specific view) descriptor attached to a window
    action; consumed by ``ir.actions.act_window._compute_views``."""
    _name = 'ir.actions.act_window.view'
    _description = 'Action Window View'
    _table = 'ir_act_window_view'
    _rec_name = 'view_id'
    _order = 'sequence,id'
    _allow_sudo_commands = False

    sequence = fields.Integer()
    view_id = fields.Many2one('ir.ui.view', string='View')
    view_mode = fields.Selection(VIEW_TYPES, string='View Type', required=True)
    act_window_id = fields.Many2one('ir.actions.act_window', string='Action', ondelete='cascade')
    multi = fields.Boolean(string='On Multiple Doc.', help="If set to true, the action will not be displayed on the right toolbar of a form view.")

    def _auto_init(self):
        # Enforce at the database level that an action declares each view
        # mode at most once.
        res = super(IrActionsActWindowView, self)._auto_init()
        tools.create_unique_index(self._cr, 'act_window_view_unique_mode_per_action',
                                  self._table, ['act_window_id', 'view_mode'])
        return res
|
||||
|
||||
|
||||
class IrActionsActWindowclose(models.Model):
    """Action that instructs the web client to close the current dialog or
    window; shares the base ``ir_actions`` table."""
    _name = 'ir.actions.act_window_close'
    _description = 'Action Window Close'
    _inherit = 'ir.actions.actions'
    _table = 'ir_actions'
    _allow_sudo_commands = False

    type = fields.Char(default='ir.actions.act_window_close')

    def _get_readable_fields(self):
        return super()._get_readable_fields() | {
            # 'effect' and 'infos' are not real fields of `ir.actions.act_window_close` but they are
            # used to display the rainbowman ('effect') and waited by the action_service ('infos').
            "effect", "infos"
        }
|
||||
|
||||
|
||||
class IrActionsActUrl(models.Model):
    """Action that redirects the browser to a URL, either in the current
    window ('self') or a new one ('new')."""
    _name = 'ir.actions.act_url'
    _description = 'Action URL'
    _table = 'ir_act_url'
    _inherit = 'ir.actions.actions'
    _order = 'name'
    _allow_sudo_commands = False

    type = fields.Char(default='ir.actions.act_url')
    url = fields.Text(string='Action URL', required=True)
    target = fields.Selection([('new', 'New Window'), ('self', 'This Window')],
                              string='Action Target', default='new', required=True)

    def _get_readable_fields(self):
        return super()._get_readable_fields() | {
            "target", "url",
        }
|
||||
|
||||
|
||||
class IrActionsServer(models.Model):
|
||||
""" Server actions model. Server action work on a base model and offer various
|
||||
type of actions that can be executed automatically, for example using base
|
||||
action rules, of manually, by adding the action in the 'More' contextual
|
||||
menu.
|
||||
|
||||
Since Odoo 8.0 a button 'Create Menu Action' button is available on the
|
||||
action form view. It creates an entry in the More menu of the base model.
|
||||
This allows to create server actions and run them in mass mode easily through
|
||||
the interface.
|
||||
|
||||
The available actions are :
|
||||
|
||||
- 'Execute Python Code': a block of python code that will be executed
|
||||
- 'Create a new Record': create a new record with new values
|
||||
- 'Write on a Record': update the values of a record
|
||||
- 'Execute several actions': define an action that triggers several other
|
||||
server actions
|
||||
"""
|
||||
_name = 'ir.actions.server'
|
||||
_description = 'Server Actions'
|
||||
_table = 'ir_act_server'
|
||||
_inherit = 'ir.actions.actions'
|
||||
_order = 'sequence,name'
|
||||
_allow_sudo_commands = False
|
||||
|
||||
DEFAULT_PYTHON_CODE = """# Available variables:
|
||||
# - env: Odoo Environment on which the action is triggered
|
||||
# - model: Odoo Model of the record on which the action is triggered; is a void recordset
|
||||
# - record: record on which the action is triggered; may be void
|
||||
# - records: recordset of all records on which the action is triggered in multi-mode; may be void
|
||||
# - time, datetime, dateutil, timezone: useful Python libraries
|
||||
# - float_compare: Odoo function to compare floats based on specific precisions
|
||||
# - log: log(message, level='info'): logging function to record debug information in ir.logging table
|
||||
# - UserError: Warning Exception to use with raise
|
||||
# - Command: x2Many commands namespace
|
||||
# To return an action, assign: action = {...}\n\n\n\n"""
|
||||
|
||||
type = fields.Char(default='ir.actions.server')
|
||||
usage = fields.Selection([
|
||||
('ir_actions_server', 'Server Action'),
|
||||
('ir_cron', 'Scheduled Action')], string='Usage',
|
||||
default='ir_actions_server', required=True)
|
||||
state = fields.Selection([
|
||||
('code', 'Execute Python Code'),
|
||||
('object_create', 'Create a new Record'),
|
||||
('object_write', 'Update the Record'),
|
||||
('multi', 'Execute several actions')], string='Action To Do',
|
||||
default='object_write', required=True, copy=True,
|
||||
help="Type of server action. The following values are available:\n"
|
||||
"- 'Execute Python Code': a block of python code that will be executed\n"
|
||||
"- 'Create a new Record': create a new record with new values\n"
|
||||
"- 'Update a Record': update the values of a record\n"
|
||||
"- 'Execute several actions': define an action that triggers several other server actions\n"
|
||||
"- 'Send Email': post a message, a note or send an email (Discuss)\n"
|
||||
"- 'Add Followers': add followers to a record (Discuss)\n"
|
||||
"- 'Create Next Activity': create an activity (Discuss)\n"
|
||||
"- 'Send SMS Text Message': send SMS, log them on documents (SMS)")
|
||||
# Generic
|
||||
sequence = fields.Integer(default=5,
|
||||
help="When dealing with multiple actions, the execution order is "
|
||||
"based on the sequence. Low number means high priority.")
|
||||
model_id = fields.Many2one('ir.model', string='Model', required=True, ondelete='cascade', index=True,
|
||||
help="Model on which the server action runs.")
|
||||
model_name = fields.Char(related='model_id.model', string='Model Name', readonly=True, store=True)
|
||||
# Python code
|
||||
code = fields.Text(string='Python Code', groups='base.group_system',
|
||||
default=DEFAULT_PYTHON_CODE,
|
||||
help="Write Python code that the action will execute. Some variables are "
|
||||
"available for use; help about python expression is given in the help tab.")
|
||||
# Multi
|
||||
child_ids = fields.Many2many('ir.actions.server', 'rel_server_actions', 'server_id', 'action_id',
|
||||
string='Child Actions', help='Child server actions that will be executed. Note that the last return returned action value will be used as global return value.')
|
||||
# Create
|
||||
crud_model_id = fields.Many2one(
|
||||
'ir.model', string='Target Model',
|
||||
compute='_compute_crud_model_id', readonly=False, store=True,
|
||||
help="Model for record creation / update. Set this field only to specify a different model than the base model.")
|
||||
crud_model_name = fields.Char(related='crud_model_id.model', string='Target Model Name', readonly=True)
|
||||
link_field_id = fields.Many2one(
|
||||
'ir.model.fields', string='Link Field',
|
||||
compute='_compute_link_field_id', readonly=False, store=True,
|
||||
help="Provide the field used to link the newly created record on the record used by the server action.")
|
||||
fields_lines = fields.One2many('ir.server.object.lines', 'server_id', string='Value Mapping', copy=True)
|
||||
groups_id = fields.Many2many('res.groups', 'ir_act_server_group_rel',
|
||||
'act_id', 'gid', string='Groups')
|
||||
|
||||
@api.onchange('model_id')
|
||||
def _compute_crud_model_id(self):
|
||||
invalid = self.filtered(lambda act: act.crud_model_id != act.model_id)
|
||||
if invalid:
|
||||
invalid.crud_model_id = False
|
||||
|
||||
@api.depends('model_id')
|
||||
def _compute_link_field_id(self):
|
||||
invalid = self.filtered(lambda act: act.link_field_id.model_id != act.model_id)
|
||||
if invalid:
|
||||
invalid.link_field_id = False
|
||||
|
||||
@api.constrains('code')
|
||||
def _check_python_code(self):
|
||||
for action in self.sudo().filtered('code'):
|
||||
msg = test_python_expr(expr=action.code.strip(), mode="exec")
|
||||
if msg:
|
||||
raise ValidationError(msg)
|
||||
|
||||
@api.constrains('child_ids')
|
||||
def _check_recursion(self):
|
||||
if not self._check_m2m_recursion('child_ids'):
|
||||
raise ValidationError(_('Recursion found in child server actions'))
|
||||
|
||||
def _get_readable_fields(self):
|
||||
return super()._get_readable_fields() | {
|
||||
"groups_id", "model_name",
|
||||
}
|
||||
|
||||
    def _get_runner(self):
        """Resolve the method implementing this action's type (``state``).

        Lookup order: ``_run_action_<state>_multi`` (recordset-aware), then
        the deprecated public ``run_action_<state>_multi``, then the
        single-record variants ``_run_action_<state>`` / ``run_action_<state>``.

        :returns: tuple ``(fn, multi)`` where ``fn`` is the resolved function
            (or None when the type has no runner) and ``multi`` tells whether
            it handles several records in one call.
        """
        multi = True
        # look the method up on the registry class so overrides from every
        # loaded module are taken into account
        t = self.env.registry[self._name]
        fn = getattr(t, f'_run_action_{self.state}_multi', None)\
            or getattr(t, f'run_action_{self.state}_multi', None)
        if not fn:
            multi = False
            fn = getattr(t, f'_run_action_{self.state}', None)\
                or getattr(t, f'run_action_{self.state}', None)
        if fn and fn.__name__.startswith('run_action_'):
            # legacy public runners were unbound; pre-bind the action record
            fn = functools.partial(fn, self)
        return fn, multi
|
||||
def _register_hook(self):
|
||||
super()._register_hook()
|
||||
|
||||
for cls in self.env.registry[self._name].mro():
|
||||
for symbol in vars(cls).keys():
|
||||
if symbol.startswith('run_action_'):
|
||||
_logger.warning(
|
||||
"RPC-public action methods are deprecated, found %r (in class %s.%s)",
|
||||
symbol, cls.__module__, cls.__name__
|
||||
)
|
||||
|
||||
def create_action(self):
|
||||
""" Create a contextual action for each server action. """
|
||||
for action in self:
|
||||
action.write({'binding_model_id': action.model_id.id,
|
||||
'binding_type': 'action'})
|
||||
return True
|
||||
|
||||
def unlink_action(self):
|
||||
""" Remove the contextual actions created for the server actions. """
|
||||
self.check_access_rights('write', raise_exception=True)
|
||||
self.filtered('binding_model_id').write({'binding_model_id': False})
|
||||
return True
|
||||
|
||||
    def _run_action_code_multi(self, eval_context):
        """Execute the action's Python code against *eval_context*.

        :returns: the ``action`` binding set by the executed code, if any.
        """
        # nocopy=True lets the code mutate eval_context in place, so an
        # assigned 'action' variable survives the call
        safe_eval(self.code.strip(), eval_context, mode="exec", nocopy=True, filename=str(self))  # nocopy allows to return 'action'
        return eval_context.get('action')
|
||||
def _run_action_multi(self, eval_context=None):
|
||||
res = False
|
||||
for act in self.child_ids.sorted():
|
||||
res = act.run() or res
|
||||
return res
|
||||
|
||||
    def _run_action_object_write(self, eval_context=None):
        """Apply specified write changes to active_id.

        Values come from the action's ``fields_lines`` mapping, evaluated
        against *eval_context*. During an onchange (``onchange_self`` in the
        context) the pseudo-record is updated in cache only; otherwise the
        record pointed to by ``active_id`` is written to the database.
        """
        vals = self.fields_lines.eval_value(eval_context=eval_context)
        # {field name: evaluated value}
        res = {line.col1.name: vals[line.id] for line in self.fields_lines}

        if self._context.get('onchange_self'):
            record_cached = self._context['onchange_self']
            for field, new_value in res.items():
                record_cached[field] = new_value
        else:
            self.env[self.model_id.model].browse(self._context.get('active_id')).write(res)
||||
    def _run_action_object_create(self, eval_context=None):
        """Create specified model object with specified values.

        If applicable, link active_id.<self.link_field_id> to the new record.
        """
        # evaluate the value-mapping lines into {field name: value}
        vals = self.fields_lines.eval_value(eval_context=eval_context)
        res = {line.col1.name: vals[line.id] for line in self.fields_lines}

        res = self.env[self.crud_model_id.model].create(res)

        if self.link_field_id:
            record = self.env[self.model_id.model].browse(self._context.get('active_id'))
            if self.link_field_id.ttype in ['one2many', 'many2many']:
                # x2many links are expressed with ORM commands
                record.write({self.link_field_id.name: [Command.link(res.id)]})
            else:
                record.write({self.link_field_id.name: res.id})
||||
    def _get_eval_context(self, action=None):
        """ Prepare the context used when evaluating python code, like the
        python formulas or code server actions.

        :param action: the current server action
        :type action: browse record
        :returns: dict -- evaluation context given to (safe_)safe_eval """
        def log(message, level="info"):
            # write the log entry through a dedicated cursor so it survives a
            # rollback of the action's own transaction
            with self.pool.cursor() as cr:
                cr.execute("""
                    INSERT INTO ir_logging(create_date, create_uid, type, dbname, name, level, message, path, line, func)
                    VALUES (NOW() at time zone 'UTC', %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, (self.env.uid, 'server', self._cr.dbname, __name__, level, message, "action", action.id, action.name))

        eval_context = super(IrActionsServer, self)._get_eval_context(action=action)
        model_name = action.model_id.sudo().model
        model = self.env[model_name]
        record = None
        records = None
        # only expose record/records when the caller's active model matches
        # the action's model
        if self._context.get('active_model') == model_name and self._context.get('active_id'):
            record = model.browse(self._context['active_id'])
        if self._context.get('active_model') == model_name and self._context.get('active_ids'):
            records = model.browse(self._context['active_ids'])
        if self._context.get('onchange_self'):
            # onchange pseudo-record takes precedence over active_id
            record = self._context['onchange_self']
        eval_context.update({
            # orm
            'env': self.env,
            'model': model,
            # Exceptions
            'Warning': odoo.exceptions.Warning,
            'UserError': odoo.exceptions.UserError,
            # record
            'record': record,
            'records': records,
            # helpers
            'log': log,
        })
        return eval_context
|
||||
    def run(self):
        """ Runs the server action. For each server action, the
        :samp:`_run_action_{TYPE}[_multi]` method is called. This allows easy
        overriding of the server actions.

        The ``_multi`` suffix means the runner can operate on multiple records,
        otherwise if there are multiple records the runner will be called once
        for each.

        The call context should contain the following keys:

        active_id
            id of the current object (single mode)
        active_model
            current model that should equal the action's model
        active_ids (optional)
            ids of the current records (mass mode). If ``active_ids`` and
            ``active_id`` are present, ``active_ids`` is given precedence.
        :return: an ``action_id`` to be executed, or ``False`` if finished
          correctly without return action
        """
        res = False
        for action in self.sudo():
            # access control: either the user belongs to one of the action's
            # groups, or (no groups configured) he needs write access on the
            # action's model
            action_groups = action.groups_id
            if action_groups:
                if not (action_groups & self.env.user.groups_id):
                    raise AccessError(_("You don't have enough access rights to run this action."))
            else:
                try:
                    self.env[action.model_name].check_access_rights("write")
                except AccessError:
                    _logger.warning("Forbidden server action %r executed while the user %s does not have access to %s.",
                        action.name, self.env.user.login, action.model_name,
                    )
                    raise

            eval_context = self._get_eval_context(action)
            # collect the records the action will touch, to check record rules
            records = eval_context.get('record') or eval_context['model']
            records |= eval_context.get('records') or eval_context['model']
            if records:
                try:
                    records.check_access_rule('write')
                except AccessError:
                    _logger.warning("Forbidden server action %r executed while the user %s does not have access to %s.",
                        action.name, self.env.user.login, records,
                    )
                    raise

            runner, multi = action._get_runner()
            if runner and multi:
                # call the multi method
                run_self = action.with_context(eval_context['env'].context)
                res = runner(run_self, eval_context=eval_context)
            elif runner:
                active_id = self._context.get('active_id')
                if not active_id and self._context.get('onchange_self'):
                    active_id = self._context['onchange_self']._origin.id
                    if not active_id:  # onchange on new record
                        res = runner(action, eval_context=eval_context)
                active_ids = self._context.get('active_ids', [active_id] if active_id else [])
                for active_id in active_ids:
                    # run context dedicated to a particular active_id
                    run_self = action.with_context(active_ids=[active_id], active_id=active_id)
                    eval_context["env"].context = run_self._context
                    # rebind record/records so single-record runners see the
                    # current record only
                    eval_context['records'] = eval_context['record'] = records.browse(active_id)
                    res = runner(run_self, eval_context=eval_context)
            else:
                _logger.warning(
                    "Found no way to execute server action %r of type %r, ignoring it. "
                    "Verify that the type is correct or add a method called "
                    "`_run_action_<type>` or `_run_action_<type>_multi`.",
                    action.name, action.state
                )
        return res or False
|
||||
|
||||
class IrServerObjectLines(models.Model):
    """Field/value pairs used by create/write server actions."""
    _name = 'ir.server.object.lines'
    _description = 'Server Action value mapping'
    _allow_sudo_commands = False

    server_id = fields.Many2one('ir.actions.server', string='Related Server Action', ondelete='cascade')
    # target field that will receive the evaluated value
    col1 = fields.Many2one('ir.model.fields', string='Field', required=True, ondelete='cascade')
    value = fields.Text(required=True, help="Expression containing a value specification. \n"
                                            "When Formula type is selected, this field may be a Python expression "
                                            " that can use the same values as for the code field on the server action.\n"
                                            "If Value type is selected, the value will be used directly without evaluation.")
    evaluation_type = fields.Selection([
        ('value', 'Value'),
        ('reference', 'Reference'),
        ('equation', 'Python expression')
    ], 'Evaluation Type', default='value', required=True, change_default=True)
    # UI helper exposing `value` as a record reference (kept in sync both ways)
    resource_ref = fields.Reference(
        string='Record', selection='_selection_target_model',
        compute='_compute_resource_ref', inverse='_set_resource_ref')

    @api.model
    def _selection_target_model(self):
        # any model can be the target of a reference line
        return [(model.model, model.name) for model in self.env['ir.model'].sudo().search([])]

    @api.depends('col1.relation', 'value', 'evaluation_type')
    def _compute_resource_ref(self):
        """Derive the reference widget value from the stored text value.

        When the stored id is not an integer or points to a record that no
        longer exists, fall back to the first record of the relation (or 0).
        """
        for line in self:
            if line.evaluation_type in ['reference', 'value'] and line.col1 and line.col1.relation:
                value = line.value or ''
                try:
                    value = int(value)
                    if not self.env[line.col1.relation].browse(value).exists():
                        # stale id: pick any existing record as placeholder
                        record = list(self.env[line.col1.relation]._search([], limit=1))
                        value = record[0] if record else 0
                except ValueError:
                    # non-numeric value (e.g. an equation): same fallback
                    record = list(self.env[line.col1.relation]._search([], limit=1))
                    value = record[0] if record else 0
                line.resource_ref = '%s,%s' % (line.col1.relation, value)
            else:
                line.resource_ref = False

    @api.constrains('col1', 'evaluation_type')
    def _raise_many2many_error(self):
        pass  # TODO: remove in master

    @api.onchange('resource_ref')
    def _set_resource_ref(self):
        # inverse of _compute_resource_ref: store the picked record's id
        for line in self.filtered(lambda line: line.evaluation_type == 'reference'):
            if line.resource_ref:
                line.value = str(line.resource_ref.id)

    def eval_value(self, eval_context=None):
        """Evaluate each line's textual value into the value to write.

        :param eval_context: evaluation context for 'equation' lines
        :returns: dict mapping line id to the evaluated value; many2many
            lines targeting the same column share a single command list
        """
        result = {}
        m2m_exprs = defaultdict(list)
        for line in self:
            expr = line.value
            if line.evaluation_type == 'equation':
                expr = safe_eval(line.value, eval_context)
            elif line.col1.ttype in ['many2one', 'integer']:
                try:
                    expr = int(line.value)
                except Exception:
                    # keep the raw text if it is not an integer
                    pass
            elif line.col1.ttype == 'many2many':
                with contextlib.suppress(Exception):
                    # if multiple lines target the same column, they need to exist in the same list
                    expr = m2m_exprs[line.col1]
                    expr.append(Command.link(int(line.value)))
            elif line.col1.ttype == 'float':
                with contextlib.suppress(Exception):
                    expr = float(line.value)
            result[line.id] = expr
        return result
|
||||
|
||||
class IrActionsTodo(models.Model):
    """
    Configuration Wizards
    """
    _name = 'ir.actions.todo'
    _description = "Configuration Wizards"
    _rec_name = 'action_id'
    _order = "sequence, id"
    _allow_sudo_commands = False

    action_id = fields.Many2one('ir.actions.actions', string='Action', required=True, index=True)
    sequence = fields.Integer(default=10)
    state = fields.Selection([('open', 'To Do'), ('done', 'Done')], string='Status', default='open', required=True)
    name = fields.Char()

    @api.model_create_multi
    def create(self, vals_list):
        """Create todos and keep at most one in the 'open' state.

        ``ensure_one_open_todo`` is idempotent, so it is called at most once
        per batch instead of once per created open record.
        """
        todos = super().create(vals_list)
        if any(todo.state == "open" for todo in todos):
            self.ensure_one_open_todo()
        return todos

    def write(self, vals):
        """Write values and re-enforce the single-open-todo invariant."""
        res = super().write(vals)
        if vals.get('state', '') == 'open':
            self.ensure_one_open_todo()
        return res

    @api.model
    def ensure_one_open_todo(self):
        """Close every open todo except the first one (by sequence, id desc)."""
        # offset=1 skips the todo that is allowed to stay open
        open_todo = self.search([('state', '=', 'open')], order='sequence asc, id desc', offset=1)
        if open_todo:
            open_todo.write({'state': 'done'})

    def unlink(self):
        """Delete todos, preserving the special base.open_menu todo."""
        if self:
            try:
                todo_open_menu = self.env.ref('base.open_menu')
                # don't remove base.open_menu todo but set its original action
                if todo_open_menu in self:
                    todo_open_menu.action_id = self.env.ref('base.action_client_base_menu').id
                    self -= todo_open_menu
            except ValueError:
                # base.open_menu not installed: nothing special to preserve
                pass
        return super().unlink()

    def action_launch(self):
        """ Launch Action of Wizard

        :returns: the action description dict to execute client side
        """
        self.ensure_one()

        self.write({'state': 'done'})

        # Load action
        action_type = self.action_id.type
        action = self.env[action_type].browse(self.action_id.id)

        result = action.read()[0]
        if action_type != 'ir.actions.act_window':
            return result
        result.setdefault('context', '{}')

        # Open a specific record when res_id is provided in the context
        ctx = safe_eval(result['context'], {'user': self.env.user})
        if ctx.get('res_id'):
            result['res_id'] = ctx.pop('res_id')

        # disable log for automatic wizards
        ctx['disable_log'] = True

        result['context'] = ctx

        return result

    def action_open(self):
        """ Sets configuration wizard in TODO state"""
        return self.write({'state': 'open'})
|
||||
|
||||
class IrActionsActClient(models.Model):
    """Action executed purely client side, identified by an arbitrary tag."""
    _name = 'ir.actions.client'
    _description = 'Client Action'
    _inherit = 'ir.actions.actions'
    _table = 'ir_act_client'
    _order = 'name'
    _allow_sudo_commands = False

    type = fields.Char(default='ir.actions.client')

    tag = fields.Char(string='Client action tag', required=True,
                      help="An arbitrary string, interpreted by the client"
                           " according to its own needs and wishes. There "
                           "is no central tag repository across clients.")
    target = fields.Selection([('current', 'Current Window'), ('new', 'New Window'), ('fullscreen', 'Full Screen'), ('main', 'Main action of Current Window')], default="current", string='Target Window')
    res_model = fields.Char(string='Destination Model', help="Optional model, mostly used for needactions.")
    context = fields.Char(string='Context Value', default="{}", required=True, help="Context dictionary as Python expression, empty by default (Default: {})")
    # `params` is the deserialized view of `params_store` (a repr'd dict)
    params = fields.Binary(compute='_compute_params', inverse='_inverse_params', string='Supplementary arguments',
                           help="Arguments sent to the client along with "
                                "the view tag")
    params_store = fields.Binary(string='Params storage', readonly=True, attachment=False)

    @api.depends('params_store')
    def _compute_params(self):
        """Deserialize params_store (a Python-literal string) into params."""
        # disable bin_size so the raw stored value is read, not its size
        self_bin = self.with_context(bin_size=False, bin_size_params_store=False)
        for record, record_bin in zip(self, self_bin):
            record.params = record_bin.params_store and safe_eval(record_bin.params_store, {'uid': self._uid})

    def _inverse_params(self):
        # dicts are serialized with repr(); other values stored as-is
        for record in self:
            params = record.params
            record.params_store = repr(params) if isinstance(params, dict) else params

    def _get_default_form_view(self):
        """Strip the technical params/params_store fields from the default view."""
        doc = super(IrActionsActClient, self)._get_default_form_view()
        params = doc.find(".//field[@name='params']")
        params.getparent().remove(params)
        params_store = doc.find(".//field[@name='params_store']")
        params_store.getparent().remove(params_store)
        return doc

    def _get_readable_fields(self):
        # fields readable without full action access
        return super()._get_readable_fields() | {
            "context", "params", "res_model", "tag", "target",
        }
||||
File diff suppressed because it is too large
Load diff
422
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_asset.py
Normal file
422
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_asset.py
Normal file
|
|
@ -0,0 +1,422 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import os
|
||||
from glob import glob
|
||||
from logging import getLogger
|
||||
from werkzeug import urls
|
||||
|
||||
import odoo
|
||||
import odoo.modules.module # get_manifest, don't from-import it
|
||||
from odoo import api, fields, models, tools
|
||||
from odoo.tools import misc
|
||||
|
||||
_logger = getLogger(__name__)

# File extensions recognised by asset bundles, per category.
SCRIPT_EXTENSIONS = ('js',)
STYLE_EXTENSIONS = ('css', 'scss', 'sass', 'less')
TEMPLATE_EXTENSIONS = ('xml',)
# Default sequence of ir.asset records; manifests are processed at this rank,
# records with a smaller sequence are applied before them.
DEFAULT_SEQUENCE = 16

# Directives are stored in variables for ease of use and syntax checks.
APPEND_DIRECTIVE = 'append'
PREPEND_DIRECTIVE = 'prepend'
AFTER_DIRECTIVE = 'after'
BEFORE_DIRECTIVE = 'before'
REMOVE_DIRECTIVE = 'remove'
REPLACE_DIRECTIVE = 'replace'
INCLUDE_DIRECTIVE = 'include'
# Those are the directives used with a 'target' argument/field.
DIRECTIVES_WITH_TARGET = [AFTER_DIRECTIVE, BEFORE_DIRECTIVE, REPLACE_DIRECTIVE]
# Characters that mark a path definition as a glob pattern.
WILDCARD_CHARACTERS = {'*', "?", "[", "]"}
|
||||
|
||||
def fs2web(path):
    """Convert a file system path into a web path ('/'-separated)."""
    if os.path.sep == '/':
        # already web-style (POSIX)
        return path
    return path.replace(os.path.sep, '/')
|
||||
def can_aggregate(url):
    """Return whether *url* may be fetched and aggregated into a bundle.

    External URLs (carrying a scheme or a network location) and attachment
    URLs served through ``/web/content`` must be referenced as-is rather
    than bundled.

    :param url: str, the asset URL or path
    :returns: bool
    """
    # stdlib replacement for werkzeug.urls.url_parse, which is deprecated and
    # removed in werkzeug >= 2.4; scheme/netloc semantics are identical here
    from urllib.parse import urlsplit
    parsed = urlsplit(url)
    return not parsed.scheme and not parsed.netloc and not url.startswith('/web/content')
|
||||
def is_wildcard_glob(path):
    """Determine whether a path is a wildcarded glob eg: "/web/file[14].*"
    or a genuine single file path "/web/myfile.scss"."""
    return any(char in WILDCARD_CHARACTERS for char in path)
|
||||
|
||||
class IrAsset(models.Model):
|
||||
"""This model contributes to two things:
|
||||
|
||||
1. It provides a function returning a list of all file paths declared
|
||||
in a given list of addons (see _get_addon_paths);
|
||||
|
||||
2. It allows to create 'ir.asset' records to add additional directives
|
||||
to certain bundles.
|
||||
"""
|
||||
_name = 'ir.asset'
|
||||
_description = 'Asset'
|
||||
_order = 'sequence, id'
|
||||
_allow_sudo_commands = False
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
self.clear_caches()
|
||||
return super().create(vals_list)
|
||||
|
||||
def write(self, values):
|
||||
self.clear_caches()
|
||||
return super().write(values)
|
||||
|
||||
def unlink(self):
|
||||
self.clear_caches()
|
||||
return super().unlink()
|
||||
|
||||
name = fields.Char(string='Name', required=True)
|
||||
bundle = fields.Char(string='Bundle name', required=True)
|
||||
directive = fields.Selection(string='Directive', selection=[
|
||||
(APPEND_DIRECTIVE, 'Append'),
|
||||
(PREPEND_DIRECTIVE, 'Prepend'),
|
||||
(AFTER_DIRECTIVE, 'After'),
|
||||
(BEFORE_DIRECTIVE, 'Before'),
|
||||
(REMOVE_DIRECTIVE, 'Remove'),
|
||||
(REPLACE_DIRECTIVE, 'Replace'),
|
||||
(INCLUDE_DIRECTIVE, 'Include')], default=APPEND_DIRECTIVE)
|
||||
path = fields.Char(string='Path (or glob pattern)', required=True)
|
||||
target = fields.Char(string='Target')
|
||||
active = fields.Boolean(string='active', default=True)
|
||||
sequence = fields.Integer(string="Sequence", default=DEFAULT_SEQUENCE, required=True)
|
||||
|
||||
def _get_asset_paths(self, bundle, addons=None, css=False, js=False):
|
||||
"""
|
||||
Fetches all asset file paths from a given list of addons matching a
|
||||
certain bundle. The returned list is composed of tuples containing the
|
||||
file path [1], the first addon calling it [0] and the bundle name.
|
||||
Asset loading is performed as follows:
|
||||
|
||||
1. All 'ir.asset' records matching the given bundle and with a sequence
|
||||
strictly less than 16 are applied.
|
||||
|
||||
3. The manifests of the given addons are checked for assets declaration
|
||||
for the given bundle. If any, they are read sequentially and their
|
||||
operations are applied to the current list.
|
||||
|
||||
4. After all manifests have been parsed, the remaining 'ir.asset'
|
||||
records matching the bundle are also applied to the current list.
|
||||
|
||||
:param bundle: name of the bundle from which to fetch the file paths
|
||||
:param addons: list of addon names as strings. The files returned will
|
||||
only be contained in the given addons.
|
||||
:param css: boolean: whether or not to include style files
|
||||
:param js: boolean: whether or not to include script files and template
|
||||
files
|
||||
:returns: the list of tuples (path, addon, bundle)
|
||||
"""
|
||||
installed = self._get_installed_addons_list()
|
||||
if addons is None:
|
||||
addons = self._get_active_addons_list()
|
||||
|
||||
asset_paths = AssetPaths()
|
||||
self._fill_asset_paths(bundle, addons, installed, css, js, asset_paths, [])
|
||||
return asset_paths.list
|
||||
|
||||
def _fill_asset_paths(self, bundle, addons, installed, css, js, asset_paths, seen):
|
||||
"""
|
||||
Fills the given AssetPaths instance by applying the operations found in
|
||||
the matching bundle of the given addons manifests.
|
||||
See `_get_asset_paths` for more information.
|
||||
|
||||
:param bundle: name of the bundle from which to fetch the file paths
|
||||
:param addons: list of addon names as strings
|
||||
:param css: boolean: whether or not to include style files
|
||||
:param js: boolean: whether or not to include script files
|
||||
:param xml: boolean: whether or not to include template files
|
||||
:param asset_paths: the AssetPath object to fill
|
||||
:param seen: a list of bundles already checked to avoid circularity
|
||||
"""
|
||||
if bundle in seen:
|
||||
raise Exception("Circular assets bundle declaration: %s" % " > ".join(seen + [bundle]))
|
||||
|
||||
exts = []
|
||||
if js:
|
||||
exts += SCRIPT_EXTENSIONS
|
||||
exts += TEMPLATE_EXTENSIONS
|
||||
if css:
|
||||
exts += STYLE_EXTENSIONS
|
||||
|
||||
# this index is used for prepending: files are inserted at the beginning
|
||||
# of the CURRENT bundle.
|
||||
bundle_start_index = len(asset_paths.list)
|
||||
|
||||
def process_path(directive, target, path_def):
|
||||
"""
|
||||
This sub function is meant to take a directive and a set of
|
||||
arguments and apply them to the current asset_paths list
|
||||
accordingly.
|
||||
|
||||
It is nested inside `_get_asset_paths` since we need the current
|
||||
list of addons, extensions and asset_paths.
|
||||
|
||||
:param directive: string
|
||||
:param target: string or None or False
|
||||
:param path_def: string
|
||||
"""
|
||||
if directive == INCLUDE_DIRECTIVE:
|
||||
# recursively call this function for each INCLUDE_DIRECTIVE directive.
|
||||
self._fill_asset_paths(path_def, addons, installed, css, js, asset_paths, seen + [bundle])
|
||||
return
|
||||
|
||||
addon, paths = self._get_paths(path_def, installed, exts)
|
||||
|
||||
# retrieve target index when it applies
|
||||
if directive in DIRECTIVES_WITH_TARGET:
|
||||
_, target_paths = self._get_paths(target, installed, exts)
|
||||
if not target_paths and target.rpartition('.')[2] not in exts:
|
||||
# nothing to do: the extension of the target is wrong
|
||||
return
|
||||
target_to_index = len(target_paths) and target_paths[0] or target
|
||||
target_index = asset_paths.index(target_to_index, addon, bundle)
|
||||
|
||||
if directive == APPEND_DIRECTIVE:
|
||||
asset_paths.append(paths, addon, bundle)
|
||||
elif directive == PREPEND_DIRECTIVE:
|
||||
asset_paths.insert(paths, addon, bundle, bundle_start_index)
|
||||
elif directive == AFTER_DIRECTIVE:
|
||||
asset_paths.insert(paths, addon, bundle, target_index + 1)
|
||||
elif directive == BEFORE_DIRECTIVE:
|
||||
asset_paths.insert(paths, addon, bundle, target_index)
|
||||
elif directive == REMOVE_DIRECTIVE:
|
||||
asset_paths.remove(paths, addon, bundle)
|
||||
elif directive == REPLACE_DIRECTIVE:
|
||||
asset_paths.insert(paths, addon, bundle, target_index)
|
||||
asset_paths.remove(target_paths, addon, bundle)
|
||||
else:
|
||||
# this should never happen
|
||||
raise ValueError("Unexpected directive")
|
||||
|
||||
# 1. Process the first sequence of 'ir.asset' records
|
||||
assets = self._get_related_assets([('bundle', '=', bundle)]).filtered('active')
|
||||
for asset in assets.filtered(lambda a: a.sequence < DEFAULT_SEQUENCE):
|
||||
process_path(asset.directive, asset.target, asset.path)
|
||||
|
||||
# 2. Process all addons' manifests.
|
||||
for addon in self._topological_sort(tuple(addons)):
|
||||
for command in odoo.modules.module._get_manifest_cached(addon)['assets'].get(bundle, ()):
|
||||
directive, target, path_def = self._process_command(command)
|
||||
process_path(directive, target, path_def)
|
||||
|
||||
# 3. Process the rest of 'ir.asset' records
|
||||
for asset in assets.filtered(lambda a: a.sequence >= DEFAULT_SEQUENCE):
|
||||
process_path(asset.directive, asset.target, asset.path)
|
||||
|
||||
def _get_related_assets(self, domain):
|
||||
"""
|
||||
Returns a set of assets matching the domain, regardless of their
|
||||
active state. This method can be overridden to filter the results.
|
||||
:param domain: search domain
|
||||
:returns: ir.asset recordset
|
||||
"""
|
||||
return self.with_context(active_test=False).sudo().search(domain, order='sequence, id')
|
||||
|
||||
def _get_related_bundle(self, target_path_def, root_bundle):
|
||||
"""
|
||||
Returns the first bundle directly defining a glob matching the target
|
||||
path. This is useful when generating an 'ir.asset' record to override
|
||||
a specific asset and target the right bundle, i.e. the first one
|
||||
defining the target path.
|
||||
|
||||
:param target_path_def: string: path to match.
|
||||
:root_bundle: string: bundle from which to initiate the search.
|
||||
:returns: the first matching bundle or None
|
||||
"""
|
||||
ext = target_path_def.split('.')[-1]
|
||||
installed = self._get_installed_addons_list()
|
||||
target_path = self._get_paths(target_path_def, installed)[1][0]
|
||||
|
||||
css = ext in STYLE_EXTENSIONS
|
||||
js = ext in SCRIPT_EXTENSIONS or ext in TEMPLATE_EXTENSIONS
|
||||
|
||||
asset_paths = self._get_asset_paths(root_bundle, css=css, js=js)
|
||||
|
||||
for path, _, bundle in asset_paths:
|
||||
if path == target_path:
|
||||
return bundle
|
||||
|
||||
return root_bundle
|
||||
|
||||
def _get_active_addons_list(self):
|
||||
"""Can be overridden to filter the returned list of active modules."""
|
||||
return self._get_installed_addons_list()
|
||||
|
||||
@api.model
|
||||
@tools.ormcache('addons_tuple')
|
||||
def _topological_sort(self, addons_tuple):
|
||||
"""Returns a list of sorted modules name accord to the spec in ir.module.module
|
||||
that is, application desc, sequence, name then topologically sorted"""
|
||||
IrModule = self.env['ir.module.module']
|
||||
|
||||
def mapper(addon):
|
||||
manif = odoo.modules.module._get_manifest_cached(addon)
|
||||
from_terp = IrModule.get_values_from_terp(manif)
|
||||
from_terp['name'] = addon
|
||||
from_terp['depends'] = manif.get('depends', ['base'])
|
||||
return from_terp
|
||||
|
||||
manifs = map(mapper, addons_tuple)
|
||||
|
||||
def sort_key(manif):
|
||||
return (not manif['application'], int(manif['sequence']), manif['name'])
|
||||
|
||||
manifs = sorted(manifs, key=sort_key)
|
||||
|
||||
return misc.topological_sort({manif['name']: tuple(manif['depends']) for manif in manifs})
|
||||
|
||||
@api.model
|
||||
@tools.ormcache_context(keys='install_module')
|
||||
def _get_installed_addons_list(self):
|
||||
"""
|
||||
Returns the list of all installed addons.
|
||||
:returns: string[]: list of module names
|
||||
"""
|
||||
# Main source: the current registry list
|
||||
# Second source of modules: server wide modules
|
||||
# Third source: the currently loading module from the context (similar to ir_ui_view)
|
||||
return self.env.registry._init_modules.union(odoo.conf.server_wide_modules or []).union(self.env.context.get('install_module', []))
|
||||
|
||||
def _get_paths(self, path_def, installed, extensions=None):
|
||||
"""
|
||||
Returns a list of file paths matching a given glob (path_def) as well as
|
||||
the addon targeted by the path definition. If no file matches that glob,
|
||||
the path definition is returned as is. This is either because the path is
|
||||
not correctly written or because it points to a URL.
|
||||
|
||||
:param path_def: the definition (glob) of file paths to match
|
||||
:param installed: the list of installed addons
|
||||
:param extensions: a list of extensions that found files must match
|
||||
:returns: a tuple: the addon targeted by the path definition [0] and the
|
||||
list of file paths matching the definition [1] (or the glob itself if
|
||||
none). Note that these paths are filtered on the given `extensions`.
|
||||
"""
|
||||
paths = []
|
||||
path_url = fs2web(path_def)
|
||||
path_parts = [part for part in path_url.split('/') if part]
|
||||
addon = path_parts[0]
|
||||
addon_manifest = odoo.modules.module._get_manifest_cached(addon)
|
||||
|
||||
safe_path = True
|
||||
if addon_manifest:
|
||||
if addon not in installed:
|
||||
# Assert that the path is in the installed addons
|
||||
raise Exception("Unallowed to fetch files from addon %s" % addon)
|
||||
addons_path = os.path.join(addon_manifest['addons_path'], '')[:-1]
|
||||
full_path = os.path.normpath(os.path.join(addons_path, *path_parts))
|
||||
|
||||
# first security layer: forbid escape from the current addon
|
||||
# "/mymodule/../myothermodule" is forbidden
|
||||
# the condition after the or is to further guarantee that we won't access
|
||||
# a directory that happens to be named like an addon (web....)
|
||||
if addon not in full_path or addons_path not in full_path:
|
||||
addon = None
|
||||
safe_path = False
|
||||
else:
|
||||
paths = [
|
||||
path for path in sorted(glob(full_path, recursive=True))
|
||||
]
|
||||
|
||||
# second security layer: do we have the right to access the files
|
||||
# that are grabbed by the glob ?
|
||||
# In particular we don't want to expose data in xmls of the module
|
||||
def is_safe_path(path):
|
||||
try:
|
||||
misc.file_path(path, SCRIPT_EXTENSIONS + STYLE_EXTENSIONS + TEMPLATE_EXTENSIONS)
|
||||
except (ValueError, FileNotFoundError):
|
||||
return False
|
||||
if path.rpartition('.')[2] in TEMPLATE_EXTENSIONS:
|
||||
# normpath will strip the trailing /, which is why it has to be added afterwards
|
||||
static_path = os.path.normpath("%s/static" % addon) + os.path.sep
|
||||
# Forbid xml to leak
|
||||
return static_path in path
|
||||
return True
|
||||
|
||||
len_paths = len(paths)
|
||||
paths = list(filter(is_safe_path, paths))
|
||||
safe_path = safe_path and len_paths == len(paths)
|
||||
|
||||
# Web assets must be loaded using relative paths.
|
||||
paths = [fs2web(path[len(addons_path):]) for path in paths]
|
||||
else:
|
||||
addon = None
|
||||
|
||||
if not paths and (not can_aggregate(path_url) or (safe_path and not is_wildcard_glob(path_url))):
|
||||
# No file matching the path; the path_def could be a url.
|
||||
paths = [path_url]
|
||||
|
||||
if not paths:
|
||||
msg = f'IrAsset: the path "{path_def}" did not resolve to anything.'
|
||||
if not safe_path:
|
||||
msg += " It may be due to security reasons."
|
||||
_logger.warning(msg)
|
||||
# Paths are filtered on the extensions (if any).
|
||||
return addon, [
|
||||
path
|
||||
for path in paths
|
||||
if not extensions or path.split('.')[-1] in extensions
|
||||
]
|
||||
|
||||
def _process_command(self, command):
    """Parse an asset ``command`` into ``(directive, target, path_def)``.

    A bare string is an "append" of that path; otherwise the command is a
    tuple whose first element is the directive, with a target element only
    for directives that require one.
    """
    if isinstance(command, str):
        # Default directive: append
        return APPEND_DIRECTIVE, None, command
    if command[0] in DIRECTIVES_WITH_TARGET:
        directive, target, path_def = command
        return directive, target, path_def
    # two-element command: no target
    directive, path_def = command
    return directive, None, path_def
|
||||
|
||||
|
||||
class AssetPaths:
    """ A list of asset paths (path, addon, bundle) with efficient operations. """

    def __init__(self):
        self.list = []    # ordered (path, addon, bundle) tuples
        self.memo = set()  # set of known paths, for O(1) membership tests

    def index(self, path, addon, bundle):
        """Returns the index of the given path in the current assets list."""
        if path not in self.memo:
            self._raise_not_found(path, bundle)
        for position, asset in enumerate(self.list):
            if asset[0] == path:
                return position

    def append(self, paths, addon, bundle):
        """Appends the given paths to the current list."""
        for candidate in paths:
            if candidate not in self.memo:
                self.memo.add(candidate)
                self.list.append((candidate, addon, bundle))

    def insert(self, paths, addon, bundle, index):
        """Inserts the given paths to the current list at the given position."""
        new_assets = []
        for candidate in paths:
            if candidate not in self.memo:
                self.memo.add(candidate)
                new_assets.append((candidate, addon, bundle))
        self.list[index:index] = new_assets

    def remove(self, paths_to_remove, addon, bundle):
        """Removes the given paths from the current list."""
        known = {candidate for candidate in paths_to_remove if candidate in self.memo}
        if known:
            self.list[:] = [asset for asset in self.list if asset[0] not in known]
            self.memo.difference_update(known)
            return

        if paths_to_remove:
            # none of the requested paths exist: report the error
            self._raise_not_found(paths_to_remove, bundle)

    def _raise_not_found(self, path, bundle):
        raise ValueError("File(s) %s not found in bundle %s" % (path, bundle))
|
||||
|
|
@ -0,0 +1,732 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import base64
|
||||
import contextlib
|
||||
import hashlib
|
||||
import io
|
||||
import itertools
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
import re
|
||||
import uuid
|
||||
|
||||
from collections import defaultdict
|
||||
from PIL import Image
|
||||
|
||||
from odoo import api, fields, models, SUPERUSER_ID, tools, _
|
||||
from odoo.exceptions import AccessError, ValidationError, UserError
|
||||
from odoo.tools import config, human_size, ImageProcess, str2bool, consteq
|
||||
from odoo.tools.mimetypes import guess_mimetype
|
||||
from odoo.osv import expression
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IrAttachment(models.Model):
|
||||
"""Attachments are used to link binary files or url to any openerp document.
|
||||
|
||||
External attachment storage
|
||||
---------------------------
|
||||
|
||||
The computed field ``datas`` is implemented using ``_file_read``,
|
||||
``_file_write`` and ``_file_delete``, which can be overridden to implement
|
||||
other storage engines. Such methods should check for other location pseudo
|
||||
uri (example: hdfs://hadoopserver).
|
||||
|
||||
The default implementation is the file:dirname location that stores files
|
||||
on the local filesystem using name based on their sha1 hash
|
||||
"""
|
||||
_name = 'ir.attachment'
|
||||
_description = 'Attachment'
|
||||
_order = 'id desc'
|
||||
|
||||
def _compute_res_name(self):
    """Compute the display name of the record each attachment is linked to,
    or False when the attachment is not linked to any record."""
    for attachment in self:
        if not (attachment.res_model and attachment.res_id):
            attachment.res_name = False
            continue
        related = self.env[attachment.res_model].browse(attachment.res_id)
        attachment.res_name = related.display_name
|
||||
|
||||
@api.model
def _storage(self):
    """Return the configured storage backend name.

    Read from the ``ir_attachment.location`` system parameter; defaults
    to ``'file'`` (filestore on disk), ``'db'`` keeps contents in the
    database (see :meth:`_get_datas_related_values`).
    """
    return self.env['ir.config_parameter'].sudo().get_param('ir_attachment.location', 'file')
|
||||
|
||||
@api.model
def _filestore(self):
    """Return the root directory of the filestore for the current database."""
    return config.filestore(self._cr.dbname)
|
||||
|
||||
@api.model
def _get_storage_domain(self):
    """Return the search domain selecting the attachments that still live
    in the *other* storage backend, i.e. those :meth:`force_storage`
    has to migrate to the current one."""
    migration_domains = {
        # target is 'db': migrate attachments still on the filesystem
        'db': [('store_fname', '!=', False)],
        # target is 'file': migrate attachments still in the database
        'file': [('db_datas', '!=', False)],
    }
    return migration_domains[self._storage()]
|
||||
|
||||
@api.model
def force_storage(self):
    """Force all attachments to be stored in the currently configured storage

    :raises AccessError: when the current user is not an administrator
    """
    if not self.env.is_admin():
        raise AccessError(_('Only administrators can execute this action.'))

    # Migrate only binary attachments and bypass the res_field automatic
    # filter added in _search override.  The tautological
    # "res_field = False OR res_field != False" leaf is deliberate: it
    # mentions 'res_field' so _search does not inject its own
    # res_field=False filter, while matching every record.
    self.search(expression.AND([
        self._get_storage_domain(),
        ['&', ('type', '=', 'binary'), '|', ('res_field', '=', False), ('res_field', '!=', False)]
    ]))._migrate()
|
||||
|
||||
def _migrate(self):
    """Rewrite every attachment in ``self`` so its binary content is
    stored through the currently configured storage backend."""
    total = len(self)
    target = self._storage().upper()
    for count, attachment in enumerate(self, start=1):
        _logger.debug("Migrate attachment %s/%s to %s", count, total, target)
        # re-writing 'raw' re-dispatches the content to the current
        # storage; pass mimetype along to avoid recomputation
        attachment.write({'raw': attachment.raw, 'mimetype': attachment.mimetype})
|
||||
|
||||
@api.model
def _full_path(self, path):
    """Return the absolute location of ``path`` inside the filestore.

    The path is sanitized first: all dots are removed (neutralizing any
    '..' traversal) and leading/trailing slashes are stripped, so the
    result can never escape the filestore root.
    """
    sanitized = path.replace('.', '').strip('/\\')
    return os.path.join(self._filestore(), sanitized)
|
||||
|
||||
@api.model
def _get_path(self, bin_data, sha):
    """Return ``(fname, full_path)`` where content with checksum ``sha``
    is (or should be) stored in the filestore.

    :param bin_data: binary content, used only to detect sha collisions
    :param sha: sha1 hex digest of ``bin_data``
    :raises UserError: if a *different* content already exists at the
        computed location (sha-1 collision)
    """
    # retro compatibility: older filestores used a 3-char directory prefix;
    # reuse that location if the file already exists there
    fname = sha[:3] + '/' + sha
    full_path = self._full_path(fname)
    if os.path.isfile(full_path):
        return fname, full_path  # keep existing path

    # scatter files across 256 dirs
    # we use '/' in the db (even on windows)
    fname = sha[:2] + '/' + sha
    full_path = self._full_path(fname)
    dirname = os.path.dirname(full_path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)
    # prevent sha-1 collision
    if os.path.isfile(full_path) and not self._same_content(bin_data, full_path):
        raise UserError(_("The attachment collides with an existing file."))
    return fname, full_path
|
||||
|
||||
@api.model
def _file_read(self, fname):
    """Return the binary content stored under ``fname`` in the filestore,
    or ``b''`` when the file is missing or unreadable."""
    assert isinstance(self, IrAttachment)
    full_path = self._full_path(fname)
    try:
        with open(full_path, 'rb') as fobj:
            content = fobj.read()
    except OSError:
        # degrade gracefully: log and behave as an empty attachment
        # (IOError is an alias of OSError on Python 3)
        _logger.info("_read_file reading %s", full_path, exc_info=True)
        return b''
    return content
|
||||
|
||||
@api.model
def _file_write(self, bin_value, checksum):
    """Store ``bin_value`` in the filestore under its checksum-based name
    and return the relative store filename.

    Content is deduplicated by checksum: if the target file already
    exists, nothing is written.
    """
    assert isinstance(self, IrAttachment)
    fname, full_path = self._get_path(bin_value, checksum)
    if not os.path.exists(full_path):
        try:
            with open(full_path, 'wb') as fp:
                fp.write(bin_value)
            # add fname to checklist, in case the transaction aborts
            # (the gc will then remove the orphaned file)
            self._mark_for_gc(fname)
        except IOError:
            # best-effort: log the failure instead of aborting the write
            _logger.info("_file_write writing %s", full_path, exc_info=True)
    return fname
|
||||
|
||||
@api.model
def _file_delete(self, fname):
    """Schedule the filestore file ``fname`` for deletion.

    The file is not removed immediately: the filestore garbage collector
    (:meth:`_gc_file_store`) deletes it later, once no attachment
    references it anymore.
    """
    # simply add fname to checklist, it will be garbage-collected later
    self._mark_for_gc(fname)
|
||||
|
||||
def _mark_for_gc(self, fname):
    """ Add ``fname`` in a checklist for the filestore garbage collection. """
    assert isinstance(self, IrAttachment)
    # sanitize like _full_path does, so the checklist entry cannot escape
    # the filestore
    fname = re.sub('[.]', '', fname).strip('/\\')
    # we use a spooldir: add an empty file in the subdirectory 'checklist'
    full_path = os.path.join(self._full_path('checklist'), fname)
    if not os.path.exists(full_path):
        dirname = os.path.dirname(full_path)
        if not os.path.isdir(dirname):
            # another process may create the directory concurrently
            with contextlib.suppress(OSError):
                os.makedirs(dirname)
        # touch the marker file ('ab' neither truncates nor fails if it
        # already exists)
        open(full_path, 'ab').close()
|
||||
|
||||
@api.autovacuum
def _gc_file_store(self):
    """ Perform the garbage collection of the filestore.

    Only relevant for the 'file' storage backend.  Takes a share lock on
    ir_attachment for the duration of the collection so no concurrent
    transaction can reference a file while it is being deleted.
    """
    assert isinstance(self, IrAttachment)
    if self._storage() != 'file':
        return

    # Continue in a new transaction. The LOCK statement below must be the
    # first one in the current transaction, otherwise the database snapshot
    # used by it may not contain the most recent changes made to the table
    # ir_attachment! Indeed, if concurrent transactions create attachments,
    # the LOCK statement will wait until those concurrent transactions end.
    # But this transaction will not see the new attachements if it has done
    # other requests before the LOCK (like the method _storage() above).
    cr = self._cr
    cr.commit()

    # prevent all concurrent updates on ir_attachment while collecting,
    # but only attempt to grab the lock for a little bit, otherwise it'd
    # start blocking other transactions. (will be retried later anyway)
    cr.execute("SET LOCAL lock_timeout TO '10s'")
    cr.execute("LOCK ir_attachment IN SHARE MODE")

    self._gc_file_store_unsafe()

    # commit to release the lock
    cr.commit()
|
||||
|
||||
def _gc_file_store_unsafe(self):
    """Delete the filestore files listed in the 'checklist' spool
    directory that are no longer referenced by any attachment.

    "Unsafe" because it assumes the caller holds a lock on the
    ir_attachment table (see :meth:`_gc_file_store`).
    """
    # retrieve the file names from the checklist
    checklist = {}
    for dirpath, _, filenames in os.walk(self._full_path('checklist')):
        dirname = os.path.basename(dirpath)
        for filename in filenames:
            # fname is the db-side store_fname form: '<2-char prefix>/<sha>'
            fname = "%s/%s" % (dirname, filename)
            checklist[fname] = os.path.join(dirpath, filename)

    # Clean up the checklist. The checklist is split in chunks and files are garbage-collected
    # for each chunk.
    removed = 0
    for names in self.env.cr.split_for_in_conditions(checklist):
        # determine which files to keep among the checklist
        self.env.cr.execute("SELECT store_fname FROM ir_attachment WHERE store_fname IN %s", [names])
        whitelist = set(row[0] for row in self.env.cr.fetchall())

        # remove garbage files, and clean up checklist
        for fname in names:
            filepath = checklist[fname]
            if fname not in whitelist:
                try:
                    os.unlink(self._full_path(fname))
                    _logger.debug("_file_gc unlinked %s", self._full_path(fname))
                    removed += 1
                except (OSError, IOError):
                    _logger.info("_file_gc could not unlink %s", self._full_path(fname), exc_info=True)
            # always remove the checklist marker, even when the data file
            # was kept or could not be unlinked
            with contextlib.suppress(OSError):
                os.unlink(filepath)

    _logger.info("filestore gc %d checked, %d removed", len(checklist), removed)
|
||||
|
||||
@api.depends('store_fname', 'db_datas', 'file_size')
@api.depends_context('bin_size')
def _compute_datas(self):
    """Expose the content as base64, or as a human-readable size when the
    'bin_size' context key is set (list views avoid loading the blob)."""
    if self._context.get('bin_size'):
        for attachment in self:
            attachment.datas = human_size(attachment.file_size)
    else:
        for attachment in self:
            attachment.datas = base64.b64encode(attachment.raw or b'')
|
||||
|
||||
@api.depends('store_fname', 'db_datas')
def _compute_raw(self):
    """Load the raw binary content: from the filestore when a
    store_fname is set, from the db_datas column otherwise."""
    for attachment in self:
        fname = attachment.store_fname
        attachment.raw = attachment._file_read(fname) if fname else attachment.db_datas
|
||||
|
||||
def _inverse_raw(self):
    """Persist a new ``raw`` value through :meth:`_set_attachment_data`."""
    self._set_attachment_data(lambda a: a.raw or b'')
|
||||
|
||||
def _inverse_datas(self):
    """Persist a new base64 ``datas`` value through :meth:`_set_attachment_data`."""
    self._set_attachment_data(lambda attach: base64.b64decode(attach.datas or b''))
|
||||
|
||||
def _set_attachment_data(self, asbytes):
    """Store new binary content for each attachment in ``self``.

    :param asbytes: callable mapping an attachment record to its new
        ``bytes`` content
    """
    for attach in self:
        # compute the fields that depend on datas
        bin_data = asbytes(attach)
        vals = self._get_datas_related_values(bin_data, attach.mimetype)

        # take current location in filestore to possibly garbage-collect it
        fname = attach.store_fname
        # write as superuser, as user probably does not have write access
        super(IrAttachment, attach.sudo()).write(vals)
        # only schedule the old file for deletion AFTER the new values are
        # written, so a rollback cannot lose content
        if fname:
            self._file_delete(fname)
|
||||
|
||||
def _get_datas_related_values(self, data, mimetype):
    """Return the field values derived from binary ``data``: size,
    checksum, index content, and the storage-dependent pair
    ``store_fname`` / ``db_datas``.

    :param data: binary content (``bytes``)
    :param mimetype: mimetype used for content indexing
    """
    checksum = self._compute_checksum(data)
    try:
        index_content = self._index(data, mimetype, checksum=checksum)
    except TypeError:
        # backward compatibility with _index overrides that do not accept
        # the 'checksum' keyword argument
        index_content = self._index(data, mimetype)
    values = {
        'file_size': len(data),
        'checksum': checksum,
        'index_content': index_content,
        'store_fname': False,
        'db_datas': data,
    }
    # non-empty content with file storage goes to the filestore; exactly
    # one of store_fname / db_datas is set
    if data and self._storage() != 'db':
        values['store_fname'] = self._file_write(data, values['checksum'])
        values['db_datas'] = False
    return values
|
||||
|
||||
def _compute_checksum(self, bin_data):
|
||||
""" compute the checksum for the given datas
|
||||
:param bin_data : datas in its binary form
|
||||
"""
|
||||
# an empty file has a checksum too (for caching)
|
||||
return hashlib.sha1(bin_data or b'').hexdigest()
|
||||
|
||||
@api.model
def _same_content(self, bin_data, filepath):
    """Return True when ``bin_data`` is byte-identical to the content of
    ``filepath``, comparing chunk by chunk so the file is never fully
    loaded in memory (used to detect sha-1 collisions)."""
    chunk_size = 1024
    offset = 0
    with open(filepath, 'rb') as fobj:
        while True:
            chunk = fobj.read(chunk_size)
            # a length mismatch also shows up here: an empty chunk against
            # a non-empty slice (or vice versa) differs
            if chunk != bin_data[offset:offset + chunk_size]:
                return False
            if not chunk:
                return True
            offset += chunk_size
|
||||
|
||||
def _compute_mimetype(self, values):
|
||||
""" compute the mimetype of the given values
|
||||
:param values : dict of values to create or write an ir_attachment
|
||||
:return mime : string indicating the mimetype, or application/octet-stream by default
|
||||
"""
|
||||
mimetype = None
|
||||
if values.get('mimetype'):
|
||||
mimetype = values['mimetype']
|
||||
if not mimetype and values.get('name'):
|
||||
mimetype = mimetypes.guess_type(values['name'])[0]
|
||||
if not mimetype and values.get('url'):
|
||||
mimetype = mimetypes.guess_type(values['url'].split('?')[0])[0]
|
||||
if not mimetype or mimetype == 'application/octet-stream':
|
||||
raw = None
|
||||
if values.get('raw'):
|
||||
raw = values['raw']
|
||||
elif values.get('datas'):
|
||||
raw = base64.b64decode(values['datas'])
|
||||
if raw:
|
||||
mimetype = guess_mimetype(raw)
|
||||
return mimetype and mimetype.lower() or 'application/octet-stream'
|
||||
|
||||
def _postprocess_contents(self, values):
    """Post-process attachment ``values`` before storing them: images of a
    supported subtype larger than the configured maximum resolution are
    resized and recompressed in place (in 'raw' or 'datas').

    Returns the (possibly modified) ``values`` dict.
    """
    ICP = self.env['ir.config_parameter'].sudo().get_param
    supported_subtype = ICP('base.image_autoresize_extensions', 'png,jpeg,bmp,tiff').split(',')

    mimetype = values['mimetype'] = self._compute_mimetype(values)
    _type, _, _subtype = mimetype.partition('/')
    is_image_resizable = _type == 'image' and _subtype in supported_subtype
    if is_image_resizable and (values.get('datas') or values.get('raw')):
        is_raw = values.get('raw')

        # Can be set to 0 to skip the resize
        max_resolution = ICP('base.image_autoresize_max_px', '1920x1920')
        if str2bool(max_resolution, True):
            try:
                img = False
                if is_raw:
                    img = ImageProcess(values['raw'], verify_resolution=False)
                else:  # datas
                    img = ImageProcess(base64.b64decode(values['datas']), verify_resolution=False)

                w, h = img.image.size
                nw, nh = map(int, max_resolution.split('x'))
                # only shrink: images within limits are left untouched
                if w > nw or h > nh:
                    img = img.resize(nw, nh)
                    quality = int(ICP('base.image_autoresize_quality', 80))
                    image_data = img.image_quality(quality=quality)
                    if is_raw:
                        values['raw'] = image_data
                    else:
                        values['datas'] = base64.b64encode(image_data)
            except UserError as e:
                # Catch error during test where we provide fake image
                # raise UserError(_("This file could not be decoded as an image file. Please try with a different file."))
                _logger.info('Post processing ignored : %s', e)
    return values
|
||||
|
||||
def _check_contents(self, values):
    """Validate and normalize attachment ``values`` before create/write.

    HTML/XML-like content uploaded by users without write access on views
    is neutralized by forcing its mimetype to text/plain (prevents stored
    XSS through served attachments); images are then post-processed
    unless the 'image_no_postprocess' context key is set.
    """
    mimetype = values['mimetype'] = self._compute_mimetype(values)
    xml_like = 'ht' in mimetype or (  # hta, html, xhtml, etc.
        'xml' in mimetype and  # other xml (svg, text/xml, etc)
        not mimetype.startswith('application/vnd.openxmlformats'))  # exception for Office formats
    user = self.env.context.get('binary_field_real_user', self.env.user)
    if not isinstance(user, self.pool['res.users']):
        raise UserError(_("binary_field_real_user should be a res.users record."))
    force_text = xml_like and (
        self.env.context.get('attachments_mime_plainxml') or
        not self.env['ir.ui.view'].with_user(user).check_access_rights('write', False))
    if force_text:
        values['mimetype'] = 'text/plain'
    if not self.env.context.get('image_no_postprocess'):
        values = self._postprocess_contents(values)
    return values
|
||||
|
||||
@api.model
def _index(self, bin_data, file_type, checksum=None):
    """ compute the index content of the given binary data.
    This is a python implementation of the unix command 'strings'.
    :param bin_data : datas in binary form
    :param file_type : mimetype; only 'text/*' content gets full indexing
    :param checksum : unused here; accepted so overrides can rely on it
    :return index_content : string containing all the printable character of the binary data
    """
    if not file_type:
        return False
    main_type = file_type.split('/')[0]
    if main_type != 'text':
        # non-text content is only indexed by its main mimetype
        return main_type
    # keep runs of at least 4 printable ASCII characters, one per line
    words = re.findall(b"[\x20-\x7E]{4,}", bin_data)
    return b"\n".join(words).decode('ascii')
|
||||
|
||||
@api.model
def get_serving_groups(self):
    """ An ir.attachment record may be used as a fallback in the
    http dispatch if its type field is set to "binary" and its url
    field is set as the request's url. Only the groups returned by
    this method are allowed to create and write on such records.

    :return: list of XML ids of the allowed groups
    """
    return ['base.group_system']
|
||||
|
||||
name = fields.Char('Name', required=True)
description = fields.Text('Description')
# display name of the linked record (see _compute_res_name)
res_name = fields.Char('Resource Name', compute='_compute_res_name')
# model/id of the record the attachment belongs to, if any
res_model = fields.Char('Resource Model', readonly=True)
# set when the attachment backs a binary field of the linked record
res_field = fields.Char('Resource Field', readonly=True)
res_id = fields.Many2oneReference('Resource ID', model_field='res_model',
                                  readonly=True)
company_id = fields.Many2one('res.company', string='Company', change_default=True,
                             default=lambda self: self.env.company)
# 'binary': stored content; 'url': simple link, no content stored
type = fields.Selection([('url', 'URL'), ('binary', 'File')],
                        string='Type', required=True, default='binary', change_default=True,
                        help="You can either upload a file from your computer or copy/paste an internet link to your file.")
url = fields.Char('Url', index='btree_not_null', size=1024)
# public attachments are readable without the usual access checks (see check())
public = fields.Boolean('Is public document')

# for external access
access_token = fields.Char('Access Token', groups="base.group_user")

# the field 'datas' is computed and may use the other fields below
raw = fields.Binary(string="File Content (raw)", compute='_compute_raw', inverse='_inverse_raw')
datas = fields.Binary(string='File Content (base64)', compute='_compute_datas', inverse='_inverse_datas')
# content column used when storage is 'db'; exclusive with store_fname
db_datas = fields.Binary('Database Data', attachment=False)
# relative filestore path ('<prefix>/<sha1>') when storage is 'file'
store_fname = fields.Char('Stored Filename', index=True, unaccent=False)
file_size = fields.Integer('File Size', readonly=True)
checksum = fields.Char("Checksum/SHA1", size=40, index=True, readonly=True)
mimetype = fields.Char('Mime Type', readonly=True)
# full-text index of text attachments (see _index)
index_content = fields.Text('Indexed Content', readonly=True, prefetch=False)
|
||||
|
||||
def _auto_init(self):
    """Create the table as usual, plus a composite index on
    (res_model, res_id) to speed up attachment lookups by linked record."""
    res = super(IrAttachment, self)._auto_init()
    tools.create_index(self._cr, 'ir_attachment_res_idx',
                       self._table, ['res_model', 'res_id'])
    return res
|
||||
|
||||
@api.constrains('type', 'url')
def _check_serving_attachments(self):
    """Only members of the groups returned by :meth:`get_serving_groups`
    may save attachments that the http dispatcher could serve directly
    (binary attachments carrying a url)."""
    if self.env.is_admin():
        return
    for attachment in self:
        if attachment.type != 'binary' or not attachment.url:
            continue
        # restrict writing on attachments that could be served by the
        # ir.http's dispatch exception handling
        # XDO note: this should be done in check(write), constraints for access rights?
        # XDO note: if read on sudo, read twice, one for constraints, one for _inverse_datas as user
        has_group = self.env.user.has_group
        if not any(has_group(g) for g in attachment.get_serving_groups()):
            raise ValidationError(_("Sorry, you are not allowed to write on this document"))
|
||||
|
||||
@api.model
def check(self, mode, values=None):
    """ Restricts the access to an ir.attachment, according to referred mode

    Attachments inherit the permissions of the record they are attached
    to: the user must have ``mode`` access (or 'write' for
    create/unlink) on every linked record.

    :param mode: 'read', 'write', 'create' or 'unlink'
    :param values: optional create/write values; only res_model/res_id
        are used, to check the target record as well
    :raises AccessError: when access is denied
    """
    if self.env.is_superuser():
        return True
    # Always require an internal user (aka, employee) to access to a attachment
    if not (self.env.is_admin() or self.env.user._is_internal()):
        raise AccessError(_("Sorry, you are not allowed to access this document."))
    # collect the records to check (by model)
    model_ids = defaultdict(set)            # {model_name: set(ids)}
    if self:
        # DLE P173: `test_01_portal_attachment`
        # flush pending ORM values so the raw SQL below sees them
        self.env['ir.attachment'].flush_model(['res_model', 'res_id', 'create_uid', 'public', 'res_field'])
        self._cr.execute('SELECT res_model, res_id, create_uid, public, res_field FROM ir_attachment WHERE id IN %s', [tuple(self.ids)])
        for res_model, res_id, create_uid, public, res_field in self._cr.fetchall():
            # public attachments are readable by anyone
            if public and mode == 'read':
                continue
            # binary-field attachments and unattached attachments of other
            # users are reserved to system users
            if not self.env.is_system() and (res_field or (not res_id and create_uid != self.env.uid)):
                raise AccessError(_("Sorry, you are not allowed to access this document."))
            if not (res_model and res_id):
                continue
            model_ids[res_model].add(res_id)
    if values and values.get('res_model') and values.get('res_id'):
        model_ids[values['res_model']].add(values['res_id'])

    # check access rights on the records
    for res_model, res_ids in model_ids.items():
        # ignore attachments that are not attached to a resource anymore
        # when checking access rights (resource was deleted but attachment
        # was not)
        if res_model not in self.env:
            continue
        if res_model == 'res.users' and len(res_ids) == 1 and self.env.uid == list(res_ids)[0]:
            # by default a user cannot write on itself, despite the list of writeable fields
            # e.g. in the case of a user inserting an image into his image signature
            # we need to bypass this check which would needlessly throw us away
            continue
        records = self.env[res_model].browse(res_ids).exists()
        # For related models, check if we can write to the model, as unlinking
        # and creating attachments can be seen as an update to the model
        access_mode = 'write' if mode in ('create', 'unlink') else mode
        records.check_access_rights(access_mode)
        records.check_access_rule(access_mode)
|
||||
|
||||
@api.model
def _filter_attachment_access(self, attachment_ids):
    """Filter the given attachment to return only the records the current user have access to.

    :param attachment_ids: List of attachment ids we want to filter
    :return: <ir.attachment> the current user have access to
    """
    ret_attachments = self.env['ir.attachment']
    attachments = self.browse(attachment_ids)
    # fail fast when the user cannot read attachments at all
    if not attachments.check_access_rights('read', raise_exception=False):
        return ret_attachments

    for attachment in attachments.sudo():
        # Use SUDO here to not raise an error during the prefetch
        # And then drop SUDO right to check if we can access it
        try:
            attachment.sudo(False).check('read')
            ret_attachments |= attachment
        except AccessError:
            # inaccessible attachments are silently filtered out
            continue
    return ret_attachments
|
||||
|
||||
def _read_group_allowed_fields(self):
|
||||
return ['type', 'company_id', 'res_id', 'create_date', 'create_uid', 'name', 'mimetype', 'id', 'url', 'res_field', 'res_model']
|
||||
|
||||
@api.model
def read_group(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True):
    """Override read_group to add res_field=False in domain if not present.

    Also restricts non-system users to a whitelist of aggregable fields
    and forbids the 'name:agg(field)' syntax, to prevent leaking
    attachment contents through aggregates.
    """
    if not fields:
        raise AccessError(_("Sorry, you must provide fields to read on attachments"))
    groupby = [groupby] if isinstance(groupby, str) else groupby
    if any('(' in field for field in fields + groupby):
        raise AccessError(_("Sorry, the syntax 'name:agg(field)' is not available for attachments"))
    # same implicit filter as in _search: hide binary-field attachments
    # unless the caller filters on id/res_field explicitly
    if not any(item[0] in ('id', 'res_field') for item in domain):
        domain.insert(0, ('res_field', '=', False))
    allowed_fields = self._read_group_allowed_fields()
    fields_set = set(field.split(':')[0] for field in fields + groupby)
    if not self.env.is_system() and (not fields or fields_set.difference(allowed_fields)):
        raise AccessError(_("Sorry, you are not allowed to access these fields on attachments."))
    return super().read_group(domain, fields, groupby, offset=offset, limit=limit, orderby=orderby, lazy=lazy)
|
||||
|
||||
@api.model
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
    """Search override enforcing attachment-specific access rules.

    Two filters are applied on top of the regular search:
    - binary-field attachments (res_field set) are hidden unless the
      domain mentions 'id' or 'res_field' explicitly;
    - attachments linked to records the user cannot read are removed.

    Because the post-filtering may shrink a limited result page, the
    method recursively fetches further pages to honour ``limit``.
    """
    # add res_field=False in domain if not present; the arg[0] trick below
    # works for domain items and '&'/'|'/'!' operators too
    discard_binary_fields_attachments = False
    if not any(arg[0] in ('id', 'res_field') for arg in args):
        discard_binary_fields_attachments = True
        args.insert(0, ('res_field', '=', False))

    # always ask super for ids (count=False): counting is done on the
    # filtered result below
    ids = super(IrAttachment, self)._search(args, offset=offset, limit=limit, order=order,
                                            count=False, access_rights_uid=access_rights_uid)

    if self.env.is_superuser():
        # rules do not apply for the superuser
        return len(ids) if count else ids

    if not ids:
        return 0 if count else []

    # Work with a set, as list.remove() is prohibitive for large lists of documents
    # (takes 20+ seconds on a db with 100k docs during search_count()!)
    orig_ids = ids
    ids = set(ids)

    # For attachments, the permissions of the document they are attached to
    # apply, so we must remove attachments for which the user cannot access
    # the linked document.
    # Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs),
    # and the permissions are checked in super() and below anyway.
    model_attachments = defaultdict(lambda: defaultdict(set))   # {res_model: {res_id: set(ids)}}
    binary_fields_attachments = set()
    self._cr.execute("""SELECT id, res_model, res_id, public, res_field FROM ir_attachment WHERE id IN %s""", [tuple(ids)])
    for row in self._cr.dictfetchall():
        if not row['res_model'] or row['public']:
            continue
        # model_attachments = {res_model: {res_id: set(ids)}}
        model_attachments[row['res_model']][row['res_id']].add(row['id'])
        # Should not retrieve binary fields attachments if not explicitly required
        if discard_binary_fields_attachments and row['res_field']:
            binary_fields_attachments.add(row['id'])

    if binary_fields_attachments:
        ids.difference_update(binary_fields_attachments)

    # To avoid multiple queries for each attachment found, checks are
    # performed in batch as much as possible.
    for res_model, targets in model_attachments.items():
        if res_model not in self.env:
            # stale attachment pointing to a removed model: keep it
            continue
        if not self.env[res_model].check_access_rights('read', False):
            # remove all corresponding attachment ids
            ids.difference_update(itertools.chain(*targets.values()))
            continue
        # filter ids according to what access rules permit
        target_ids = list(targets)
        allowed = self.env[res_model].with_context(active_test=False).search([('id', 'in', target_ids)])
        for res_id in set(target_ids).difference(allowed.ids):
            ids.difference_update(targets[res_id])

    # sort result according to the original sort ordering
    result = [id for id in orig_ids if id in ids]

    # If the original search reached the limit, it is important the
    # filtered record set does so too. When a JS view receive a
    # record set whose length is below the limit, it thinks it
    # reached the last page. To avoid an infinite recursion due to the
    # permission checks the sub-call need to be aware of the number of
    # expected records to retrieve
    if len(orig_ids) == limit and len(result) < self._context.get('need', limit):
        need = self._context.get('need', limit) - len(result)
        result.extend(self.with_context(need=need)._search(args, offset=offset + len(orig_ids),
                                                           limit=limit, order=order, count=False,
                                                           access_rights_uid=access_rights_uid)[:limit - len(result)])

    return len(result) if count else list(result)
|
||||
|
||||
def _read(self, fields):
    """Check read access on the linked records before reading fields."""
    self.check('read')
    return super(IrAttachment, self)._read(fields)
|
||||
|
||||
def write(self, vals):
    """Write ``vals`` after checking access on the linked records and
    re-validating content-related values.

    Note: ``vals`` is mutated in place (computed fields are popped).
    """
    self.check('write', values=vals)
    # remove computed field depending of datas
    for field in ('file_size', 'checksum', 'store_fname'):
        vals.pop(field, False)
    if 'mimetype' in vals or 'datas' in vals or 'raw' in vals:
        vals = self._check_contents(vals)
    return super(IrAttachment, self).write(vals)
|
||||
|
||||
def copy(self, default=None):
    """Duplicate the attachment.

    Unless the caller overrides the content in ``default``, 'raw' is
    copied explicitly so checksum and store_fname get recomputed on the
    duplicate.
    """
    if not (default or {}).keys() & {'datas', 'db_datas', 'raw'}:
        # ensure the content is kept and recomputes checksum/store_fname
        default = dict(default or {}, raw=self.raw)
    return super(IrAttachment, self).copy(default)
|
||||
|
||||
def unlink(self):
    """Delete the attachments after checking access on linked records,
    then schedule their filestore files for garbage collection."""
    if not self:
        return True
    self.check('unlink')

    # First delete in the database, *then* in the filesystem if the
    # database allowed it. Helps avoid errors when concurrent transactions
    # are deleting the same file, and some of the transactions are
    # rolled back by PostgreSQL (due to concurrent updates detection).
    to_delete = set(attach.store_fname for attach in self if attach.store_fname)
    res = super(IrAttachment, self).unlink()
    for file_path in to_delete:
        self._file_delete(file_path)

    return res
|
||||
|
||||
@api.model_create_multi
def create(self, vals_list):
    """Create attachments, validating contents and checking access on the
    records they will be linked to.

    Content may be given either as 'raw' (bytes or str) or 'datas'
    (base64); both are converted to the storage-dependent values before
    the actual create.
    """
    record_tuple_set = set()

    # remove computed field depending of datas
    vals_list = [{
        key: value
        for key, value
        in vals.items()
        if key not in ('file_size', 'checksum', 'store_fname')
    } for vals in vals_list]

    for values in vals_list:
        values = self._check_contents(values)
        raw, datas = values.pop('raw', None), values.pop('datas', None)
        if raw or datas:
            if isinstance(raw, str):
                # b64decode handles str input but raw needs explicit encoding
                raw = raw.encode()
            values.update(self._get_datas_related_values(
                raw or base64.b64decode(datas or b''),
                values['mimetype']
            ))

        # 'check()' only uses res_model and res_id from values, and make an exists.
        # We can group the values by model, res_id to make only one query when
        # creating multiple attachments on a single record.
        record_tuple = (values.get('res_model'), values.get('res_id'))
        record_tuple_set.add(record_tuple)

    # don't use possible contextual recordset for check, see commit for details
    Attachments = self.browse()
    for res_model, res_id in record_tuple_set:
        Attachments.check('create', values={'res_model':res_model, 'res_id':res_id})
    return super().create(vals_list)
|
||||
|
||||
def _post_add_create(self):
    """Extension hook with no default behavior.

    The name suggests it runs after attachments are added/created so other
    modules can post-process them — confirm against the callers, which are
    not visible in this file section.
    """
    pass
|
||||
|
||||
def generate_access_token(self):
    """Return one access token per attachment, generating missing ones."""
    tokens = []
    for attachment in self:
        token = attachment.access_token
        if not token:
            # No token stored yet: create and persist a fresh one.
            token = self._generate_access_token()
            attachment.write({'access_token': token})
        tokens.append(token)
    return tokens
|
||||
|
||||
def _generate_access_token(self):
|
||||
return str(uuid.uuid4())
|
||||
|
||||
def validate_access(self, access_token):
    """Validate that the current environment may read this attachment.

    :param access_token: optional token to authenticate with, used instead
        of the regular access rights and rules.
    :return: a sudo'ed recordset of the attachment when access is granted
        via token, public flag or portal read-check; otherwise ``self``
        (the caller's regular access rights then apply).
    :raises AccessError: when a token is supplied but does not match.
    """
    self.ensure_one()
    record_sudo = self.sudo()

    if access_token:
        # prefetch_fields=False: only the access_token column is needed here
        tok = record_sudo.with_context(prefetch_fields=False).access_token
        # consteq: constant-time comparison, avoids timing attacks on the token
        valid_token = consteq(tok or '', access_token)
        if not valid_token:
            raise AccessError("Invalid access token")
        return record_sudo

    # Public attachments are readable by anyone.
    if record_sudo.with_context(prefetch_fields=False).public:
        return record_sudo

    if self.env.user.has_group('base.group_portal'):
        # Check the read access on the record linked to the attachment
        # eg: Allow to download an attachment on a task from /my/tasks/task_id
        self.check('read')
        return record_sudo

    # No special grant: return the non-sudo recordset so normal ACLs apply.
    return self
|
||||
|
||||
@api.model
def action_get(self):
    """Return the window action listing attachments."""
    action = self.env['ir.actions.act_window']._for_xml_id('base.action_attachment')
    return action
|
||||
|
||||
@api.model
def _get_serve_attachment(self, url, extra_domain=None, order=None):
    """Find the binary attachment served at ``url``, if any."""
    domain = [('type', '=', 'binary'), ('url', '=', url)]
    if extra_domain:
        domain += extra_domain
    return self.search(domain, order=order, limit=1)
|
||||
|
||||
@api.model
def regenerate_assets_bundles(self):
    """Drop the generated web-asset attachments so bundles are rebuilt."""
    bundle_domain = [
        ('public', '=', True),
        ("url", "=like", "/web/assets/%"),
        ('res_model', '=', 'ir.ui.view'),
        ('res_id', '=', 0),
        ('create_uid', '=', SUPERUSER_ID),
    ]
    self.search(bundle_domain).unlink()
    self.clear_caches()
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import inspect
|
||||
import logging
|
||||
import warnings
|
||||
import traceback
|
||||
|
||||
from odoo import api, models
|
||||
from odoo.exceptions import AccessDenied
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def is_autovacuum(func):
    """ Return whether ``func`` is an autovacuum method. """
    if not callable(func):
        return False
    return getattr(func, '_autovacuum', False)
|
||||
|
||||
|
||||
class AutoVacuum(models.AbstractModel):
    """ Helper model to the ``@api.autovacuum`` method decorator. """
    _name = 'ir.autovacuum'
    _description = 'Automatic Vacuum'

    def _run_vacuum_cleaner(self):
        """
        Perform a complete database cleanup by safely calling every
        ``@api.autovacuum`` decorated method.
        """
        if not self.env.is_admin():
            raise AccessDenied()

        for model in self.env.values():
            cls = self.env.registry[model._name]
            for attr, func in inspect.getmembers(cls, is_autovacuum):
                _logger.debug('Calling %s.%s()', model, attr)
                try:
                    func(model)
                    # Commit after each successful cleaner so a later
                    # failure cannot roll back the work already done.
                    self.env.cr.commit()
                except Exception:
                    _logger.exception("Failed %s.%s()", model, attr)
                    self.env.cr.rollback()

        # Ensure backward compatibility with the previous autovacuum API
        try:
            self.power_on()
            self.env.cr.commit()
        except Exception:
            _logger.exception("Failed power_on")
            self.env.cr.rollback()

    # Deprecated API
    @api.model
    def power_on(self, *args, **kwargs):
        # Inspect the call stack: when an override chains into this base
        # implementation, the caller frame is also named 'power_on'.
        tb = traceback.extract_stack(limit=2)
        if tb[-2].name == 'power_on':
            # Fixed: the original message lacked a space between "method"
            # and the f-string, producing e.g. "methodin /path/to/file.py".
            warnings.warn(
                "You are extending the 'power_on' ir.autovacuum method "
                f"in {tb[-2].filename} around line {tb[-2].lineno}. "
                "You should instead use the @api.autovacuum decorator "
                "on your garbage collecting method.", DeprecationWarning, stacklevel=2)
|
||||
257
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_binary.py
Normal file
257
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_binary.py
Normal file
|
|
@ -0,0 +1,257 @@
|
|||
import logging
|
||||
import werkzeug.http
|
||||
from datetime import datetime
|
||||
from mimetypes import guess_extension
|
||||
|
||||
from odoo import models
|
||||
from odoo.exceptions import MissingError, UserError
|
||||
from odoo.http import Stream, request
|
||||
from odoo.tools import file_open, replace_exceptions
|
||||
from odoo.tools.image import image_process, image_guess_size_from_field_name
|
||||
from odoo.tools.mimetypes import guess_mimetype, get_extension
|
||||
|
||||
|
||||
DEFAULT_PLACEHOLDER_PATH = 'web/static/img/placeholder.png'
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IrBinary(models.AbstractModel):
    # Stateless helper model (no database table): only methods used by the
    # /web/content and /web/image HTTP controllers.
    _name = 'ir.binary'
    _description = "File streaming helper model for controllers"

    def _find_record(
        self, xmlid=None, res_model='ir.attachment', res_id=None,
        access_token=None,
    ):
        """
        Find and return a record either using an xmlid either a model+id
        pair. This method is an helper for the ``/web/content`` and
        ``/web/image`` controllers and should not be used in other
        contextes.

        :param Optional[str] xmlid: xmlid of the record
        :param Optional[str] res_model: model of the record,
            ir.attachment by default.
        :param Optional[id] res_id: id of the record
        :param Optional[str] access_token: access token to use instead
            of the access rights and access rules.
        :returns: single record
        :raises MissingError: when no record was found.
        """
        record = None
        if xmlid:
            # second argument False: return None instead of raising when missing
            record = self.env.ref(xmlid, False)
        elif res_id is not None and res_model in self.env:
            record = self.env[res_model].browse(res_id).exists()
        if not record:
            raise MissingError(f"No record found for xmlid={xmlid}, res_model={res_model}, id={res_id}")

        record = self._find_record_check_access(record, access_token)
        return record

    def _find_record_check_access(self, record, access_token):
        """Apply access control to a record about to be streamed."""
        if record._name == 'ir.attachment':
            # Attachments implement their own token/public/portal logic.
            return record.validate_access(access_token)

        record.check_access_rights('read')
        record.check_access_rule('read')
        return record

    def _record_to_stream(self, record, field_name):
        """
        Low level method responsible for the actual conversion from a
        model record to a stream. This method is an extensible hook for
        other modules. It is not meant to be directly called from
        outside or the ir.binary model.

        :param record: the record where to load the data from.
        :param str field_name: the binary field where to load the data
            from.
        :rtype: odoo.http.Stream
        """
        if record._name == 'ir.attachment' and field_name in ('raw', 'datas', 'db_datas'):
            return Stream.from_attachment(record)

        record.check_field_access_rights('read', [field_name])

        if record._fields[field_name].attachment:
            # Binary field stored as an attachment: fetch the backing
            # attachment row (sudo: field access was just checked above).
            field_attachment = self.env['ir.attachment'].sudo().search(
                domain=[('res_model', '=', record._name),
                        ('res_id', '=', record.id),
                        ('res_field', '=', field_name)],
                limit=1)
            if not field_attachment:
                raise MissingError("The related attachment does not exist.")
            return Stream.from_attachment(field_attachment)

        return Stream.from_binary_field(record, field_name)

    def _get_stream_from(
        self, record, field_name='raw', filename=None, filename_field='name',
        mimetype=None, default_mimetype='application/octet-stream',
    ):
        """
        Create a :class:odoo.http.Stream: from a record's binary field.

        :param record: the record where to load the data from.
        :param str field_name: the binary field where to load the data
            from.
        :param Optional[str] filename: when the stream is downloaded by
            a browser, what filename it should have on disk. By default
            it is ``{model}-{id}-{field}.{extension}``, the extension is
            determined thanks to mimetype.
        :param Optional[str] filename_field: like ``filename`` but use
            one of the record's char field as filename.
        :param Optional[str] mimetype: the data mimetype to use instead
            of the stored one (attachment) or the one determined by
            magic.
        :param str default_mimetype: the mimetype to use when the
            mimetype couldn't be determined. By default it is
            ``application/octet-stream``.
        :rtype: odoo.http.Stream
        """
        with replace_exceptions(ValueError, by=UserError(f'Expected singleton: {record}')):
            record.ensure_one()

        try:
            field_def = record._fields[field_name]
        except KeyError:
            raise UserError(f"Record has no field {field_name!r}.")
        if field_def.type != 'binary':
            raise UserError(
                f"Field {field_def!r} is type {field_def.type!r} but "
                f"it is only possible to stream Binary or Image fields."
            )

        stream = self._record_to_stream(record, field_name)

        if stream.type in ('data', 'path'):
            if mimetype:
                stream.mimetype = mimetype
            elif not stream.mimetype:
                # Sniff the mimetype from the first kilobyte of content.
                if stream.type == 'data':
                    head = stream.data[:1024]
                else:
                    with open(stream.path, 'rb') as file:
                        head = file.read(1024)
                stream.mimetype = guess_mimetype(head, default=default_mimetype)

        if filename:
            stream.download_name = filename
        elif filename_field in record:
            stream.download_name = record[filename_field]
        if not stream.download_name:
            stream.download_name = f'{record._table}-{record.id}-{field_name}'

        # Sanitize: newlines in a download filename would break the
        # Content-Disposition header.
        stream.download_name = stream.download_name.replace('\n', '_').replace('\r', '_')
        if (not get_extension(stream.download_name)
            and stream.mimetype != 'application/octet-stream'):
            stream.download_name += guess_extension(stream.mimetype) or ''

        return stream

    def _get_image_stream_from(
        self, record, field_name='raw', filename=None, filename_field='name',
        mimetype=None, default_mimetype='image/png', placeholder=None,
        width=0, height=0, crop=False, quality=0,
    ):
        """
        Create a :class:odoo.http.Stream: from a record's binary field,
        equivalent of :meth:`~get_stream_from` but for images.

        In case the record does not exist or is not accessible, the
        alternative ``placeholder`` path is used instead. If not set,
        a path is determined via
        :meth:`~odoo.models.BaseModel._get_placeholder_filename` which
        ultimately fallbacks on ``web/static/img/placeholder.png``.

        In case the arguments ``width``, ``height``, ``crop`` or
        ``quality`` are given, the image will be post-processed and the
        ETags (the unique cache http header) will be updated
        accordingly. See also :func:`odoo.tools.image.image_process`.

        :param record: the record where to load the data from.
        :param str field_name: the binary field where to load the data
            from.
        :param Optional[str] filename: when the stream is downloaded by
            a browser, what filename it should have on disk. By default
            it is ``{table}-{id}-{field}.{extension}``, the extension is
            determined thanks to mimetype.
        :param Optional[str] filename_field: like ``filename`` but use
            one of the record's char field as filename.
        :param Optional[str] mimetype: the data mimetype to use instead
            of the stored one (attachment) or the one determined by
            magic.
        :param str default_mimetype: the mimetype to use when the
            mimetype couldn't be determined. By default it is
            ``image/png``.
        :param Optional[pathlike] placeholder: in case the image is not
            found or unaccessible, the path of an image to use instead.
            By default the record ``_get_placeholder_filename`` on the
            requested field or ``web/static/img/placeholder.png``.
        :param int width: if not zero, the width of the resized image.
        :param int height: if not zero, the height of the resized image.
        :param bool crop: if true, crop the image instead of rezising
            it.
        :param int quality: if not zero, the quality of the resized
            image.

        """
        stream = None
        try:
            stream = self._get_stream_from(
                record, field_name, filename, filename_field, mimetype,
                default_mimetype
            )
        except UserError:
            # For explicit downloads surface the error; otherwise fall
            # through to the placeholder image below.
            if request.params.get('download'):
                raise

        if not stream or stream.size == 0:
            if not placeholder:
                placeholder = record._get_placeholder_filename(field_name)
            stream = self._get_placeholder_stream(placeholder)
            if (width, height) == (0, 0):
                # Derive a default size from the field name (e.g. image_128).
                width, height = image_guess_size_from_field_name(field_name)

        if stream.type == 'url':
            return stream  # Rezising an external URL is not supported
        if not stream.mimetype.startswith('image/'):
            stream.mimetype = 'application/octet-stream'
        if isinstance(stream.etag, str):
            # The post-processing parameters change the payload, so they
            # must be part of the cache validator too.
            stream.etag += f'-{width}x{height}-crop={crop}-quality={quality}'

        if isinstance(stream.last_modified, (int, float)):
            stream.last_modified = datetime.fromtimestamp(stream.last_modified, tz=None)
        modified = werkzeug.http.is_resource_modified(
            request.httprequest.environ,
            etag=stream.etag if isinstance(stream.etag, str) else None,
            last_modified=stream.last_modified
        )

        if modified and (width or height or crop):
            if stream.type == 'path':
                # Materialize the file in memory so it can be processed.
                with open(stream.path, 'rb') as file:
                    stream.type = 'data'
                    stream.path = None
                    stream.data = file.read()
            stream.data = image_process(
                stream.data,
                size=(width, height),
                crop=crop,
                quality=quality,
            )
            stream.size = len(stream.data)

        return stream

    def _get_placeholder_stream(self, path=None):
        """Return a Stream over a placeholder image file."""
        if not path:
            path = DEFAULT_PLACEHOLDER_PATH
        return Stream.from_path(path, filter_ext=('.png', '.jpg'))

    def _placeholder(self, path=False):
        """Return the raw bytes of a placeholder image file."""
        if not path:
            path = DEFAULT_PLACEHOLDER_PATH
        with file_open(path, 'rb', filter_ext=('.png', '.jpg')) as file:
            return file.read()
|
||||
|
|
@ -0,0 +1,125 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
"""
|
||||
Store database-specific configuration parameters
|
||||
"""
|
||||
|
||||
import uuid
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tools import config, ormcache, mute_logger
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
"""
|
||||
A dictionary holding some configuration parameters to be initialized when the database is created.
|
||||
"""
|
||||
_default_parameters = {
|
||||
"database.secret": lambda: str(uuid.uuid4()),
|
||||
"database.uuid": lambda: str(uuid.uuid1()),
|
||||
"database.create_date": fields.Datetime.now,
|
||||
"web.base.url": lambda: "http://localhost:%s" % config.get('http_port'),
|
||||
"base.login_cooldown_after": lambda: 10,
|
||||
"base.login_cooldown_duration": lambda: 60,
|
||||
}
|
||||
|
||||
|
||||
class IrConfigParameter(models.Model):
    """Per-database storage of configuration key-value pairs."""
    _name = 'ir.config_parameter'
    _description = 'System Parameter'
    _rec_name = 'key'
    _order = 'key'
    _allow_sudo_commands = False

    # Unique parameter name (see the SQL constraint below).
    key = fields.Char(required=True)
    # Parameter value, always stored as text.
    value = fields.Text(required=True)

    _sql_constraints = [
        ('key_uniq', 'unique (key)', 'Key must be unique.')
    ]

    @mute_logger('odoo.addons.base.models.ir_config_parameter')
    def init(self, force=False):
        """
        Initializes the parameters listed in _default_parameters.
        It overrides existing parameters if force is ``True``.
        """
        # avoid prefetching during module installation, as the res_users table
        # may not have all prescribed columns
        self = self.with_context(prefetch_fields=False)
        for key, func in _default_parameters.items():
            # force=True skips search and always performs the 'if' body (because ids=False)
            params = self.sudo().search([('key', '=', key)])
            if force or not params:
                params.set_param(key, func())

    @api.model
    def get_param(self, key, default=False):
        """Retrieve the value for a given key.

        :param string key: The key of the parameter value to retrieve.
        :param string default: default value if parameter is missing.
        :return: The value of the parameter, or ``default`` if it does not exist.
        :rtype: string
        """
        self.check_access_rights('read')
        return self._get_param(key) or default

    @api.model
    @ormcache('key')
    def _get_param(self, key):
        # we bypass the ORM because get_param() is used in some field's depends,
        # and must therefore work even when the ORM is not ready to work
        self.flush_model(['key', 'value'])
        self.env.cr.execute("SELECT value FROM ir_config_parameter WHERE key = %s", [key])
        result = self.env.cr.fetchone()
        # Returns None when the key is absent (falsy, like a missing value).
        return result and result[0]

    @api.model
    def set_param(self, key, value):
        """Sets the value of a parameter.

        Setting a parameter to ``False``/``None`` deletes it.

        :param string key: The key of the parameter value to set.
        :param string value: The value to set.
        :return: the previous value of the parameter or False if it did
                 not exist.
        :rtype: string
        """
        param = self.search([('key', '=', key)])
        if param:
            old = param.value
            if value is not False and value is not None:
                # Only write when the stringified value actually changed.
                if str(value) != old:
                    param.write({'value': value})
            else:
                param.unlink()
            return old
        else:
            if value is not False and value is not None:
                self.create({'key': key, 'value': value})
            return False

    # All CRUD operations invalidate the caches because _get_param() results
    # are ormcache'd.
    @api.model_create_multi
    def create(self, vals_list):
        self.clear_caches()
        return super(IrConfigParameter, self).create(vals_list)

    def write(self, vals):
        if 'key' in vals:
            # Keys listed in _default_parameters are vital; renaming them
            # would break the features relying on them.
            illegal = _default_parameters.keys() & self.mapped('key')
            if illegal:
                raise ValidationError(_("You cannot rename config parameters with keys %s", ', '.join(illegal)))
        self.clear_caches()
        return super(IrConfigParameter, self).write(vals)

    def unlink(self):
        self.clear_caches()
        return super(IrConfigParameter, self).unlink()

    @api.ondelete(at_uninstall=False)
    def unlink_default_parameters(self):
        # Prevent deleting the vital default parameters (database.secret, ...).
        for record in self.filtered(lambda p: p.key in _default_parameters.keys()):
            raise ValidationError(_("You cannot delete the %s record.", record.key))
|
||||
552
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_cron.py
Normal file
552
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_cron.py
Normal file
|
|
@ -0,0 +1,552 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import logging
|
||||
import threading
|
||||
import time
|
||||
import os
|
||||
import psycopg2
|
||||
import pytz
|
||||
from datetime import datetime, timedelta
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
import odoo
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
from psycopg2 import sql
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
BASE_VERSION = odoo.modules.get_manifest('base')['version']
|
||||
MAX_FAIL_TIME = timedelta(hours=5) # chosen with a fair roll of the dice
|
||||
|
||||
# custom function to call instead of NOTIFY postgresql command (opt-in)
|
||||
ODOO_NOTIFY_FUNCTION = os.environ.get('ODOO_NOTIFY_FUNCTION')
|
||||
|
||||
|
||||
class BadVersion(Exception):
    """Raised when the database's installed 'base' module version differs
    from the code's BASE_VERSION (see ``ir_cron._check_version``)."""
    pass
|
||||
|
||||
class BadModuleState(Exception):
    """Raised when modules are pending install/upgrade/removal, or when the
    'base' version is unknown (see ``ir_cron._check_modules_state``)."""
    pass
|
||||
|
||||
|
||||
# Map an ir.cron ``interval_type`` value to a factory producing the
# relativedelta step corresponding to a given ``interval_number``.
_intervalTypes = {
    'days': lambda interval: relativedelta(days=interval),
    'hours': lambda interval: relativedelta(hours=interval),
    'weeks': lambda interval: relativedelta(days=7*interval),
    'months': lambda interval: relativedelta(months=interval),
    'minutes': lambda interval: relativedelta(minutes=interval),
}
|
||||
|
||||
|
||||
class ir_cron(models.Model):
|
||||
""" Model describing cron jobs (also called actions or tasks).
|
||||
"""
|
||||
|
||||
# TODO: perhaps in the future we could consider a flag on ir.cron jobs
|
||||
# that would cause database wake-up even if the database has not been
|
||||
# loaded yet or was already unloaded (e.g. 'force_db_wakeup' or something)
|
||||
# See also odoo.cron
|
||||
|
||||
_name = "ir.cron"
|
||||
_order = 'cron_name'
|
||||
_description = 'Scheduled Actions'
|
||||
_allow_sudo_commands = False
|
||||
|
||||
ir_actions_server_id = fields.Many2one(
|
||||
'ir.actions.server', 'Server action',
|
||||
delegate=True, ondelete='restrict', required=True)
|
||||
cron_name = fields.Char('Name', related='ir_actions_server_id.name', store=True, readonly=False)
|
||||
user_id = fields.Many2one('res.users', string='Scheduler User', default=lambda self: self.env.user, required=True)
|
||||
active = fields.Boolean(default=True)
|
||||
interval_number = fields.Integer(default=1, group_operator=None, help="Repeat every x.")
|
||||
interval_type = fields.Selection([('minutes', 'Minutes'),
|
||||
('hours', 'Hours'),
|
||||
('days', 'Days'),
|
||||
('weeks', 'Weeks'),
|
||||
('months', 'Months')], string='Interval Unit', default='months')
|
||||
numbercall = fields.Integer(string='Number of Calls', default=1, help='How many times the method is called,\na negative number indicates no limit.')
|
||||
doall = fields.Boolean(string='Repeat Missed', help="Specify if missed occurrences should be executed when the server restarts.")
|
||||
nextcall = fields.Datetime(string='Next Execution Date', required=True, default=fields.Datetime.now, help="Next planned execution date for this job.")
|
||||
lastcall = fields.Datetime(string='Last Execution Date', help="Previous time the cron ran successfully, provided to the job through the context on the `lastcall` key")
|
||||
priority = fields.Integer(default=5, group_operator=None, help='The priority of the job, as an integer: 0 means higher priority, 10 means lower priority.')
|
||||
|
||||
@api.model_create_multi
def create(self, vals_list):
    """Create cron jobs, forcing the delegated server action's usage flag."""
    for vals in vals_list:
        vals['usage'] = 'ir_cron'
    if os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
        # Opt-in: notify cron workers once this transaction commits.
        self._cr.postcommit.add(self._notifydb)
    return super().create(vals_list)
|
||||
|
||||
@api.model
def default_get(self, fields_list):
    """Provide defaults for new cron jobs."""
    # only 'code' state is supported for cron job so set it as default
    if not self._context.get('default_state'):
        self = self.with_context(default_state='code')
    return super(ir_cron, self).default_get(fields_list)
|
||||
|
||||
@api.onchange('active', 'interval_number', 'interval_type')
def _onchange_interval_number(self):
    """Disable the job and warn when the interval is not strictly positive."""
    if self.active and (self.interval_number <= 0 or not self.interval_type):
        self.active = False
        return {'warning': {
            'title': _("Scheduled action disabled"),
            'message': _("This scheduled action has been disabled because its interval number is not a strictly positive value.")}
        }
|
||||
|
||||
def method_direct_trigger(self):
    """Run the cron job(s) immediately, outside the scheduler.

    Requires write access on the cron; each job is locked first to avoid
    racing with a scheduler worker, then executed as the job's user.
    """
    self.check_access_rights('write')
    for cron in self:
        cron._try_lock()
        _logger.info('Manually starting job `%s`.', cron.name)
        cron.with_user(cron.user_id).with_context({'lastcall': cron.lastcall}).ir_actions_server_id.run()
        # Flush before logging completion so pending ORM writes hit the DB.
        self.env.flush_all()
        _logger.info('Job `%s` done.', cron.name)
        cron.lastcall = fields.Datetime.now()
    return True
|
||||
|
||||
@classmethod
def _process_jobs(cls, db_name):
    """ Execute every job ready to be run on this database. """
    try:
        db = odoo.sql_db.db_connect(db_name)
        # Tag the worker thread with the database it is serving (cleared
        # in the finally clause below).
        threading.current_thread().dbname = db_name
        with db.cursor() as cron_cr:
            cls._check_version(cron_cr)
            jobs = cls._get_all_ready_jobs(cron_cr)
            if not jobs:
                return
            cls._check_modules_state(cron_cr, jobs)

            for job_id in (job['id'] for job in jobs):
                try:
                    job = cls._acquire_one_job(cron_cr, (job_id,))
                except psycopg2.extensions.TransactionRollbackError:
                    # Serialization failure: another worker got there first.
                    cron_cr.rollback()
                    _logger.debug("job %s has been processed by another worker, skip", job_id)
                    continue
                if not job:
                    _logger.debug("another worker is processing job %s, skip", job_id)
                    continue
                _logger.debug("job %s acquired", job_id)
                # take into account overridings of _process_job() on that database
                registry = odoo.registry(db_name)
                registry[cls._name]._process_job(db, cron_cr, job)
                # Commit releases the row lock taken by _acquire_one_job.
                cron_cr.commit()
                _logger.debug("job %s updated and released", job_id)

    except BadVersion:
        _logger.warning('Skipping database %s as its base version is not %s.', db_name, BASE_VERSION)
    except BadModuleState:
        _logger.warning('Skipping database %s because of modules to install/upgrade/remove.', db_name)
    except psycopg2.ProgrammingError as e:
        if e.pgcode == '42P01':
            # Class 42 — Syntax Error or Access Rule Violation; 42P01: undefined_table
            # The table ir_cron does not exist; this is probably not an OpenERP database.
            _logger.warning('Tried to poll an undefined table on database %s.', db_name)
        else:
            raise
    except Exception:
        _logger.warning('Exception in cron:', exc_info=True)
    finally:
        if hasattr(threading.current_thread(), 'dbname'):
            del threading.current_thread().dbname
|
||||
|
||||
@classmethod
def _check_version(cls, cron_cr):
    """ Ensure the code version matches the database version """
    cron_cr.execute("""
        SELECT latest_version
        FROM ir_module_module
        WHERE name='base'
    """)
    (version,) = cron_cr.fetchone()
    if version is None:
        # NULL version: 'base' is not fully installed yet.
        raise BadModuleState()
    if version != BASE_VERSION:
        raise BadVersion()
|
||||
|
||||
@classmethod
def _check_modules_state(cls, cr, jobs):
    """ Ensure no module is installing or upgrading """
    cr.execute("""
        SELECT COUNT(*)
        FROM ir_module_module
        WHERE state LIKE %s
    """, ['to %'])
    (changes,) = cr.fetchone()
    if not changes:
        # No module in a 'to install/upgrade/remove' state: all good.
        return

    if not jobs:
        raise BadModuleState()

    oldest = min([
        fields.Datetime.from_string(job['nextcall'])
        for job in jobs
    ])
    if datetime.now() - oldest < MAX_FAIL_TIME:
        # Modules are in flux but not for too long yet: keep waiting.
        raise BadModuleState()

    # the cron execution failed around MAX_FAIL_TIME * 60 times (1 failure
    # per minute for 5h) in which case we assume that the crons are stuck
    # because the db has zombie states and we force a call to
    # reset_module_states.
    odoo.modules.reset_modules_state(cr.dbname)
|
||||
|
||||
@classmethod
def _get_all_ready_jobs(cls, cr):
    """ Return a list of all jobs that are ready to be executed """
    # A job is ready when it is active, has remaining calls, and either its
    # nextcall is due or a pending ir_cron_trigger row targets it.
    cr.execute("""
        SELECT *, cron_name->>'en_US' as cron_name
        FROM ir_cron
        WHERE active = true
          AND numbercall != 0
          AND (nextcall <= (now() at time zone 'UTC')
            OR id in (
                SELECT cron_id
                FROM ir_cron_trigger
                WHERE call_at <= (now() at time zone 'UTC')
            )
          )
        ORDER BY priority
    """)
    return cr.dictfetchall()
|
||||
|
||||
@classmethod
def _acquire_one_job(cls, cr, job_ids):
    """
    Acquire for update one job that is ready from the job_ids tuple.

    The jobs that have already been processed in this worker should
    be excluded from the tuple.

    This function raises a ``psycopg2.errors.SerializationFailure``
    when the ``nextcall`` of one of the job_ids is modified in
    another transaction. You should rollback the transaction and try
    again later.

    :param cr: database cursor used to lock the job row.
    :param tuple job_ids: candidate job ids (typically a 1-tuple).
    :return: the acquired job row as a dict, or None when none is available.
    """

    # We have to make sure ALL jobs are executed ONLY ONCE no matter
    # how many cron workers may process them. The exlusion mechanism
    # is twofold: (i) prevent parallel processing of the same job,
    # and (ii) prevent re-processing jobs that have been processed
    # already.
    #
    # (i) is implemented via `LIMIT 1 FOR UPDATE SKIP LOCKED`, each
    # worker just acquire one available job at a time and lock it so
    # the other workers don't select it too.
    # (ii) is implemented via the `WHERE` statement, when a job has
    # been processed, its nextcall is updated to a date in the
    # future and the optional triggers are removed.
    #
    # Note about (ii): it is possible that a job becomes available
    # again quickly (e.g. high frequency or self-triggering cron).
    # This function doesn't prevent from acquiring that job multiple
    # times at different moments. This can block a worker on
    # executing a same job in loop. To prevent this problem, the
    # callee is responsible of providing a `job_ids` tuple without
    # the jobs it has executed already.
    #
    # An `UPDATE` lock type is the strongest row lock, it conflicts
    # with ALL other lock types. Among them the `KEY SHARE` row lock
    # which is implicitely aquired by foreign keys to prevent the
    # referenced record from being removed while in use. Because we
    # never delete acquired cron jobs, foreign keys are safe to
    # concurrently reference cron jobs. Hence, the `NO KEY UPDATE`
    # row lock is used, it is a weaker lock that does conflict with
    # everything BUT `KEY SHARE`.
    #
    # Learn more: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS

    query = """
        SELECT *, cron_name->>'en_US' as cron_name
        FROM ir_cron
        WHERE active = true
          AND numbercall != 0
          AND (nextcall <= (now() at time zone 'UTC')
            OR EXISTS (
                SELECT cron_id
                FROM ir_cron_trigger
                WHERE call_at <= (now() at time zone 'UTC')
                  AND cron_id = ir_cron.id
            )
          )
          AND id in %s
        ORDER BY priority
        LIMIT 1 FOR NO KEY UPDATE SKIP LOCKED
    """
    try:
        cr.execute(query, [job_ids], log_exceptions=False)
    except psycopg2.extensions.TransactionRollbackError:
        # A serialization error can occur when another cron worker
        # commits the new `nextcall` value of a cron it just ran and
        # that commit occured just before this query. The error is
        # genuine and the job should be skipped in this cron worker.
        raise
    except Exception as exc:
        _logger.error("bad query: %s\nERROR: %s", query, exc)
        raise
    return cr.dictfetchone()
|
||||
|
||||
@classmethod
def _process_job(cls, db, cron_cr, job):
    """ Execute a cron job and re-schedule a call for later.

    :param db: database object of the current registry
    :param cron_cr: cursor of the cron worker, used to persist the new
        schedule; committed by the caller, not here
    :param job: dict row of ``ir_cron`` as returned by the acquire query
    """

    # Compute how many calls were missed and at what time we should
    # recall the cron next. In the example bellow, we fake a cron
    # with an interval of 30 (starting at 0) that was last executed
    # at 15 and that is executed again at 135.
    #
    #   0          60          120         180
    # --|-----|-----|-----|-----|-----|-----|----> time
    #   1     2*    *     *     *  3  4
    #
    # 1: lastcall, the last time the cron was executed
    # 2: past_nextcall, the cron nextcall as seen from lastcall
    # *: missed_call, a total of 4 calls are missing
    # 3: now
    # 4: future_nextcall, the cron nextcall as seen from now

    # A null/negative interval would make the catch-up loop below spin
    # forever; disable the job instead of looping.
    if job['interval_number'] <= 0:
        _logger.error("Job %s %r has been disabled because its interval number is null or negative.", job['id'], job['cron_name'])
        cron_cr.execute("UPDATE ir_cron SET active=false WHERE id=%s", [job['id']])
        return

    # The job itself runs on its own cursor/transaction, separate from
    # the scheduling cursor (cron_cr).
    with cls.pool.cursor() as job_cr:
        lastcall = fields.Datetime.to_datetime(job['lastcall'])
        interval = _intervalTypes[job['interval_type']](job['interval_number'])
        env = api.Environment(job_cr, job['user_id'], {'lastcall': lastcall})
        ir_cron = env[cls._name]

        # Use the user's timezone to compare and compute datetimes,
        # otherwise unexpected results may appear. For instance, adding
        # 1 month in UTC to July 1st at midnight in GMT+2 gives July 30
        # instead of August 1st!
        now = fields.Datetime.context_timestamp(ir_cron, datetime.utcnow())
        past_nextcall = fields.Datetime.context_timestamp(
            ir_cron, fields.Datetime.to_datetime(job['nextcall']))

        # Compute how many call were missed
        missed_call = past_nextcall
        missed_call_count = 0
        while missed_call <= now:
            missed_call += interval
            missed_call_count += 1
        future_nextcall = missed_call

        # Compute how many time we should run the cron
        effective_call_count = (
                 1 if not missed_call_count           # run at least once
            else 1 if not job['doall']                # run once for all
            else missed_call_count if job['numbercall'] == -1  # run them all
            else min(missed_call_count, job['numbercall'])  # run maximum numbercall times
        )
        # -1 is the sentinel for "unlimited calls remaining"
        call_count_left = max(job['numbercall'] - effective_call_count, -1)

        # The actual cron execution
        for call in range(effective_call_count):
            ir_cron._callback(job['cron_name'], job['ir_actions_server_id'], job['id'])

    # Update the cron with the information computed above
    cron_cr.execute("""
        UPDATE ir_cron
        SET nextcall=%s,
            numbercall=%s,
            lastcall=%s,
            active=%s
        WHERE id=%s
    """, [
        fields.Datetime.to_string(future_nextcall.astimezone(pytz.UTC)),
        call_count_left,
        fields.Datetime.to_string(now.astimezone(pytz.UTC)),
        # a job with no calls left is deactivated (unless already inactive)
        job['active'] and bool(call_count_left),
        job['id'],
    ])

    # Drop the triggers that have been consumed by this run.
    cron_cr.execute("""
        DELETE FROM ir_cron_trigger
        WHERE cron_id = %s
          AND call_at < (now() at time zone 'UTC')
    """, [job['id']])
|
||||
|
||||
@api.model
def _callback(self, cron_name, server_action_id, job_id):
    """ Run the method associated to a given job. It takes care of logging
    and exception handling. Note that the user running the server action
    is the user calling this method.

    :param str cron_name: display name of the job (for logs)
    :param int server_action_id: id of the ir.actions.server to run
    :param int job_id: id of the ir_cron row (only used in error logs)
    """
    try:
        if self.pool != self.pool.check_signaling():
            # the registry has changed, reload self in the new registry
            self.env.reset()
            self = self.env()[self._name]

        log_depth = (None if _logger.isEnabledFor(logging.DEBUG) else 1)
        odoo.netsvc.log(_logger, logging.DEBUG, 'cron.object.execute', (self._cr.dbname, self._uid, '*', cron_name, server_action_id), depth=log_depth)
        start_time = False
        _logger.info('Starting job `%s`.', cron_name)
        # only pay for time.time() when DEBUG timing will actually be logged
        if _logger.isEnabledFor(logging.DEBUG):
            start_time = time.time()
        self.env['ir.actions.server'].browse(server_action_id).run()
        # push pending ORM writes to the job cursor before it is committed
        self.env.flush_all()
        _logger.info('Job `%s` done.', cron_name)
        if start_time and _logger.isEnabledFor(logging.DEBUG):
            end_time = time.time()
            _logger.debug('%.3fs (cron %s, server action %d with uid %d)', end_time - start_time, cron_name, server_action_id, self.env.uid)
        # propagate cache/registry invalidations to the other workers
        self.pool.signal_changes()
    except Exception as e:
        self.pool.reset_changes()
        _logger.exception("Call from cron %s for server action #%s failed in Job #%s",
                          cron_name, server_action_id, job_id)
        self._handle_callback_exception(cron_name, server_action_id, job_id, e)
|
||||
|
||||
@api.model
def _handle_callback_exception(self, cron_name, server_action_id, job_id, job_exception):
    """Hook invoked when a cron job raised.

    The failed job's transaction is rolled back; the exception itself has
    already been logged by the caller. Override to add custom failure
    handling (the arguments identify the job and carry the exception).
    """
    self._cr.rollback()
|
||||
|
||||
def _try_lock(self, lockfk=False):
    """Try to grab a dummy exclusive write-lock to the rows with the given ids,
    to make sure a following write() or unlink() will not block due
    to a process currently executing those cron tasks.

    :param lockfk: acquire a strong row lock which conflicts with
                   the lock acquired by foreign keys when they
                   reference this row.
    :raises UserError: when any of the rows is already locked by a
                       running cron worker (NOWAIT fails immediately).
    """
    # NO KEY UPDATE still allows FK KEY SHARE locks; UPDATE conflicts with them
    row_level_lock = "UPDATE" if lockfk else "NO KEY UPDATE"
    try:
        self._cr.execute(f"""
            SELECT id
            FROM "{self._table}"
            WHERE id IN %s
            FOR {row_level_lock} NOWAIT
        """, [tuple(self.ids)], log_exceptions=False)
    except psycopg2.OperationalError:
        self._cr.rollback()  # early rollback to allow translations to work for the user feedback
        raise UserError(_("Record cannot be modified right now: "
                          "This cron task is currently being executed and may not be modified "
                          "Please try again in a few minutes"))
|
||||
|
||||
def write(self, vals):
    """Write on cron jobs, refusing to touch rows a worker is running.

    When the schedule changes (``nextcall`` or activation) and
    ``ODOO_NOTIFY_CRON_CHANGES`` is set, wake up the cron workers after
    commit so the new schedule is picked up immediately.
    """
    self._try_lock()
    schedule_changed = 'nextcall' in vals or vals.get('active')
    if schedule_changed and os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
        self._cr.postcommit.add(self._notifydb)
    return super(ir_cron, self).write(vals)
|
||||
|
||||
def unlink(self):
    """Delete cron jobs, blocking if a worker currently runs them."""
    # A delete conflicts with FK KEY SHARE locks, hence the strong lock.
    self._try_lock(lockfk=True)
    return super(ir_cron, self).unlink()
|
||||
|
||||
def try_write(self, values):
    """Best-effort ``write``.

    Apply ``values`` only when no cron worker currently holds the rows;
    return ``False`` instead of blocking or raising when they are busy.
    """
    acquired = True
    try:
        # NOWAIT fails at once when a worker holds the row; the savepoint
        # confines the failed lock attempt so the transaction stays usable.
        with self._cr.savepoint():
            self._cr.execute(f"""
                SELECT id
                FROM "{self._table}"
                WHERE id IN %s
                FOR NO KEY UPDATE NOWAIT
            """, [tuple(self.ids)], log_exceptions=False)
    except psycopg2.OperationalError:
        acquired = False
    if acquired:
        return super(ir_cron, self).write(values)
    return False
|
||||
|
||||
@api.model
def toggle(self, model, domain):
    """Activate this cron iff at least one ``model`` record matches ``domain``."""
    # Prevent deactivated cron jobs from being re-enabled through side
    # effects on neutralized databases.
    icp = self.env['ir.config_parameter'].sudo()
    if icp.get_param('database.is_neutralized'):
        return True

    has_records = self.env[model].search_count(domain) > 0
    return self.try_write({'active': has_records})
|
||||
|
||||
def _trigger(self, at=None):
    """
    Schedule this cron to be executed soon, independently of its
    ``nextcall`` field value.

    Without argument the job is picked up by the next batch; pass ``at``
    to postpone execution to one or several precise moments (one-minute
    precision). The behaviour itself lives in :meth:`~._trigger_list`,
    which is the recommended method for overrides.

    :param Optional[Union[datetime.datetime, list[datetime.datetime]]] at:
        When to execute the cron, at one or several moments in time instead
        of as soon as possible.
    """
    if at is None:
        moments = [fields.Datetime.now()]
    elif isinstance(at, datetime):
        moments = [at]
    else:
        moments = list(at)
        assert all(isinstance(moment, datetime) for moment in moments)

    self._trigger_list(moments)
|
||||
|
||||
def _trigger_list(self, at_list):
    """
    Implementation of :meth:`~._trigger`.

    Creates one ``ir.cron.trigger`` row per requested moment and wakes
    the cron workers when at least one moment is already due.

    :param list[datetime.datetime] at_list:
        Execute the cron later, at precise moments in time.
    """
    self.ensure_one()
    now = fields.Datetime.now()

    if not self.sudo().active:
        # skip triggers that would be ignored
        at_list = [at for at in at_list if at > now]

    if not at_list:
        return

    self.env['ir.cron.trigger'].sudo().create([
        {'cron_id': self.id, 'call_at': at}
        for at in at_list
    ])
    if _logger.isEnabledFor(logging.DEBUG):
        ats = ', '.join(map(str, at_list))
        _logger.debug("will execute '%s' at %s", self.sudo().name, ats)

    # Wake the workers only when a trigger is already due, unless the
    # env var forces notification for every trigger creation.
    if min(at_list) <= now or os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
        self._cr.postcommit.add(self._notifydb)
|
||||
|
||||
def _notifydb(self):
    """ Wake up the cron workers
    The ODOO_NOTIFY_CRON_CHANGES environment variable allows to force the notifydb on both
    ir_cron modification and on trigger creation (regardless of call_at)
    """
    # The notification goes through the 'postgres' maintenance database so
    # every worker hears it regardless of which database it serves; the
    # payload is the database whose crons changed.
    with odoo.sql_db.db_connect('postgres').cursor() as cr:
        if ODOO_NOTIFY_FUNCTION:
            # custom notify function; sql.Identifier quotes it safely
            query = sql.SQL("SELECT {}('cron_trigger', %s)").format(sql.Identifier(ODOO_NOTIFY_FUNCTION))
        else:
            query = "NOTIFY cron_trigger, %s"
        cr.execute(query, [self.env.cr.dbname])
    _logger.debug("cron workers notified")
|
||||
|
||||
|
||||
class ir_cron_trigger(models.Model):
    """One-shot execution requests for cron jobs (see ``ir_cron._trigger``)."""
    _name = 'ir.cron.trigger'
    _description = 'Triggered actions'
    _allow_sudo_commands = False

    # job to run, and the earliest moment it should be picked up
    cron_id = fields.Many2one("ir.cron", index=True)
    call_at = fields.Datetime()

    @api.autovacuum
    def _gc_cron_triggers(self):
        """Garbage-collect consumed triggers older than one week."""
        # NOTE(review): datetime.now() is naive here while call_at values
        # are compared to UTC elsewhere — with a one-week slack this is
        # presumably harmless; confirm server runs with UTC convention.
        domain = [('call_at', '<', datetime.now() + relativedelta(weeks=-1))]
        records = self.search(domain, limit=models.GC_UNLINK_LIMIT)
        if len(records) >= models.GC_UNLINK_LIMIT:
            # more rows may remain: re-trigger the autovacuum job itself
            self.env.ref('base.autovacuum_job')._trigger()
        return records.unlink()
|
||||
190
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_default.py
Normal file
190
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_default.py
Normal file
|
|
@ -0,0 +1,190 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import json
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
|
||||
class IrDefault(models.Model):
    """ User-defined default values for fields.

    Each row stores one JSON-encoded default for a field, optionally
    scoped to a user, a company and/or an opaque ``condition`` string.
    Lookups are cached per (uid, company, model, condition) via ormcache.
    """
    _name = 'ir.default'
    _description = 'Default Values'
    _rec_name = 'field_id'
    _allow_sudo_commands = False

    field_id = fields.Many2one('ir.model.fields', string="Field", required=True,
                               ondelete='cascade', index=True)
    user_id = fields.Many2one('res.users', string='User', ondelete='cascade', index=True,
                              help="If set, action binding only applies for this user.")
    company_id = fields.Many2one('res.company', string='Company', ondelete='cascade', index=True,
                                 help="If set, action binding only applies for this company")
    condition = fields.Char('Condition', help="If set, applies the default upon condition.")
    json_value = fields.Char('Default Value (JSON format)', required=True)

    @api.constrains('json_value')
    def _check_json_format(self):
        """Reject values that are not parseable JSON."""
        for record in self:
            try:
                json.loads(record.json_value)
            except json.JSONDecodeError:
                raise ValidationError(_('Invalid JSON format in Default Value field.'))

    @api.model_create_multi
    def create(self, vals_list):
        # any new default invalidates the get_model_defaults ormcache
        self.clear_caches()
        return super(IrDefault, self).create(vals_list)

    def write(self, vals):
        if self:
            self.clear_caches()
        new_default = super().write(vals)
        # re-check ACLs after the write (values may affect rule evaluation)
        self.check_access_rule('write')
        return new_default

    def unlink(self):
        if self:
            self.clear_caches()
        return super(IrDefault, self).unlink()

    @api.model
    def set(self, model_name, field_name, value, user_id=False, company_id=False, condition=False):
        """ Defines a default value for the given field. Any entry for the same
        scope (field, user, company) will be replaced. The value is encoded
        in JSON to be stored to the database.

        :param model_name:
        :param field_name:
        :param value:
        :param user_id: may be ``False`` for all users, ``True`` for the
                        current user, or any user id
        :param company_id: may be ``False`` for all companies, ``True`` for
                           the current user's company, or any company id
        :param condition: optional condition that restricts the
                          applicability of the default value; this is an
                          opaque string, but the client typically uses
                          single-field conditions in the form ``'key=val'``.
        """
        if user_id is True:
            user_id = self.env.uid
        if company_id is True:
            company_id = self.env.company.id

        # check consistency of model_name, field_name, and value
        try:
            model = self.env[model_name]
            field = model._fields[field_name]
            parsed = field.convert_to_cache(value, model)
            json_value = json.dumps(value, ensure_ascii=False)
        except KeyError:
            raise ValidationError(_("Invalid field %s.%s") % (model_name, field_name))
        except Exception:
            raise ValidationError(_("Invalid value for %s.%s: %s") % (model_name, field_name, value))
        # guard against values the int4 database column cannot hold
        if field.type == 'integer' and not (-2**31 < parsed < 2**31-1):
            raise ValidationError(_("Invalid value for %s.%s: %s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model_name, field_name, value))

        # update existing default for the same scope, or create one
        field = self.env['ir.model.fields']._get(model_name, field_name)
        default = self.search([
            ('field_id', '=', field.id),
            ('user_id', '=', user_id),
            ('company_id', '=', company_id),
            ('condition', '=', condition),
        ])
        if default:
            # Avoid clearing the cache if nothing changes
            if default.json_value != json_value:
                default.write({'json_value': json_value})
        else:
            self.create({
                'field_id': field.id,
                'user_id': user_id,
                'company_id': company_id,
                'condition': condition,
                'json_value': json_value,
            })
        return True

    @api.model
    def get(self, model_name, field_name, user_id=False, company_id=False, condition=False):
        """ Return the default value for the given field, user and company, or
        ``None`` if no default is available.

        :param model_name:
        :param field_name:
        :param user_id: may be ``False`` for all users, ``True`` for the
                        current user, or any user id
        :param company_id: may be ``False`` for all companies, ``True`` for
                           the current user's company, or any company id
        :param condition: optional condition that restricts the
                          applicability of the default value; this is an
                          opaque string, but the client typically uses
                          single-field conditions in the form ``'key=val'``.
        """
        if user_id is True:
            user_id = self.env.uid
        if company_id is True:
            company_id = self.env.company.id

        field = self.env['ir.model.fields']._get(model_name, field_name)
        default = self.search([
            ('field_id', '=', field.id),
            ('user_id', '=', user_id),
            ('company_id', '=', company_id),
            ('condition', '=', condition),
        ], limit=1)
        return json.loads(default.json_value) if default else None

    @api.model
    @tools.ormcache('self.env.uid', 'self.env.company.id', 'model_name', 'condition')
    # Note about ormcache invalidation: it is not needed when deleting a field,
    # a user, or a company, as the corresponding defaults will no longer be
    # requested. It must only be done when a user's company is modified.
    def get_model_defaults(self, model_name, condition=False):
        """ Return the available default values for the given model (for the
        current user), as a dict mapping field names to values.
        """
        cr = self.env.cr
        query = """ SELECT f.name, d.json_value
                    FROM ir_default d
                    JOIN ir_model_fields f ON d.field_id=f.id
                    WHERE f.model=%s
                        AND (d.user_id IS NULL OR d.user_id=%s)
                        AND (d.company_id IS NULL OR d.company_id=%s)
                        AND {}
                    ORDER BY d.user_id, d.company_id, d.id
                """
        # self.env.company is empty when there is no user (controllers with auth=None)
        params = [model_name, self.env.uid, self.env.company.id or None]
        if condition:
            query = query.format("d.condition=%s")
            params.append(condition)
        else:
            query = query.format("d.condition IS NULL")
        cr.execute(query, params)
        result = {}
        for row in cr.fetchall():
            # keep the highest priority default for each field
            if row[0] not in result:
                result[row[0]] = json.loads(row[1])
        return result

    @api.model
    def discard_records(self, records):
        """ Discard all the defaults of many2one fields using any of the given
        records.
        """
        json_vals = [json.dumps(id) for id in records.ids]
        domain = [('field_id.ttype', '=', 'many2one'),
                  ('field_id.relation', '=', records._name),
                  ('json_value', 'in', json_vals)]
        return self.search(domain).unlink()

    @api.model
    def discard_values(self, model_name, field_name, values):
        """ Discard all the defaults for any of the given values. """
        field = self.env['ir.model.fields']._get(model_name, field_name)
        json_vals = [json.dumps(value, ensure_ascii=False) for value in values]
        domain = [('field_id', '=', field.id), ('json_value', 'in', json_vals)]
        return self.search(domain).unlink()
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models
|
||||
from odoo.modules.loading import force_demo
|
||||
from odoo.addons.base.models.ir_module import assert_log_admin_access
|
||||
|
||||
|
||||
class IrDemo(models.TransientModel):
    """Wizard that force-installs demo data on the current database."""

    _name = 'ir.demo'
    _description = 'Demo'

    @assert_log_admin_access
    def install_demo(self):
        """Load demo data for installed modules, then reload the client."""
        force_demo(self.env.cr)
        # Send the browser back to the web client root so the UI reloads.
        reload_client = {
            'type': 'ir.actions.act_url',
            'target': 'self',
            'url': '/web',
        }
        return reload_client
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
from odoo import api, fields, models
|
||||
|
||||
|
||||
class DemoFailure(models.TransientModel):
    """ Stores modules for which we could not install demo data
    """
    _name = 'ir.demo_failure'
    _description = 'Demo failure'

    # module whose demo data failed to load
    module_id = fields.Many2one('ir.module.module', required=True, string="Module")
    # textual error reported during the failed installation
    error = fields.Char(string="Error")
    # wizard collecting the failures for display to the user
    wizard_id = fields.Many2one('ir.demo_failure.wizard')
|
||||
|
||||
class DemoFailureWizard(models.TransientModel):
    """Wizard listing demo-data installation failures to the user."""
    _name = 'ir.demo_failure.wizard'
    _description = 'Demo Failure wizard'

    failure_ids = fields.One2many(
        'ir.demo_failure', 'wizard_id', readonly=True,
        string="Demo Installation Failures"
    )
    failures_count = fields.Integer(compute='_compute_failures_count')

    @api.depends('failure_ids')
    def _compute_failures_count(self):
        # simple counter over the one2many, shown in the wizard view
        for r in self:
            r.failures_count = len(r.failure_ids)

    def done(self):
        """Acknowledge the failures and resume the installation flow."""
        # pylint: disable=next-method-called
        return self.env['ir.module.module'].next()
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
|
||||
|
||||
class IrExports(models.Model):
    """Saved export templates: a named list of field paths for a model."""
    _name = "ir.exports"
    _description = 'Exports'
    _order = 'name'

    name = fields.Char(string='Export Name')
    # technical name of the model this template applies to
    resource = fields.Char(index=True)
    export_fields = fields.One2many('ir.exports.line', 'export_id', string='Export ID', copy=True)
|
||||
|
||||
|
||||
class IrExportsLine(models.Model):
    """One field path inside an export template (``ir.exports``)."""
    _name = 'ir.exports.line'
    _description = 'Exports Line'
    _order = 'id'

    # field path to export, possibly dotted (e.g. 'partner_id/name')
    name = fields.Char(string='Field Name')
    export_id = fields.Many2one('ir.exports', string='Export', index=True, ondelete='cascade')
|
||||
689
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_fields.py
Normal file
689
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_fields.py
Normal file
|
|
@ -0,0 +1,689 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import json
|
||||
import functools
|
||||
import itertools
|
||||
|
||||
import psycopg2
|
||||
import pytz
|
||||
|
||||
from odoo import api, Command, fields, models, _
|
||||
from odoo.tools import ustr, OrderedSet
|
||||
from odoo.tools.translate import code_translations, _lt
|
||||
|
||||
# Keys under which an import row may reference an existing record
# (db id, external id, or the implicit name column).
REFERENCING_FIELDS = {None, 'id', '.id'}

def only_ref_fields(record):
    """Return the sub-dict of ``record`` keeping only referencing keys."""
    return {key: record[key] for key in record if key in REFERENCING_FIELDS}

def exclude_ref_fields(record):
    """Return the sub-dict of ``record`` without the referencing keys."""
    return {key: record[key] for key in record if key not in REFERENCING_FIELDS}
|
||||
|
||||
# these lazy translations promise translations for ['yes', 'no', 'true', 'false']
# (consumed by _str_to_boolean via _get_boolean_translations)
BOOLEAN_TRANSLATIONS = (
    _lt('yes'),
    _lt('no'),
    _lt('true'),
    _lt('false')
)
|
||||
|
||||
class ImportWarning(Warning):
    """Non-fatal warning sent upwards the stack during the import process."""
|
||||
|
||||
class ConversionNotFound(ValueError):
    """No converter exists for the requested (source type, field type) pair."""
|
||||
|
||||
class IrFieldsConverter(models.AbstractModel):
|
||||
_name = 'ir.fields.converter'
|
||||
_description = 'Fields Converter'
|
||||
|
||||
@api.model
def _format_import_error(self, error_type, error_msg, error_params=(), error_args=None):
    """Build an ``error_type`` exception from ``error_msg % error_params``.

    Percent signs inside string params are doubled first, so the import
    system can safely run another round of %-formatting on the message.
    """
    def escape(param):
        # double '%' so later %-formatting by the import system stays safe
        return param.replace('%', '%%') if isinstance(param, str) else param

    if error_params:
        if isinstance(error_params, str):
            error_params = escape(error_params)
        elif isinstance(error_params, dict):
            error_params = {key: escape(val) for key, val in error_params.items()}
        elif isinstance(error_params, tuple):
            error_params = tuple(escape(val) for val in error_params)
    return error_type(error_msg % error_params, error_args)
|
||||
|
||||
def _get_import_field_path(self, field, value):
    """Rebuild the full column path of ``field`` for error attribution.

    One2many imports record the chain of ancestor fields in the
    ``parent_fields_hierarchy`` context key (built by ``_str_to_one2many``),
    and the value itself may nest further sub-records as lists of dicts.
    Walking both yields the complete path (e.g. ``['partner_id', 'type']``)
    so that an import error can be attributed to the correct column in the
    import UI.

    :param str field: field in which the value will be imported
    :param value: a plain string/number for scalar cells, or a list of
        dicts for one2many sub-records, where a ``None`` key stands for
        the implicit name value
    :return: list of field names from the topmost parent down to the one
        the value belongs to
    """
    ancestors = self._context.get('parent_fields_hierarchy') or []
    path = list(ancestors) + [field]

    node = value
    # descend through nested one2many payloads, collecting each sub-field
    while isinstance(node, list):
        first_key = list(node[0])[0]
        if first_key:
            path.append(first_key)
        node = node[0][first_key]
    return path
|
||||
|
||||
@api.model
def for_model(self, model, fromtype=str):
    """ Returns a converter object for the model. A converter is a
    callable taking a record-ish (a dictionary representing an odoo
    record with values of typetag ``fromtype``) and returning a converted
    records matching what :meth:`odoo.osv.orm.Model.write` expects.

    :param model: :class:`odoo.osv.orm.Model` for the conversion base
    :param fromtype:
    :returns: a converter callable
    :rtype: (record: dict, logger: (field, error) -> None) -> dict
    """
    # make sure model is new api
    model = self.env[model._name]

    # pre-resolve one converter per field of the model
    converters = {
        name: self.to_field(model, field, fromtype)
        for name, field in model._fields.items()
    }

    def fn(record, log):
        converted = {}
        import_file_context = self.env.context.get('import_file')
        for field, value in record.items():
            # referencing keys (id/.id/None) are handled by the caller
            if field in REFERENCING_FIELDS:
                continue
            # empty cell: write False (clears the field)
            if not value:
                converted[field] = False
                continue
            try:
                converted[field], ws = converters[field](value)
                for w in ws:
                    if isinstance(w, str):
                        # wrap warning string in an ImportWarning for
                        # uniform handling
                        w = ImportWarning(w)
                    log(field, w)
            except (UnicodeEncodeError, UnicodeDecodeError) as e:
                log(field, ValueError(str(e)))
            except ValueError as e:
                if import_file_context:
                    # if the error is linked to a matching error, the error is a tuple
                    # E.g.:("Value X cannot be found for field Y at row 1", {
                    #     'more_info': {},
                    #     'value': 'X',
                    #     'field': 'Y',
                    #     'field_path': child_id/Y,
                    # })
                    # In order to link the error to the correct header-field couple in the import UI, we need to add
                    # the field path to the additional error info.
                    # As we raise the deepest child in error, we need to add the field path only for the deepest
                    # error in the import recursion. (if field_path is given, don't overwrite it)
                    error_info = len(e.args) > 1 and e.args[1]
                    if error_info and not error_info.get('field_path'):  # only raise the deepest child in error
                        error_info['field_path'] = self._get_import_field_path(field, value)
                log(field, e)
        return converted

    return fn
|
||||
|
||||
@api.model
def to_field(self, model, field, fromtype=str):
    """Return a converter callable for ``field`` from values of ``fromtype``.

    The converter is looked up on this model under the naming pattern
    ``_$fromtype_to_$field.type`` and returned partially applied to
    ``(model, field)``; ``None`` when no such method exists.

    A converter takes a value of type ``fromtype`` (or a composite of it,
    e.g. list or dict) and returns ``(value_for_write, warnings)``, where
    warnings are strings or :class:`~.ImportWarning` instances. It may
    instead raise ``ValueError`` to signal a validation & conversion
    failure: the first (mandatory) argument is the translated,
    user-visible message — it may contain a ``field`` named format
    placeholder so the caller can inject the field's user-facing name —
    and the optional second argument is a mapping merged into the error
    dictionary returned to the client.

    :param model:
    :param field: field object to generate a value for
    :type field: :class:`odoo.fields.Field`
    :param fromtype: type to convert to something fitting for ``field``
    :type fromtype: type | str
    :return: a function (fromtype -> field.write_type), if a converter is found
    :rtype: Callable | None
    """
    assert isinstance(fromtype, (type, str))
    # FIXME: return None
    source_name = fromtype if isinstance(fromtype, str) else fromtype.__name__
    handler = getattr(self, '_%s_to_%s' % (source_name, field.type), None)
    return functools.partial(handler, model, field) if handler else None
|
||||
|
||||
def _str_to_json(self, model, field, value):
    """Parse the imported string as JSON; raise a formatted error if invalid."""
    try:
        parsed = json.loads(value)
    except ValueError:
        msg = _("'%s' does not seem to be a valid JSON for field '%%(field)s'")
        raise self._format_import_error(ValueError, msg, value)
    return parsed, []
|
||||
|
||||
def _str_to_properties(self, model, field, value):
    # Properties fields cannot be imported: always reject the value.
    msg = _("Unable to import field type '%s' ", field.type)
    raise self._format_import_error(ValueError, msg)
|
||||
|
||||
@api.model
def _str_to_boolean(self, model, field, value):
    """Convert an imported string to a boolean.

    Recognizes '1'/'true'/'yes' (and their translations) as True, and
    ''/'0'/'false'/'no' (and translations) as False, case-insensitively.
    Unknown values default to True with a warning, or to None when the
    field is listed in the ``import_skip_records`` context key.
    """
    # all translatables used for booleans
    # potentially broken casefolding? What about locales?
    trues = set(word.lower() for word in itertools.chain(
        [u'1', u"true", u"yes"],  # don't use potentially translated values
        self._get_boolean_translations(u"true"),
        self._get_boolean_translations(u"yes"),
    ))
    if value.lower() in trues:
        return True, []

    # potentially broken casefolding? What about locales?
    falses = set(word.lower() for word in itertools.chain(
        [u'', u"0", u"false", u"no"],
        self._get_boolean_translations(u"false"),
        self._get_boolean_translations(u"no"),
    ))
    if value.lower() in falses:
        return False, []

    if field.name in self._context.get('import_skip_records', []):
        return None, []

    # unrecognized value: assume True but warn the user
    return True, [self._format_import_error(
        ValueError,
        _(u"Unknown value '%s' for boolean field '%%(field)s'"),
        value,
        {'moreinfo': _(u"Use '1' for yes and '0' for no")}
    )]
|
||||
|
||||
@api.model
def _str_to_integer(self, model, field, value):
    """Convert an imported string to ``int``; raise a formatted error otherwise."""
    try:
        parsed = int(value)
    except ValueError:
        raise self._format_import_error(
            ValueError,
            _(u"'%s' does not seem to be an integer for field '%%(field)s'"),
            value
        )
    return parsed, []
|
||||
|
||||
@api.model
def _str_to_float(self, model, field, value):
    """Convert an imported string to ``float``; raise a formatted error otherwise."""
    try:
        parsed = float(value)
    except ValueError:
        raise self._format_import_error(
            ValueError,
            _(u"'%s' does not seem to be a number for field '%%(field)s'"),
            value
        )
    return parsed, []

# monetary values are imported exactly like floats
_str_to_monetary = _str_to_float
|
||||
|
||||
    @api.model
    def _str_id(self, model, field, value):
        # Identity conversion: the raw string is stored as-is, no warnings.
        return value, []

    # All plain text-like field types share the identity conversion above.
    _str_to_reference = _str_to_char = _str_to_text = _str_to_binary = _str_to_html = _str_id
|
||||
|
||||
@api.model
|
||||
def _str_to_date(self, model, field, value):
|
||||
try:
|
||||
parsed_value = fields.Date.from_string(value)
|
||||
return fields.Date.to_string(parsed_value), []
|
||||
except ValueError:
|
||||
raise self._format_import_error(
|
||||
ValueError,
|
||||
_(u"'%s' does not seem to be a valid date for field '%%(field)s'"),
|
||||
value,
|
||||
{'moreinfo': _(u"Use the format '%s'", u"2012-12-31")}
|
||||
)
|
||||
|
||||
@api.model
|
||||
def _input_tz(self):
|
||||
# if there's a tz in context, try to use that
|
||||
if self._context.get('tz'):
|
||||
try:
|
||||
return pytz.timezone(self._context['tz'])
|
||||
except pytz.UnknownTimeZoneError:
|
||||
pass
|
||||
|
||||
# if the current user has a tz set, try to use that
|
||||
user = self.env.user
|
||||
if user.tz:
|
||||
try:
|
||||
return pytz.timezone(user.tz)
|
||||
except pytz.UnknownTimeZoneError:
|
||||
pass
|
||||
|
||||
# fallback if no tz in context or on user: UTC
|
||||
return pytz.UTC
|
||||
|
||||
@api.model
|
||||
def _str_to_datetime(self, model, field, value):
|
||||
try:
|
||||
parsed_value = fields.Datetime.from_string(value)
|
||||
except ValueError:
|
||||
raise self._format_import_error(
|
||||
ValueError,
|
||||
_(u"'%s' does not seem to be a valid datetime for field '%%(field)s'"),
|
||||
value,
|
||||
{'moreinfo': _(u"Use the format '%s'", u"2012-12-31 23:59:59")}
|
||||
)
|
||||
|
||||
input_tz = self._input_tz()# Apply input tz to the parsed naive datetime
|
||||
dt = input_tz.localize(parsed_value, is_dst=False)
|
||||
# And convert to UTC before reformatting for writing
|
||||
return fields.Datetime.to_string(dt.astimezone(pytz.UTC)), []
|
||||
|
||||
@api.model
|
||||
def _get_boolean_translations(self, src):
|
||||
# Cache translations so they don't have to be reloaded from scratch on
|
||||
# every row of the file
|
||||
tnx_cache = self._cr.cache.setdefault(self._name, {})
|
||||
if src in tnx_cache:
|
||||
return tnx_cache[src]
|
||||
|
||||
values = OrderedSet()
|
||||
for lang, __ in self.env['res.lang'].get_installed():
|
||||
translations = code_translations.get_python_translations('base', lang)
|
||||
if src in translations:
|
||||
values.add(translations[src])
|
||||
|
||||
result = tnx_cache[src] = list(values)
|
||||
return result
|
||||
|
||||
    @api.model
    def _get_selection_translations(self, field, src):
        """Return all translated labels for the selection value whose en_US
        label is ``src`` on the given field, excluding en_US itself."""
        if not src:
            return []
        # Cache translations so they don't have to be reloaded from scratch on
        # every row of the file
        # NOTE(review): this cache is keyed by label only (shared with
        # _get_boolean_translations) — collisions between sources look
        # possible; confirm intended.
        tnx_cache = self._cr.cache.setdefault(self._name, {})
        if src in tnx_cache:
            return tnx_cache[src]

        values = OrderedSet()
        # flush pending ORM writes so the SQL below sees up-to-date rows
        self.env['ir.model.fields.selection'].flush_model()
        query = """
            SELECT s.name
            FROM ir_model_fields_selection s
            JOIN ir_model_fields f ON s.field_id = f.id
            WHERE f.model = %s AND f.name = %s AND s.name->>'en_US' = %s
        """
        self.env.cr.execute(query, [field.model_name, field.name, src])
        for (name,) in self.env.cr.fetchall():
            # ``name`` is a JSONB dict of lang -> label; drop the source lang
            name.pop('en_US')
            values.update(name.values())

        result = tnx_cache[src] = list(values)
        return result
|
||||
|
||||
    @api.model
    def _str_to_selection(self, model, field, value):
        """Match ``value`` against the field's selection items, comparing the
        technical value, the untranslated label and every translated label,
        all case-insensitively."""
        # get untranslated values
        env = self.with_context(lang=None).env
        selection = field.get_description(env)['selection']

        for item, label in selection:
            label = ustr(label)
            if callable(field.selection):
                # dynamic selection: collect the label shown in the current
                # language as an extra candidate
                labels = [label]
                for item2, label2 in field._description_selection(self.env):
                    if item2 == item:
                        labels.append(label2)
                        break
            else:
                labels = [label] + self._get_selection_translations(field, label)
            # case insensitive comparaison of string to allow to set the value even if the given 'value' param is not
            # exactly (case sensitive) the same as one of the selection item.
            if value.lower() == str(item).lower() or any(value.lower() == label.lower() for label in labels):
                return item, []

        # no match: skip or empty the field when the import options say so
        if field.name in self._context.get('import_skip_records', []):
            return None, []
        elif field.name in self._context.get('import_set_empty_fields', []):
            return False, []
        raise self._format_import_error(
            ValueError,
            _(u"Value '%s' not found in selection field '%%(field)s'"),
            value,
            {'moreinfo': [_label or str(item) for item, _label in selection if _label or item]}
        )
|
||||
|
||||
    @api.model
    def db_id_for(self, model, field, subfield, value):
        """ Finds a database id for the reference ``value`` in the referencing
        subfield ``subfield`` of the provided field of the provided model.

        :param model: model to which the field belongs
        :param field: relational field for which references are provided
        :param subfield: a relational subfield allowing building of refs to
                         existing records: ``None`` for a name_get/name_search,
                         ``id`` for an external id and ``.id`` for a database
                         id
        :param value: value of the reference to match to an actual record
        :return: a pair of the matched database identifier (if any), the
                 translated user-readable name for the field and the list of
                 warnings
        :rtype: (ID|None, unicode, list)
        """
        # the function 'flush' comes from BaseModel.load(), and forces the
        # creation/update of former records (batch creation)
        flush = self._context.get('import_flush', lambda **kw: None)

        id = None
        warnings = []
        error_msg = ''
        # window action offered in error messages to let the user browse
        # acceptable values
        action = {
            'name': 'Possible Values',
            'type': 'ir.actions.act_window', 'target': 'new',
            'view_mode': 'tree,form',
            'views': [(False, 'list'), (False, 'form')],
            'context': {'create': False},
            'help': _(u"See all possible values")}
        if subfield is None:
            action['res_model'] = field.comodel_name
        elif subfield in ('id', '.id'):
            action['res_model'] = 'ir.model.data'
            action['domain'] = [('model', '=', field.comodel_name)]

        RelatedModel = self.env[field.comodel_name]
        if subfield == '.id':
            # reference by raw database id
            field_type = _(u"database id")
            if isinstance(value, str) and not self._str_to_boolean(model, field, value)[0]:
                # falsy-looking string means "clear the field"
                return False, field_type, warnings
            try: tentative_id = int(value)
            except ValueError: tentative_id = value
            try:
                if RelatedModel.search([('id', '=', tentative_id)]):
                    id = tentative_id
            except psycopg2.DataError:
                # type error
                raise self._format_import_error(
                    ValueError,
                    _(u"Invalid database id '%s' for the field '%%(field)s'"),
                    value,
                    {'moreinfo': action})
        elif subfield == 'id':
            # reference by external id; unqualified xids are resolved against
            # the module currently being imported
            field_type = _(u"external id")
            if not self._str_to_boolean(model, field, value)[0]:
                return False, field_type, warnings
            if '.' in value:
                xmlid = value
            else:
                xmlid = "%s.%s" % (self._context.get('_import_current_module', ''), value)
            flush(xml_id=xmlid)
            id = self._xmlid_to_record_id(xmlid, RelatedModel)
        elif subfield is None:
            # reference by display name via name_search
            field_type = _(u"name")
            if value == '':
                return False, field_type, warnings
            flush(model=field.comodel_name)
            ids = RelatedModel.name_search(name=value, operator='=')
            if ids:
                if len(ids) > 1:
                    # ambiguous name: keep the first match but warn
                    warnings.append(ImportWarning(
                        _(u"Found multiple matches for value '%s' in field '%%(field)s' (%d matches)")
                        %(str(value).replace('%', '%%'), len(ids))))
                id, _name = ids[0]
            else:
                # optionally create the missing record from its name alone
                name_create_enabled_fields = self.env.context.get('name_create_enabled_fields') or {}
                if name_create_enabled_fields.get(field.name):
                    try:
                        with self.env.cr.savepoint():
                            id, _name = RelatedModel.name_create(name=value)
                    except (Exception, psycopg2.IntegrityError):
                        error_msg = _(u"Cannot create new '%s' records from their name alone. Please create those records manually and try importing again.", RelatedModel._description)
        else:
            raise self._format_import_error(
                Exception,
                _(u"Unknown sub-field '%s'"),
                subfield
            )

        # figure out whether the import options allow an unresolved reference
        # for this exact field path
        set_empty = False
        skip_record = False
        if self.env.context.get('import_file'):
            import_set_empty_fields = self.env.context.get('import_set_empty_fields') or []
            field_path = "/".join((self.env.context.get('parent_fields_hierarchy', []) + [field.name]))
            set_empty = field_path in import_set_empty_fields
            skip_record = field_path in self.env.context.get('import_skip_records', [])
        if id is None and not set_empty and not skip_record:
            if error_msg:
                message = _("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s' and the following error was encountered when we attempted to create one: %(error_message)s")
            else:
                message = _("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s'")

            error_info_dict = {'moreinfo': action}
            if self.env.context.get('import_file'):
                # limit to 50 char to avoid too long error messages.
                value = value[:50] if isinstance(value, str) else value
                error_info_dict.update({'value': value, 'field_type': field_type})
                if error_msg:
                    error_info_dict['error_message'] = error_msg
            raise self._format_import_error(
                ValueError,
                message,
                {'field_type': field_type, 'value': value, 'error_message': error_msg},
                error_info_dict)
        return id, field_type, warnings
|
||||
|
||||
    def _xmlid_to_record_id(self, xmlid, model):
        """ Return the record id corresponding to the given external id,
        provided that the record actually exists; otherwise return ``None``.
        """
        import_cache = self.env.context.get('import_cache', {})
        result = import_cache.get(xmlid)

        if not result:
            module, name = xmlid.split('.', 1)
            # the JOIN on the target table drops ir_model_data rows whose
            # record was deleted; interpolating model._table is safe because
            # it is an internal identifier, not user input
            query = """
                SELECT d.model, d.res_id
                FROM ir_model_data d
                JOIN "{}" r ON d.res_id = r.id
                WHERE d.module = %s AND d.name = %s
            """.format(model._table)
            self.env.cr.execute(query, [module, name])
            result = self.env.cr.fetchone()

        if result:
            # memoize for subsequent rows of the same import
            res_model, res_id = import_cache[xmlid] = result
            if res_model != model._name:
                MSG = "Invalid external ID %s: expected model %r, found %r"
                raise ValueError(MSG % (xmlid, model._name, res_model))
            return res_id
|
||||
|
||||
def _referencing_subfield(self, record):
|
||||
""" Checks the record for the subfields allowing referencing (an
|
||||
existing record in an other table), errors out if it finds potential
|
||||
conflicts (multiple referencing subfields) or non-referencing subfields
|
||||
returns the name of the correct subfield.
|
||||
|
||||
:param record:
|
||||
:return: the record subfield to use for referencing and a list of warnings
|
||||
:rtype: str, list
|
||||
"""
|
||||
# Can import by name_get, external id or database id
|
||||
fieldset = set(record)
|
||||
if fieldset - REFERENCING_FIELDS:
|
||||
raise ValueError(
|
||||
_(u"Can not create Many-To-One records indirectly, import the field separately"))
|
||||
if len(fieldset) > 1:
|
||||
raise ValueError(
|
||||
_(u"Ambiguous specification for field '%(field)s', only provide one of name, external id or database id"))
|
||||
|
||||
# only one field left possible, unpack
|
||||
[subfield] = fieldset
|
||||
return subfield, []
|
||||
|
||||
@api.model
|
||||
def _str_to_many2one(self, model, field, values):
|
||||
# Should only be one record, unpack
|
||||
[record] = values
|
||||
|
||||
subfield, w1 = self._referencing_subfield(record)
|
||||
|
||||
id, _, w2 = self.db_id_for(model, field, subfield, record[subfield])
|
||||
return id, w1 + w2
|
||||
|
||||
    @api.model
    def _str_to_many2one_reference(self, model, field, value):
        # many2one_reference stores the target id as a plain integer
        return self._str_to_integer(model, field, value)
|
||||
|
||||
@api.model
|
||||
def _str_to_many2many(self, model, field, value):
|
||||
[record] = value
|
||||
|
||||
subfield, warnings = self._referencing_subfield(record)
|
||||
|
||||
ids = []
|
||||
for reference in record[subfield].split(','):
|
||||
id, _, ws = self.db_id_for(model, field, subfield, reference)
|
||||
ids.append(id)
|
||||
warnings.extend(ws)
|
||||
|
||||
if field.name in self._context.get('import_set_empty_fields', []) and any([id is None for id in ids]):
|
||||
ids = [id for id in ids if id]
|
||||
elif field.name in self._context.get('import_skip_records', []) and any([id is None for id in ids]):
|
||||
return None, warnings
|
||||
|
||||
if self._context.get('update_many2many'):
|
||||
return [Command.link(id) for id in ids], warnings
|
||||
else:
|
||||
return [Command.set(ids)], warnings
|
||||
|
||||
    @api.model
    def _str_to_one2many(self, model, field, records):
        """Convert a list of row dicts into one2many commands (create/link/update)."""
        name_create_enabled_fields = self._context.get('name_create_enabled_fields') or {}
        prefix = field.name + '/'
        # keep only the name_create options that target this o2m's sub-fields,
        # stripped of the "field/" prefix
        relative_name_create_enabled_fields = {
            k[len(prefix):]: v
            for k, v in name_create_enabled_fields.items()
            if k.startswith(prefix)
        }
        commands = []
        warnings = []

        if len(records) == 1 and exclude_ref_fields(records[0]) == {}:
            # only one row with only ref field, field=ref1,ref2,ref3 as in
            # m2o/m2m
            record = records[0]
            subfield, ws = self._referencing_subfield(record)
            warnings.extend(ws)
            # transform [{subfield:ref1,ref2,ref3}] into
            # [{subfield:ref1},{subfield:ref2},{subfield:ref3}]
            records = ({subfield:item} for item in record[subfield].split(','))

        def log(f, exception):
            # error callback passed to the sub-converter: re-raise hard errors
            # with the sub-field name spliced into the message, collect warnings
            if not isinstance(exception, Warning):
                current_field_name = self.env[field.comodel_name]._fields[f].string
                arg0 = exception.args[0].replace('%(field)s', '%(field)s/' + current_field_name)
                exception.args = (arg0, *exception.args[1:])
                raise exception
            warnings.append(exception)

        # Complete the field hierarchy path
        # E.g. For "parent/child/subchild", field hierarchy path for "subchild" is ['parent', 'child']
        parent_fields_hierarchy = self._context.get('parent_fields_hierarchy', []) + [field.name]

        convert = self.with_context(
            name_create_enabled_fields=relative_name_create_enabled_fields,
            parent_fields_hierarchy=parent_fields_hierarchy
        ).for_model(self.env[field.comodel_name])

        for record in records:
            id = None
            refs = only_ref_fields(record)
            writable = convert(exclude_ref_fields(record), log)
            if refs:
                subfield, w1 = self._referencing_subfield(refs)
                warnings.extend(w1)
                try:
                    id, _, w2 = self.db_id_for(model, field, subfield, record[subfield])
                    warnings.extend(w2)
                except ValueError:
                    if subfield != 'id':
                        raise
                    # unresolved xid: let create() register it (see O2MIdMapper)
                    writable['id'] = record['id']

            if id:
                commands.append(Command.link(id))
                commands.append(Command.update(id, writable))
            else:
                commands.append(Command.create(writable))

        return commands, warnings
|
||||
|
||||
class O2MIdMapper(models.AbstractModel):
    """
    Updates the base class to support setting xids directly in create by
    providing an "id" key (otherwise stripped by create) during an import
    (which should strip 'id' from the input data anyway)
    """
    _inherit = 'base'

    # sadly _load_records_create is only called for the toplevel record so we
    # can't hook into that
    @api.model_create_multi
    @api.returns('self', lambda value: value.id)
    def create(self, vals_list):
        recs = super().create(vals_list)

        import_module = self.env.context.get('_import_current_module')
        if not import_module: # not an import -> bail
            return recs
        noupdate = self.env.context.get('noupdate', False)

        # register an external id for every created record that carried one;
        # unqualified xids are namespaced under the importing module
        xids = (v.get('id') for v in vals_list)
        self.env['ir.model.data']._update_xmlids([
            {
                'xml_id': xid if '.' in xid else ('%s.%s' % (import_module, xid)),
                'record': rec,
                # note: this is not used when updating o2ms above...
                'noupdate': noupdate,
            }
            for rec, xid in zip(recs, xids)
            if xid and isinstance(xid, str)
        ])

        return recs
|
||||
164
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_filters.py
Normal file
164
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_filters.py
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools.safe_eval import safe_eval, datetime
|
||||
|
||||
|
||||
class IrFilters(models.Model):
    """Saved search filters (domain + context + sort) per model, optionally
    scoped to a single user and/or a single window action."""
    _name = 'ir.filters'
    _description = 'Filters'
    _order = 'model_id, name, id desc'

    name = fields.Char(string='Filter Name', required=True)
    user_id = fields.Many2one('res.users', string='User', ondelete='cascade',
                              help="The user this filter is private to. When left empty the filter is public "
                                   "and available to all users.")
    domain = fields.Text(default='[]', required=True)
    context = fields.Text(default='{}', required=True)
    sort = fields.Text(default='[]', required=True)
    model_id = fields.Selection(selection='_list_all_models', string='Model', required=True)
    is_default = fields.Boolean(string='Default Filter')
    action_id = fields.Many2one('ir.actions.actions', string='Action', ondelete='cascade',
                                help="The menu action this filter applies to. "
                                     "When left empty the filter applies to all menus "
                                     "for this model.")
    active = fields.Boolean(default=True)

    @api.model
    def _list_all_models(self):
        """Return (model, translated name) pairs for the model_id selection."""
        lang = self.env.lang or 'en_US'
        self._cr.execute(
            "SELECT model, COALESCE(name->>%s, name->>'en_US') FROM ir_model ORDER BY 2",
            [lang],
        )
        return self._cr.fetchall()

    def write(self, vals):
        new_filter = super().write(vals)
        # re-check record rules after the write so a user cannot move a
        # filter out of their own visibility scope
        self.check_access_rule('write')
        return new_filter

    def copy(self, default=None):
        self.ensure_one()
        # suffix the name to avoid tripping the unique (model, user, action,
        # name) constraint
        default = dict(default or {}, name=_('%s (copy)', self.name))
        return super(IrFilters, self).copy(default)

    def _get_eval_domain(self):
        """Evaluate the stored textual domain into a list, with datetime
        helpers available to the expression."""
        self.ensure_one()
        return safe_eval(self.domain, {
            'datetime': datetime,
            'context_today': datetime.datetime.now,
        })

    @api.model
    def _get_action_domain(self, action_id=None):
        """Return a domain component for matching filters that are visible in the
        same context (menu/view) as the given action."""
        if action_id:
            # filters specific to this menu + global ones
            return [('action_id', 'in', [action_id, False])]
        # only global ones
        return [('action_id', '=', False)]

    @api.model
    def get_filters(self, model, action_id=None):
        """Obtain the list of filters available for the user on the given model.

        :param str model: model name to find filters for
        :param action_id: optional ID of action to restrict filters to this action
            plus global filters. If missing only global filters are returned.
            The action does not have to correspond to the model, it may only be
            a contextual action.
        :return: list of :meth:`~osv.read`-like dicts containing the
            ``name``, ``is_default``, ``domain``, ``context``, ``sort`` and
            ``user_id`` (m2o tuple) of the matching ``ir.filters``.
        """
        # available filters: private filters (user_id=uid) and public filters (uid=NULL),
        # and filters for the action (action_id=action_id) or global (action_id=NULL)
        action_domain = self._get_action_domain(action_id)
        filters = self.search(action_domain + [('model_id', '=', model), ('user_id', 'in', [self._uid, False])])
        user_context = self.env['res.users'].context_get()
        return filters.with_context(user_context).read(['name', 'is_default', 'domain', 'context', 'user_id', 'sort'])

    @api.model
    def _check_global_default(self, vals, matching_filters):
        """ _check_global_default(dict, list(dict)) -> None

        Checks if there is a global default for the model_id requested.

        If there is, and the default is different than the record being written
        (-> we're not updating the current global default), raise an error
        to avoid users unknowingly overwriting existing global defaults (they
        have to explicitly remove the current default before setting a new one)

        This method should only be called if ``vals`` is trying to set
        ``is_default``

        :raises odoo.exceptions.UserError: if there is an existing default and
                                           we're not updating it
        """
        domain = self._get_action_domain(vals.get('action_id'))
        defaults = self.search(domain + [
            ('model_id', '=', vals['model_id']),
            ('user_id', '=', False),
            ('is_default', '=', True),
        ])

        if not defaults:
            return
        if matching_filters and (matching_filters[0]['id'] == defaults.id):
            # we're updating the existing global default itself: allowed
            return

        raise UserError(_("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default") % {'model': vals.get('model_id')})

    @api.model
    @api.returns('self', lambda value: value.id)
    def create_or_replace(self, vals):
        """Create a filter, or overwrite the existing one with the same
        (name, model, user) triple."""
        action_id = vals.get('action_id')
        current_filters = self.get_filters(vals['model_id'], action_id)
        matching_filters = [f for f in current_filters
                            if f['name'].lower() == vals['name'].lower()
                            # next line looks for matching user_ids (specific or global), i.e.
                            # f.user_id is False and vals.user_id is False or missing,
                            # or f.user_id.id == vals.user_id
                            if (f['user_id'] and f['user_id'][0]) == vals.get('user_id')]

        if vals.get('is_default'):
            if vals.get('user_id'):
                # Setting new default: any other default that belongs to the user
                # should be turned off
                domain = self._get_action_domain(action_id)
                defaults = self.search(domain + [
                    ('model_id', '=', vals['model_id']),
                    ('user_id', '=', vals['user_id']),
                    ('is_default', '=', True),
                ])
                if defaults:
                    defaults.write({'is_default': False})
            else:
                self._check_global_default(vals, matching_filters)

        # When a filter exists for the same (name, model, user) triple, we simply
        # replace its definition (considering action_id irrelevant here)
        if matching_filters:
            matching_filter = self.browse(matching_filters[0]['id'])
            matching_filter.write(vals)
            return matching_filter

        return self.create(vals)

    _sql_constraints = [
        # Partial constraint, complemented by unique index (see below). Still
        # useful to keep because it provides a proper error message when a
        # violation occurs, as it shares the same prefix as the unique index.
        ('name_model_uid_unique', 'unique (model_id, user_id, action_id, name)', 'Filter names must be unique'),
    ]

    def _auto_init(self):
        result = super(IrFilters, self)._auto_init()
        # Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
        tools.create_unique_index(self._cr, 'ir_filters_name_model_uid_unique_action_index',
                                  self._table, ['model_id', 'COALESCE(user_id,-1)', 'COALESCE(action_id,-1)', 'lower(name)'])
        return result
|
||||
273
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_http.py
Normal file
273
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_http.py
Normal file
|
|
@ -0,0 +1,273 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
#----------------------------------------------------------
|
||||
# ir_http modular http routing
|
||||
#----------------------------------------------------------
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import threading
|
||||
|
||||
import werkzeug
|
||||
import werkzeug.exceptions
|
||||
import werkzeug.routing
|
||||
import werkzeug.utils
|
||||
|
||||
try:
|
||||
from werkzeug.routing import NumberConverter
|
||||
except ImportError:
|
||||
from werkzeug.routing.converters import NumberConverter # moved in werkzeug 2.2.2
|
||||
|
||||
import odoo
|
||||
from odoo import api, http, models, tools, SUPERUSER_ID
|
||||
from odoo.exceptions import AccessDenied, AccessError, MissingError
|
||||
from odoo.http import request, Response, ROUTING_KEYS, Stream
|
||||
from odoo.modules.registry import Registry
|
||||
from odoo.service import security
|
||||
from odoo.tools import get_lang, submap
|
||||
from odoo.tools.translate import code_translations
|
||||
from odoo.modules.module import get_resource_path, get_module_path
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RequestUID(object):
    """Lightweight attribute bag used as a placeholder uid in route
    converters; attributes are whatever keyword arguments were given."""

    def __init__(self, **kw):
        for attr_name, attr_value in kw.items():
            setattr(self, attr_name, attr_value)
|
||||
|
||||
|
||||
class ModelConverter(werkzeug.routing.BaseConverter):
    """URL converter turning a numeric path segment into a single-record
    recordset of ``self.model``."""

    def __init__(self, url_map, model=False):
        super(ModelConverter, self).__init__(url_map)
        self.model = model
        self.regex = r'([0-9]+)'

    def to_python(self, value):
        # the real uid is substituted later in _pre_dispatch
        placeholder_uid = RequestUID(value=value, converter=self)
        env = api.Environment(request.cr, placeholder_uid, request.context)
        return env[self.model].browse(int(value))

    def to_url(self, value):
        # NOTE(review): returns an int while werkzeug converters normally
        # return str — confirm werkzeug's URL building tolerates this
        return value.id
|
||||
|
||||
|
||||
class ModelsConverter(werkzeug.routing.BaseConverter):
    """URL converter turning a comma-separated list of numeric ids into a
    multi-record recordset of ``self.model``."""

    def __init__(self, url_map, model=False):
        super(ModelsConverter, self).__init__(url_map)
        self.model = model
        # TODO add support for slug in the form [A-Za-z0-9-] bla-bla-89 -> id 89
        self.regex = r'([0-9,]+)'

    def to_python(self, value):
        # the real uid is substituted later in _pre_dispatch
        _uid = RequestUID(value=value, converter=self)
        env = api.Environment(request.cr, _uid, request.context)
        return env[self.model].browse(int(v) for v in value.split(','))

    def to_url(self, value):
        # Fix: record ids are ints and str.join() only accepts strings, so
        # the previous ",".join(value.ids) raised TypeError when building a
        # URL from a recordset.
        return ",".join(str(record_id) for record_id in value.ids)
|
||||
|
||||
|
||||
class SignedIntConverter(NumberConverter):
    # Werkzeug's builtin 'int' converter rejects negative numbers; this
    # replacement accepts an optional leading minus sign.
    regex = r'-?\d+'
    num_convert = int
|
||||
|
||||
|
||||
class IrHttp(models.AbstractModel):
|
||||
_name = 'ir.http'
|
||||
_description = "HTTP Routing"
|
||||
|
||||
#------------------------------------------------------
|
||||
# Routing map
|
||||
#------------------------------------------------------
|
||||
|
||||
    @classmethod
    def _get_converters(cls):
        # URL converters registered on the routing map; 'int' is overridden
        # to accept signed values
        return {'model': ModelConverter, 'models': ModelsConverter, 'int': SignedIntConverter}
|
||||
|
||||
    @classmethod
    def _match(cls, path_info, key=None):
        # Resolve ``path_info`` against the routing map bound to the current
        # WSGI environ; werkzeug raises NotFound/MethodNotAllowed on failure.
        # ``key`` is unused here — presumably an override hook for modules
        # that maintain per-key routing maps; confirm with subclasses.
        rule, args = cls.routing_map().bind_to_environ(request.httprequest.environ).match(path_info=path_info, return_rule=True)
        return rule, args
|
||||
|
||||
    @classmethod
    def _get_public_users(cls):
        # ids of users considered "public" (not really authenticated)
        return [request.env['ir.model.data']._xmlid_to_res_model_res_id('base.public_user')[1]]
|
||||
|
||||
    @classmethod
    def _auth_method_user(cls):
        # auth='user' routes require a real authenticated user: reject both
        # anonymous (uid None) and public-user sessions
        if request.env.uid in [None] + cls._get_public_users():
            raise http.SessionExpiredException("Session expired")
|
||||
|
||||
    @classmethod
    def _auth_method_none(cls):
        # auth='none' routes run without any user bound to the environment
        request.env = api.Environment(request.env.cr, None, request.env.context)
|
||||
|
||||
    @classmethod
    def _auth_method_public(cls):
        # auth='public' routes fall back to the shared public user when the
        # request carries no authenticated session
        if request.env.uid is None:
            public_user = request.env.ref('base.public_user')
            request.update_env(user=public_user.id)
|
||||
|
||||
    @classmethod
    def _authenticate(cls, endpoint):
        # CORS preflight requests are always served without authentication
        auth = 'none' if http.is_cors_preflight(request, endpoint) else endpoint.routing['auth']

        try:
            if request.session.uid is not None:
                if not security.check_session(request.session, request.env):
                    # stale/invalid session: drop it (keeping the db) and
                    # continue unauthenticated
                    request.session.logout(keep_db=True)
                    request.env = api.Environment(request.env.cr, None, request.session.context)
            # dispatch to _auth_method_user/_auth_method_public/_auth_method_none/...
            getattr(cls, f'_auth_method_{auth}')()
        except (AccessDenied, http.SessionExpiredException, werkzeug.exceptions.HTTPException):
            raise
        except Exception:
            # anything unexpected during auth is logged then surfaced as a
            # generic denial so no internal detail leaks to the client
            _logger.info("Exception during request Authentication.", exc_info=True)
            raise AccessDenied()
|
||||
|
||||
    @classmethod
    def _geoip_resolve(cls):
        # thin overridable hook delegating to the request's geoip resolution
        return request._geoip_resolve()
|
||||
|
||||
    @classmethod
    def _pre_dispatch(cls, rule, args):
        request.dispatcher.pre_dispatch(rule, args)

        # Replace uid placeholder by the current request.env.uid: route
        # converters built their recordsets before authentication ran
        for key, val in list(args.items()):
            if isinstance(val, models.BaseModel) and isinstance(val._uid, RequestUID):
                args[key] = val.with_user(request.env.uid)

        # verify the default language set in the context is valid,
        # otherwise fallback on the company lang, english or the first
        # lang installed
        request.update_context(lang=get_lang(request.env)._get_cached('code'))
|
||||
|
||||
    @classmethod
    def _dispatch(cls, endpoint):
        # call the controller; lazily-rendered qweb responses are flattened
        # here so errors surface inside the dispatch pipeline
        result = endpoint(**request.params)
        if isinstance(result, Response) and result.is_qweb:
            result.flatten()
        return result
|
||||
|
||||
    @classmethod
    def _post_dispatch(cls, response):
        # overridable hook run after the controller produced its response
        request.dispatcher.post_dispatch(response)
|
||||
|
||||
    @classmethod
    def _handle_error(cls, exception):
        # overridable hook turning an uncaught exception into a response
        return request.dispatcher.handle_error(exception)
|
||||
|
||||
    @classmethod
    def _serve_fallback(cls):
        # last-chance handler when routing failed: serve an ir.attachment
        # whose url matches the requested path, if it actually has content
        model = request.env['ir.attachment']
        attach = model.sudo()._get_serve_attachment(request.httprequest.path)
        if attach and (attach.store_fname or attach.db_datas):
            return Stream.from_attachment(attach).get_response()
|
||||
|
||||
    @classmethod
    def _redirect(cls, location, code=303):
        # 303 See Other by default so a redirected POST becomes a GET
        return werkzeug.utils.redirect(location, code=code, Response=Response)
|
||||
|
||||
    @classmethod
    def _generate_routing_rules(cls, modules, converters):
        # yield (url, endpoint) pairs for the http routes declared by the
        # given modules; overridable hook for website/rewrite layers
        return http._generate_routing_rules(modules, False, converters)
|
||||
|
||||
    @classmethod
    def routing_map(cls, key=None):
        """Return (building it on first use) the werkzeug routing map for
        ``key``; maps are cached on the class, one per key."""

        if not hasattr(cls, '_routing_map'):
            cls._routing_map = {}
            cls._rewrite_len = {}

        if key not in cls._routing_map:
            _logger.info("Generating routing map for key %s" % str(key))
            registry = Registry(threading.current_thread().dbname)
            installed = registry._init_modules.union(odoo.conf.server_wide_modules)
            mods = sorted(installed)
            # Note: when the routing map is generated, we put it on the class
            # `cls` to make it available for all instances. Since `env` creates
            # a new instance of the model, each instance would otherwise
            # regenerate its own routing map and thus regenerate its EndPoint.
            # The routing map should be static.
            routing_map = werkzeug.routing.Map(strict_slashes=False, converters=cls._get_converters())
            for url, endpoint in cls._generate_routing_rules(mods, converters=cls._get_converters()):
                routing = submap(endpoint.routing, ROUTING_KEYS)
                # always allow OPTIONS so CORS preflights can reach the route
                if routing['methods'] is not None and 'OPTIONS' not in routing['methods']:
                    routing['methods'] = [*routing['methods'], 'OPTIONS']
                rule = werkzeug.routing.Rule(url, endpoint=endpoint, **routing)
                rule.merge_slashes = False
                routing_map.add(rule)
            cls._routing_map[key] = routing_map
        return cls._routing_map[key]
|
||||
|
||||
@classmethod
def _clear_routing_map(cls):
    """Drop every cached routing map so they are rebuilt on next access."""
    if not hasattr(cls, '_routing_map'):
        return
    cls._routing_map = {}
    _logger.debug("Clear routing map")
|
||||
|
||||
@api.autovacuum
def _gc_sessions(self):
    """Autovacuum hook: delete sessions inactive for longer than the allowed lifetime."""
    # Escape hatch for deployments that clean up sessions by other means.
    if os.getenv("ODOO_SKIP_GC_SESSIONS"):
        return
    lifetime = int(
        self.env["ir.config_parameter"].get_param(
            'sessions.max_inactivity_seconds', http.SESSION_LIFETIME))
    http.root.session_store.vacuum(max_lifetime=lifetime)
|
||||
|
||||
@api.model
def get_translations_for_webclient(self, modules, lang):
    """Return the webclient translations and language parameters.

    :param modules: module names to load translations for; defaults to all
        modules initialized in the registry when falsy.
    :param lang: language code; defaults to the context language when falsy.
    :return: ``(translations_per_module, lang_params)`` — ``lang_params`` is
        ``None`` when the language is unknown.
    """
    modules = modules or self.pool._init_modules
    lang = lang or self._context.get("lang")
    lang_rec = self.env['res.lang']._lang_get(lang)
    lang_params = None
    if lang_rec:
        lang_params = {
            "name": lang_rec.name,
            "direction": lang_rec.direction,
            "date_format": lang_rec.date_format,
            "time_format": lang_rec.time_format,
            "grouping": lang_rec.grouping,
            "decimal_point": lang_rec.decimal_point,
            "thousands_sep": lang_rec.thousands_sep,
            "week_start": int(lang_rec.week_start),
            "code": lang,
        }
        if lang_params["thousands_sep"]:
            # Normal spaces in the separator would allow ugly line wraps.
            lang_params["thousands_sep"] = lang_params["thousands_sep"].replace(' ', '\N{NO-BREAK SPACE}')

    # Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
    # done server-side when the language is loaded, so we only need to load the user's lang.
    translations_per_module = {
        module: code_translations.get_web_translations(module, lang)
        for module in modules
    }
    return translations_per_module, lang_params
|
||||
|
||||
@api.model
@tools.ormcache('frozenset(modules)', 'lang')
def get_web_translations_hash(self, modules, lang):
    """Return a stable SHA1 digest of the webclient translation payload.

    Used as a cache-busting token: the hash changes whenever the
    translations, language parameters, or installed-language count change.
    """
    translations, lang_params = self.get_translations_for_webclient(modules, lang)
    payload = {
        'lang_parameters': lang_params,
        'modules': translations,
        'lang': lang,
        'multi_lang': len(self.env['res.lang'].sudo().get_installed()) > 1,
    }
    serialized = json.dumps(payload, sort_keys=True)
    return hashlib.sha1(serialized.encode()).hexdigest()
|
||||
|
||||
@classmethod
def _is_allowed_cookie(cls, cookie_type):
    """Base implementation: allow every cookie type.

    Meant to be overridden (e.g. by consent-management modules) to refuse
    specific cookie types.
    """
    return True
|
||||
|
||||
@api.model
def _verify_request_recaptcha_token(self, action):
    """Base implementation: accept every request.

    Meant to be overridden by modules that actually validate a reCAPTCHA
    token for the given ``action``.
    """
    return True
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from odoo import api, fields, models
|
||||
|
||||
|
||||
class IrLogging(models.Model):
    """Persistent log lines (client- or server-side), written mostly through
    raw SQL that bypasses the ORM (see comments on the ``*_uid`` fields)."""
    _name = 'ir.logging'
    _description = 'Logging'
    _order = 'id DESC'
    _allow_sudo_commands = False

    # The _log_access fields are defined manually for the following reasons:
    #
    # - The entries in ir_logging are filled in with sql queries bypassing the orm. As the --log-db
    #   cli option allows to insert ir_logging entries into a remote database, the one2many *_uid
    #   fields make no sense in the first place but we will keep it for backward compatibility.
    #
    # - Also, when an ir_logging entry is triggered by the orm (when using --log-db) at the moment
    #   it is making changes to the res.users model, the ALTER TABLE will aquire an exclusive lock
    #   on res_users, preventing the ir_logging INSERT to be processed, hence the ongoing module
    #   install/update will hang forever as the orm is blocked by the ir_logging query that will
    #   never occur.
    create_uid = fields.Integer(string='Created by', readonly=True)
    create_date = fields.Datetime(string='Created on', readonly=True)
    write_uid = fields.Integer(string='Last Updated by', readonly=True)
    write_date = fields.Datetime(string='Last Updated on', readonly=True)

    # name: source/origin of the log line; type: where it was produced.
    name = fields.Char(required=True)
    type = fields.Selection([('client', 'Client'), ('server', 'Server')], required=True, index=True)
    dbname = fields.Char(string='Database Name', index=True)
    level = fields.Char(index=True)
    message = fields.Text(required=True)
    # path/func/line locate the emitting code.
    path = fields.Char(required=True)
    func = fields.Char(string='Function', required=True)
    line = fields.Char(required=True)

    def init(self):
        """Drop the write_uid FK constraint (if present) so inserts cannot be
        blocked by locks on res_users — see the field comments above."""
        super(IrLogging, self).init()
        self._cr.execute("select 1 from information_schema.constraint_column_usage where table_name = 'ir_logging' and constraint_name = 'ir_logging_write_uid_fkey'")
        if self._cr.rowcount:
            # DROP CONSTRAINT unconditionally takes an ACCESS EXCLUSIVE lock
            # on the table, even "IF EXISTS" is set and not matching; disabling
            # the relevant trigger instead acquires SHARE ROW EXCLUSIVE, which
            # still conflicts with the ROW EXCLUSIVE needed for an insert
            # NOTE(review): the comment above talks about disabling a trigger,
            # but the statement below drops the constraint — confirm intent.
            self._cr.execute("ALTER TABLE ir_logging DROP CONSTRAINT ir_logging_write_uid_fkey")
|
||||
|
|
@ -0,0 +1,847 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from email.message import EmailMessage
|
||||
from email.utils import make_msgid
|
||||
import base64
|
||||
import datetime
|
||||
import email
|
||||
import email.policy
|
||||
import idna
|
||||
import logging
|
||||
import re
|
||||
import smtplib
|
||||
import ssl
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from socket import gaierror, timeout
|
||||
from OpenSSL import crypto as SSLCrypto
|
||||
from OpenSSL.crypto import Error as SSLCryptoError, FILETYPE_PEM
|
||||
from OpenSSL.SSL import Error as SSLError
|
||||
from urllib3.contrib.pyopenssl import PyOpenSSLContext
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools import ustr, pycompat, formataddr, email_normalize, encapsulate_email, email_domain_extract, email_domain_normalize
|
||||
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
_test_logger = logging.getLogger('odoo.tests')
|
||||
|
||||
SMTP_TIMEOUT = 60
|
||||
|
||||
|
||||
class MailDeliveryException(Exception):
    """Specific exception subclass for mail delivery errors.

    Lets callers distinguish mail-delivery failures from other exceptions.
    """
|
||||
|
||||
|
||||
def make_wrap_property(name):
    """Build a property that proxies attribute ``name`` to ``self.__obj__``.

    Reads return ``getattr(self.__obj__, name)``; writes forward to
    ``setattr(self.__obj__, name, value)``.
    """
    def _get(self):
        return getattr(self.__obj__, name)

    def _set(self, value):
        setattr(self.__obj__, name, value)

    return property(_get, _set)
|
||||
|
||||
|
||||
class SMTPConnection:
    """Wrapper around smtplib.SMTP and smtplib.SMTP_SSL"""

    def __init__(self, server, port, encryption, context=None):
        # 'ssl' means SMTP-over-SSL on a dedicated port; every other mode
        # starts in cleartext (STARTTLS, if any, is negotiated later).
        if encryption == 'ssl':
            connection = smtplib.SMTP_SSL(server, port, timeout=SMTP_TIMEOUT, context=context)
        else:
            connection = smtplib.SMTP(server, port, timeout=SMTP_TIMEOUT)
        self.__obj__ = connection
|
||||
|
||||
|
||||
# Proxy the usual smtplib.SMTP API through the SMTPConnection wrapper so it
# can stand in for a raw SMTP/SMTP_SSL object.
SMTP_ATTRIBUTES = [
    'auth', 'auth_cram_md5', 'auth_login', 'auth_plain', 'close', 'data', 'docmd', 'ehlo', 'ehlo_or_helo_if_needed',
    'expn', 'from_filter', 'getreply', 'has_extn', 'login', 'mail', 'noop', 'putcmd', 'quit', 'rcpt', 'rset',
    'send_message', 'sendmail', 'set_debuglevel', 'smtp_from', 'starttls', 'user', 'verify', '_host',
]
for attribute_name in SMTP_ATTRIBUTES:
    setattr(SMTPConnection, attribute_name, make_wrap_property(attribute_name))
|
||||
|
||||
|
||||
# Python 3: patch SMTP's internal printer/debugger so SMTP session traces
# go to our logger instead of stderr.
def _print_debug(self, *args):
    _logger.debug(' '.join(map(str, args)))
smtplib.SMTP._print_debug = _print_debug
|
||||
|
||||
# Python 3: workaround for bpo-35805, only partially fixed in Python 3.8.
RFC5322_IDENTIFICATION_HEADERS = {'message-id', 'in-reply-to', 'references', 'resent-msg-id'}
_noFoldPolicy = email.policy.SMTP.clone(max_line_length=None)

class IdentificationFieldsNoFoldPolicy(email.policy.EmailPolicy):
    """SMTP email policy that never folds RFC5322 identification headers.

    Folding those fields is excluded by RFC2047 section 5. Preserving them
    matters because MTAs often rewrite non-conformant Message-ID headers,
    which loses threading information (replies are lost).
    """

    def _fold(self, name, value, *args, **kwargs):
        if name.lower() in RFC5322_IDENTIFICATION_HEADERS:
            # Delegate to a clone of the SMTP policy with folding disabled.
            return _noFoldPolicy._fold(name, value, *args, **kwargs)
        return super()._fold(name, value, *args, **kwargs)

# Global monkey-patch for our preferred SMTP policy, preserving the non-default linesep
email.policy.SMTP = IdentificationFieldsNoFoldPolicy(linesep=email.policy.SMTP.linesep)
|
||||
|
||||
# Python 2: replace smtplib's stderr
class WriteToLogger(object):
    """File-like shim forwarding writes to the module logger."""

    def write(self, s):
        _logger.debug(s)

smtplib.stderr = WriteToLogger()
|
||||
|
||||
def is_ascii(s):
    """Return True when every code point of ``s`` is plain ASCII (< 128)."""
    # str.isascii (Python 3.7+) runs in C and matches the previous
    # all(ord(cp) < 128) check, including the empty-string case (True).
    return s.isascii()
|
||||
|
||||
# Loose address matcher: a local part free of quotes/spaces/commas/'<',
# an '@', then a domain free of '>'/quotes/spaces/commas.
address_pattern = re.compile(r'([^" ,<@]+@[^>" ,]+)')
|
||||
|
||||
def extract_rfc2822_addresses(text):
    """Returns a list of valid RFC2822 addresses
    that can be found in ``source``, ignoring
    malformed ones and non-ASCII ones.
    """
    if not text:
        return []
    valid_addresses = []
    for candidate in address_pattern.findall(ustr(text)):
        try:
            valid_addresses.append(formataddr(('', candidate), charset='ascii'))
        except idna.IDNAError:
            # Domain cannot be IDNA-encoded: drop this candidate.
            continue
    return valid_addresses
|
||||
|
||||
|
||||
class IrMailServer(models.Model):
    """Represents an SMTP server, able to send outgoing emails, with SSL and TLS capabilities."""
    _name = "ir.mail_server"
    _description = 'Mail Server'
    # Lowest sequence number = highest priority when picking a server.
    _order = 'sequence'
    _allow_sudo_commands = False

    # Reusable error message for sends with no usable To/Cc/Bcc address.
    NO_VALID_RECIPIENT = ("At least one valid recipient address should be "
                          "specified for outgoing emails (To/Cc/Bcc)")

    name = fields.Char(string='Name', required=True, index=True)
    # Restricts which envelope senders may go through this server
    # (full address or bare domain).
    from_filter = fields.Char(
        "FROM Filtering",
        help='Define for which email address or domain this server can be used.\n'
             'e.g.: "notification@odoo.com" or "odoo.com"')
    smtp_host = fields.Char(string='SMTP Server', required=True, help="Hostname or IP of SMTP server")
    smtp_port = fields.Integer(string='SMTP Port', required=True, default=25, help="SMTP Port. Usually 465 for SSL, and 25 or 587 for other cases.")
    smtp_authentication = fields.Selection([('login', 'Username'), ('certificate', 'SSL Certificate')], string='Authenticate with', required=True, default='login')
    smtp_authentication_info = fields.Text('Authentication Info', compute='_compute_smtp_authentication_info')
    # Credential fields are restricted to system administrators.
    smtp_user = fields.Char(string='Username', help="Optional username for SMTP authentication", groups='base.group_system')
    smtp_pass = fields.Char(string='Password', help="Optional password for SMTP authentication", groups='base.group_system')
    smtp_encryption = fields.Selection([('none', 'None'),
                                        ('starttls', 'TLS (STARTTLS)'),
                                        ('ssl', 'SSL/TLS')],
                                       string='Connection Encryption', required=True, default='none',
                                       help="Choose the connection encryption scheme:\n"
                                            "- None: SMTP sessions are done in cleartext.\n"
                                            "- TLS (STARTTLS): TLS encryption is requested at start of SMTP session (Recommended)\n"
                                            "- SSL/TLS: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)")
    # Stored as plain binary columns (attachment=False), not ir.attachment.
    smtp_ssl_certificate = fields.Binary(
        'SSL Certificate', groups='base.group_system', attachment=False,
        help='SSL certificate used for authentication')
    smtp_ssl_private_key = fields.Binary(
        'SSL Private Key', groups='base.group_system', attachment=False,
        help='SSL private key used for authentication')
    smtp_debug = fields.Boolean(string='Debugging', help="If enabled, the full output of SMTP sessions will "
                                                         "be written to the server log at DEBUG level "
                                                         "(this is very verbose and may include confidential info!)")
    sequence = fields.Integer(string='Priority', default=10, help="When no specific mail server is requested for a mail, the highest priority one "
                                                                  "is used. Default priority is 10 (smaller number = higher priority)")
    active = fields.Boolean(default=True)
|
||||
|
||||
@api.depends('smtp_authentication')
def _compute_smtp_authentication_info(self):
    """Fill the help text describing the selected authentication method.

    A compute method must assign its computed field on every record,
    including when no recognized authentication mode is selected.
    """
    for server in self:
        if server.smtp_authentication == 'login':
            server.smtp_authentication_info = _(
                'Connect to your server through your usual username and password. \n'
                'This is the most basic SMTP authentication process and '
                'may not be accepted by all providers. \n')
        elif server.smtp_authentication == 'certificate':
            server.smtp_authentication_info = _(
                'Authenticate by using SSL certificates, belonging to your domain name. \n'
                'SSL certificates allow you to authenticate your mail server for the entire domain name.')
        else:
            # BUG FIX: the original assigned server.smtp_authentication (the
            # selection field this compute depends on) instead of resetting
            # the computed field, leaving smtp_authentication_info unset and
            # clobbering the user's authentication choice.
            server.smtp_authentication_info = False
|
||||
|
||||
@api.constrains('smtp_authentication', 'smtp_ssl_certificate', 'smtp_ssl_private_key')
def _check_smtp_ssl_files(self):
    """Certificate authentication requires both a certificate and a private key."""
    for mail_server in self:
        if mail_server.smtp_authentication != 'certificate':
            continue
        if not mail_server.smtp_ssl_private_key:
            raise UserError(_('SSL private key is missing for %s.', mail_server.name))
        if not mail_server.smtp_ssl_certificate:
            raise UserError(_('SSL certificate is missing for %s.', mail_server.name))
|
||||
|
||||
def write(self, vals):
    """Ensure we cannot archive a server in-use"""
    # The usage check only matters when the write deactivates records.
    usages_per_server = {}
    if not vals.get('active', True):
        usages_per_server = self._active_usages_compute()
    if not usages_per_server:
        return super().write(vals)

    # Write cannot be performed as some servers are used: build a detailed
    # usage description per server for the error message.
    is_multiple_server_usage = len(usages_per_server) > 1
    usage_details_per_server = {}
    for server in self:
        usages = usages_per_server.get(server.id)
        if usages is None:
            continue
        details = []
        if is_multiple_server_usage:
            details.append(_('%s (Dedicated Outgoing Mail Server):', server.display_name))
        details.extend(f'- {usage}' for usage in usages)
        usage_details_per_server[server] = details

    # Raise with servers ordered by display name and their concatenated usages.
    ordered_servers = sorted(usage_details_per_server, key=lambda s: s.display_name)
    error_server_usage = ', '.join(s.display_name for s in ordered_servers)
    error_usage_details = '\n'.join(
        line
        for s in ordered_servers
        for line in usage_details_per_server[s])
    if is_multiple_server_usage:
        raise UserError(
            _('You cannot archive these Outgoing Mail Servers (%s) because they are still used in the following case(s):\n%s',
              error_server_usage, error_usage_details))
    raise UserError(
        _('You cannot archive this Outgoing Mail Server (%s) because it is still used in the following case(s):\n%s',
          error_server_usage, error_usage_details))
|
||||
|
||||
def _active_usages_compute(self):
    """Compute a dict server id to list of user-friendly outgoing mail servers usage of this record set.

    This method must be overridden by all modules that uses this class in order to complete the list with
    user-friendly string describing the active elements that could send mail through the instance of this class.
    :return dict: { ir_mail_server.id: usage_str_list }.
    """
    # Base implementation: no known usages.
    return {}
|
||||
|
||||
def _get_test_email_addresses(self):
    """Pick the (from, to) pair used to dry-run this server's SMTP dialog."""
    self.ensure_one()
    email_to = "noreply@odoo.com"
    if not self.from_filter:
        # Fallback to current user email if there's no from filter
        email_from = self.env.user.email
        if not email_from:
            raise UserError(_('Please configure an email on the current user to simulate '
                              'sending an email message via this outgoing server'))
        return email_from, email_to
    if "@" in self.from_filter:
        # All emails will be sent from the same address
        return self.from_filter, email_to
    # All emails will be sent from any address in the same domain
    default_from = self.env["ir.config_parameter"].sudo().get_param("mail.default.from", "odoo")
    if "@" not in default_from:
        return f"{default_from}@{self.from_filter}", email_to
    if self._match_from_filter(default_from, self.from_filter):
        # the mail server is configured for a domain
        # that match the default email address
        return default_from, email_to
    # the from_filter is configured for a domain different that the one
    # of the full email configured in mail.default.from
    return f"noreply@{self.from_filter}", email_to
|
||||
|
||||
def test_smtp_connection(self):
    """Dry-run the SMTP dialog (connect, MAIL FROM, RCPT TO, DATA) for each
    server without actually sending a mail; raise a UserError on any
    failure, otherwise return a success notification client action.
    """
    for server in self:
        smtp = False
        try:
            smtp = self.connect(mail_server_id=server.id, allow_archived=True)
            # simulate sending an email from current user's address - without sending it!
            email_from, email_to = server._get_test_email_addresses()
            # Testing the MAIL FROM step should detect sender filter problems
            # NOTE(review): the %(...)s % locals() pattern below couples the
            # messages to the exact local names email_from/email_to/repl —
            # keep those names in sync with the format strings.
            (code, repl) = smtp.mail(email_from)
            if code != 250:
                raise UserError(_('The server refused the sender address (%(email_from)s) '
                                  'with error %(repl)s') % locals())
            # Testing the RCPT TO step should detect most relaying problems
            (code, repl) = smtp.rcpt(email_to)
            if code not in (250, 251):
                raise UserError(_('The server refused the test recipient (%(email_to)s) '
                                  'with error %(repl)s') % locals())
            # Beginning the DATA step should detect some deferred rejections
            # Can't use self.data() as it would actually send the mail!
            smtp.putcmd("data")
            (code, repl) = smtp.getreply()
            if code != 354:
                raise UserError(_('The server refused the test connection '
                                  'with error %(repl)s') % locals())
        except UserError as e:
            # let UserErrors (messages) bubble up
            raise e
        except (UnicodeError, idna.core.InvalidCodepoint) as e:
            raise UserError(_("Invalid server name !\n %s", ustr(e)))
        except (gaierror, timeout) as e:
            raise UserError(_("No response received. Check server address and port number.\n %s", ustr(e)))
        except smtplib.SMTPServerDisconnected as e:
            raise UserError(_("The server has closed the connection unexpectedly. Check configuration served on this port number.\n %s", ustr(e.strerror)))
        except smtplib.SMTPResponseException as e:
            raise UserError(_("Server replied with following exception:\n %s", ustr(e.smtp_error)))
        except smtplib.SMTPNotSupportedError as e:
            raise UserError(_("An option is not supported by the server:\n %s", e.strerror))
        except smtplib.SMTPException as e:
            raise UserError(_("An SMTP exception occurred. Check port number and connection security type.\n %s", ustr(e)))
        except SSLError as e:
            raise UserError(_("An SSL exception occurred. Check connection security type.\n %s", ustr(e)))
        except Exception as e:
            raise UserError(_("Connection Test Failed! Here is what we got instead:\n %s", ustr(e)))
        finally:
            try:
                if smtp:
                    smtp.close()
            except Exception:
                # ignored, just a consequence of the previous exception
                pass

    # Every server in the recordset passed: notify the user in the webclient.
    message = _("Connection Test Successful!")
    return {
        'type': 'ir.actions.client',
        'tag': 'display_notification',
        'params': {
            'message': message,
            'type': 'success',
            'sticky': False,
        }
    }
|
||||
|
||||
def connect(self, host=None, port=None, user=None, password=None, encryption=None,
            smtp_from=None, ssl_certificate=None, ssl_private_key=None, smtp_debug=False, mail_server_id=None,
            allow_archived=False):
    """Returns a new SMTP connection to the given SMTP server.
    When running in test mode, this method does nothing and returns `None`.

    :param host: host or IP of SMTP server to connect to, if mail_server_id not passed
    :param int port: SMTP port to connect to
    :param user: optional username to authenticate with
    :param password: optional password to authenticate with
    :param string encryption: optional, ``'ssl'`` | ``'starttls'``
    :param smtp_from: FROM SMTP envelop, used to find the best mail server
    :param ssl_certificate: filename of the SSL certificate used for authentication
        Used when no mail server is given and overwrite the odoo-bin argument "smtp_ssl_certificate"
    :param ssl_private_key: filename of the SSL private key used for authentication
        Used when no mail server is given and overwrite the odoo-bin argument "smtp_ssl_private_key"
    :param bool smtp_debug: toggle debugging of SMTP sessions (all i/o
                            will be output in logs)
    :param mail_server_id: ID of specific mail server to use (overrides other parameters)
    :param bool allow_archived: by default (False), an exception is raised when calling this method on an
        archived record (using mail_server_id param). It can be set to True for testing so that the exception is no
        longer raised.
    """
    # Do not actually connect while running in test mode
    if self._is_test_mode():
        return

    mail_server = smtp_encryption = None
    if mail_server_id:
        # An explicit server was requested; it wins over every other parameter.
        mail_server = self.sudo().browse(mail_server_id)
        if not allow_archived and not mail_server.active:
            raise UserError(_('The server "%s" cannot be used because it is archived.', mail_server.display_name))
    elif not host:
        # No explicit server and no explicit host: pick the best configured
        # server for the requested envelope sender.
        mail_server, smtp_from = self.sudo()._find_mail_server(smtp_from)

    if not mail_server:
        # Empty recordset: fall back to the CLI/config SMTP parameters below.
        mail_server = self.env['ir.mail_server']
    ssl_context = None

    if mail_server:
        # Parameters come from the configured server record.
        smtp_server = mail_server.smtp_host
        smtp_port = mail_server.smtp_port
        if mail_server.smtp_authentication == "certificate":
            # Certificate authentication: no username/password is used.
            smtp_user = None
            smtp_password = None
        else:
            smtp_user = mail_server.smtp_user
            smtp_password = mail_server.smtp_pass
        smtp_encryption = mail_server.smtp_encryption
        smtp_debug = smtp_debug or mail_server.smtp_debug
        from_filter = mail_server.from_filter
        if mail_server.smtp_authentication == "certificate":
            # Build a TLS context carrying the record's PEM certificate/key.
            try:
                ssl_context = PyOpenSSLContext(ssl.PROTOCOL_TLS)
                smtp_ssl_certificate = base64.b64decode(mail_server.smtp_ssl_certificate)
                certificate = SSLCrypto.load_certificate(FILETYPE_PEM, smtp_ssl_certificate)
                smtp_ssl_private_key = base64.b64decode(mail_server.smtp_ssl_private_key)
                private_key = SSLCrypto.load_privatekey(FILETYPE_PEM, smtp_ssl_private_key)
                ssl_context._ctx.use_certificate(certificate)
                ssl_context._ctx.use_privatekey(private_key)
                # Check that the private key match the certificate
                ssl_context._ctx.check_privatekey()
            except SSLCryptoError as e:
                raise UserError(_('The private key or the certificate is not a valid file. \n%s', str(e)))
            except SSLError as e:
                raise UserError(_('Could not load your certificate / private key. \n%s', str(e)))

    else:
        # we were passed individual smtp parameters or nothing and there is no default server
        smtp_server = host or tools.config.get('smtp_server')
        smtp_port = tools.config.get('smtp_port', 25) if port is None else port
        smtp_user = user or tools.config.get('smtp_user')
        smtp_password = password or tools.config.get('smtp_password')
        from_filter = self.env['ir.config_parameter'].sudo().get_param(
            'mail.default.from_filter', tools.config.get('from_filter'))
        smtp_encryption = encryption
        if smtp_encryption is None and tools.config.get('smtp_ssl'):
            smtp_encryption = 'starttls' # smtp_ssl => STARTTLS as of v7
        smtp_ssl_certificate_filename = ssl_certificate or tools.config.get('smtp_ssl_certificate_filename')
        smtp_ssl_private_key_filename = ssl_private_key or tools.config.get('smtp_ssl_private_key_filename')

        if smtp_ssl_certificate_filename and smtp_ssl_private_key_filename:
            # Certificate/key provided as filenames (CLI/config) this time.
            try:
                ssl_context = PyOpenSSLContext(ssl.PROTOCOL_TLS)
                ssl_context.load_cert_chain(smtp_ssl_certificate_filename, keyfile=smtp_ssl_private_key_filename)
                # Check that the private key match the certificate
                ssl_context._ctx.check_privatekey()
            except SSLCryptoError as e:
                raise UserError(_('The private key or the certificate is not a valid file. \n%s', str(e)))
            except SSLError as e:
                raise UserError(_('Could not load your certificate / private key. \n%s', str(e)))

    if not smtp_server:
        raise UserError(
            (_("Missing SMTP Server") + "\n" +
             _("Please define at least one SMTP server, "
               "or provide the SMTP parameters explicitly.")))

    if smtp_encryption == 'ssl':
        if 'SMTP_SSL' not in smtplib.__all__:
            raise UserError(
                _("Your Odoo Server does not support SMTP-over-SSL. "
                  "You could use STARTTLS instead. "
                  "If SSL is needed, an upgrade to Python 2.6 on the server-side "
                  "should do the trick."))
    connection = SMTPConnection(smtp_server, smtp_port, smtp_encryption, context=ssl_context)
    connection.set_debuglevel(smtp_debug)
    if smtp_encryption == 'starttls':
        # starttls() will perform ehlo() if needed first
        # and will discard the previous list of services
        # after successfully performing STARTTLS command,
        # (as per RFC 3207) so for example any AUTH
        # capability that appears only on encrypted channels
        # will be correctly detected for next step
        connection.starttls(context=ssl_context)

    if smtp_user:
        # Attempt authentication - will raise if AUTH service not supported
        # IDNA-encode the domain part of the login when it is not ASCII.
        local, at, domain = smtp_user.rpartition('@')
        if at:
            smtp_user = local + at + idna.encode(domain).decode('ascii')
        mail_server._smtp_login(connection, smtp_user, smtp_password or '')

    # Some methods of SMTP don't check whether EHLO/HELO was sent.
    # Anyway, as it may have been sent by login(), all subsequent usages should consider this command as sent.
    connection.ehlo_or_helo_if_needed()

    # Store the "from_filter" of the mail server / odoo-bin argument to know if we
    # need to change the FROM headers or not when we will prepare the mail message
    connection.from_filter = from_filter
    connection.smtp_from = smtp_from

    return connection
|
||||
|
||||
def _smtp_login(self, connection, smtp_user, smtp_password):
    """Authenticate the SMTP connection with a username and password.

    Kept as a separate hook so other modules can override it with a
    different authentication method; callable on the model itself or on
    a singleton record.

    :param connection: the SMTP connection to authenticate
    :param smtp_user: the user used for the authentication
    :param smtp_password: the password used for the authentication
    """
    connection.login(smtp_user, smtp_password)
|
||||
|
||||
def build_email(self, email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,
                attachments=None, message_id=None, references=None, object_id=False, subtype='plain', headers=None,
                body_alternative=None, subtype_alternative='plain'):
    """Constructs an RFC2822 email.message.Message object based on the keyword arguments passed, and returns it.

    :param string email_from: sender email address
    :param list email_to: list of recipient addresses (to be joined with commas)
    :param string subject: email subject (no pre-encoding/quoting necessary)
    :param string body: email body, of the type ``subtype`` (by default, plaintext).
                        If html subtype is used, the message will be automatically converted
                        to plaintext and wrapped in multipart/alternative, unless an explicit
                        ``body_alternative`` version is passed.
    :param string body_alternative: optional alternative body, of the type specified in ``subtype_alternative``
    :param string reply_to: optional value of Reply-To header
    :param string object_id: optional tracking identifier, to be included in the message-id for
                             recognizing replies. Suggested format for object-id is "res_id-model",
                             e.g. "12345-crm.lead".
    :param string subtype: optional mime subtype for the text body (usually 'plain' or 'html'),
                           must match the format of the ``body`` parameter. Default is 'plain',
                           making the content part of the mail "text/plain".
    :param string subtype_alternative: optional mime subtype of ``body_alternative`` (usually 'plain'
                                       or 'html'). Default is 'plain'.
    :param list attachments: list of (filename, filecontents, mimetype) triples, where
                             filecontents contains the bytes of the attachment
    :param message_id: optional Message-Id; generated when not provided
    :param references: optional References header value
    :param list email_cc: optional list of string values for CC header (to be joined with commas)
    :param list email_bcc: optional list of string values for BCC header (to be joined with commas)
    :param dict headers: optional map of headers to set on the outgoing mail (may override the
                         other headers, including Subject, Reply-To, Message-Id, etc.)
    :rtype: email.message.EmailMessage
    :return: the new RFC2822 email message
    """
    email_from = email_from or self._get_default_from_address()
    assert email_from, "You must either provide a sender address explicitly or configure "\
                       "using the combination of `mail.catchall.domain` and `mail.default.from` "\
                       "ICPs, in the server configuration file or with the "\
                       "--email-from startup parameter."

    headers = headers or {}         # need valid dict later
    email_cc = email_cc or []
    email_bcc = email_bcc or []
    body = body or u''

    msg = EmailMessage(policy=email.policy.SMTP)
    if not message_id:
        if object_id:
            # Tracking id embedded in the Message-Id so replies can be matched back.
            message_id = tools.generate_tracking_message_id(object_id)
        else:
            message_id = make_msgid()
    msg['Message-Id'] = message_id
    if references:
        msg['references'] = references
    msg['Subject'] = subject
    msg['From'] = email_from
    # EmailMessage allows duplicate headers: delete any existing Reply-To
    # before setting ours so exactly one is present.
    del msg['Reply-To']
    msg['Reply-To'] = reply_to or email_from
    msg['To'] = email_to
    if email_cc:
        msg['Cc'] = email_cc
    if email_bcc:
        msg['Bcc'] = email_bcc
    msg['Date'] = datetime.datetime.utcnow()
    # Custom headers are applied last so they can override the defaults above.
    for key, value in headers.items():
        msg[pycompat.to_text(ustr(key))] = value

    email_body = ustr(body)
    if subtype == 'html' and not body_alternative:
        # HTML body with no explicit alternative: auto-generate a plaintext part.
        msg['MIME-Version'] = '1.0'
        msg.add_alternative(tools.html2plaintext(email_body), subtype='plain', charset='utf-8')
        msg.add_alternative(email_body, subtype=subtype, charset='utf-8')
    elif body_alternative:
        msg['MIME-Version'] = '1.0'
        msg.add_alternative(ustr(body_alternative), subtype=subtype_alternative, charset='utf-8')
        msg.add_alternative(email_body, subtype=subtype, charset='utf-8')
    else:
        msg.set_content(email_body, subtype=subtype, charset='utf-8')

    if attachments:
        for (fname, fcontent, mime) in attachments:
            maintype, subtype = mime.split('/') if mime and '/' in mime else ('application', 'octet-stream')
            if maintype == 'message' and subtype == 'rfc822':
                # Use binary encoding for "message/rfc822" attachments (see RFC 2046 Section 5.2.1)
                msg.add_attachment(fcontent, maintype, subtype, filename=fname, cte='binary')
            else:
                msg.add_attachment(fcontent, maintype, subtype, filename=fname)
    return msg
|
||||
|
||||
@api.model
|
||||
def _get_default_bounce_address(self):
|
||||
'''Compute the default bounce address.
|
||||
|
||||
The default bounce address is used to set the envelop address if no
|
||||
envelop address is provided in the message. It is formed by properly
|
||||
joining the parameters "mail.bounce.alias" and
|
||||
"mail.catchall.domain".
|
||||
|
||||
If "mail.bounce.alias" is not set it defaults to "postmaster-odoo".
|
||||
|
||||
If "mail.catchall.domain" is not set, return None.
|
||||
|
||||
'''
|
||||
ICP = self.env['ir.config_parameter'].sudo()
|
||||
bounce_alias = ICP.get_param('mail.bounce.alias')
|
||||
domain = ICP.get_param('mail.catchall.domain')
|
||||
if bounce_alias and domain:
|
||||
return '%s@%s' % (bounce_alias, domain)
|
||||
return
|
||||
|
||||
@api.model
|
||||
def _get_default_from_address(self):
|
||||
"""Compute the default from address.
|
||||
|
||||
Used for the "header from" address when no other has been received.
|
||||
|
||||
:return str/None:
|
||||
If the config parameter ``mail.default.from`` contains
|
||||
a full email address, return it.
|
||||
Otherwise, combines config parameters ``mail.default.from`` and
|
||||
``mail.catchall.domain`` to generate a default sender address.
|
||||
|
||||
If some of those parameters is not defined, it will default to the
|
||||
``--email-from`` CLI/config parameter.
|
||||
"""
|
||||
get_param = self.env['ir.config_parameter'].sudo().get_param
|
||||
email_from = get_param("mail.default.from")
|
||||
if email_from and "@" in email_from:
|
||||
return email_from
|
||||
domain = get_param("mail.catchall.domain")
|
||||
if email_from and domain:
|
||||
return "%s@%s" % (email_from, domain)
|
||||
return tools.config.get("email_from")
|
||||
|
||||
def _prepare_email_message(self, message, smtp_session):
|
||||
"""Prepare the SMTP information (from, to, message) before sending.
|
||||
|
||||
:param message: the email.message.Message to send, information like the
|
||||
Return-Path, the From, etc... will be used to find the smtp_from and to smtp_to
|
||||
:param smtp_session: the opened SMTP session to use to authenticate the sender
|
||||
:return: smtp_from, smtp_to_list, message
|
||||
smtp_from: email to used during the authentication to the mail server
|
||||
smtp_to_list: list of email address which will receive the email
|
||||
message: the email.message.Message to send
|
||||
"""
|
||||
# Use the default bounce address **only if** no Return-Path was
|
||||
# provided by caller. Caller may be using Variable Envelope Return
|
||||
# Path (VERP) to detect no-longer valid email addresses.
|
||||
bounce_address = message['Return-Path'] or self._get_default_bounce_address() or message['From']
|
||||
smtp_from = message['From'] or bounce_address
|
||||
assert smtp_from, "The Return-Path or From header is required for any outbound email"
|
||||
|
||||
email_to = message['To']
|
||||
email_cc = message['Cc']
|
||||
email_bcc = message['Bcc']
|
||||
del message['Bcc']
|
||||
|
||||
# All recipient addresses must only contain ASCII characters; support
|
||||
# optional pre-validated To list, used notably when formatted emails may
|
||||
# create fake emails using extract_rfc2822_addresses, e.g.
|
||||
# '"Bike@Home" <email@domain.com>' which can be considered as containing
|
||||
# 2 emails by extract_rfc2822_addresses
|
||||
validated_to = self.env.context.get('send_validated_to') or []
|
||||
smtp_to_list = [
|
||||
address
|
||||
for base in [email_to, email_cc, email_bcc]
|
||||
# be sure a given address does not return duplicates (but duplicates
|
||||
# in final smtp to list is still ok)
|
||||
for address in tools.misc.unique(extract_rfc2822_addresses(base))
|
||||
if address and (not validated_to or address in validated_to)
|
||||
]
|
||||
assert smtp_to_list, self.NO_VALID_RECIPIENT
|
||||
|
||||
x_forge_to = message['X-Forge-To']
|
||||
if x_forge_to:
|
||||
# `To:` header forged, e.g. for posting on mail.channels, to avoid confusion
|
||||
del message['X-Forge-To']
|
||||
del message['To'] # avoid multiple To: headers!
|
||||
message['To'] = x_forge_to
|
||||
|
||||
# Try to not spoof the mail from headers
|
||||
from_filter = getattr(smtp_session, 'from_filter', False)
|
||||
smtp_from = getattr(smtp_session, 'smtp_from', False) or smtp_from
|
||||
|
||||
notifications_email = email_normalize(self._get_default_from_address())
|
||||
if notifications_email and email_normalize(smtp_from) == notifications_email and email_normalize(message['From']) != notifications_email:
|
||||
smtp_from = encapsulate_email(message['From'], notifications_email)
|
||||
|
||||
if message['From'] != smtp_from:
|
||||
del message['From']
|
||||
message['From'] = smtp_from
|
||||
|
||||
# Check if it's still possible to put the bounce address as smtp_from
|
||||
if self._match_from_filter(bounce_address, from_filter):
|
||||
# Mail headers FROM will be spoofed to be able to receive bounce notifications
|
||||
# Because the mail server support the domain of the bounce address
|
||||
smtp_from = bounce_address
|
||||
|
||||
# The email's "Envelope From" (Return-Path) must only contain ASCII characters.
|
||||
smtp_from_rfc2822 = extract_rfc2822_addresses(smtp_from)
|
||||
assert smtp_from_rfc2822, (
|
||||
f"Malformed 'Return-Path' or 'From' address: {smtp_from} - "
|
||||
"It should contain one valid plain ASCII email")
|
||||
smtp_from = smtp_from_rfc2822[-1]
|
||||
|
||||
return smtp_from, smtp_to_list, message
|
||||
|
||||
@api.model
|
||||
def send_email(self, message, mail_server_id=None, smtp_server=None, smtp_port=None,
|
||||
smtp_user=None, smtp_password=None, smtp_encryption=None,
|
||||
smtp_ssl_certificate=None, smtp_ssl_private_key=None,
|
||||
smtp_debug=False, smtp_session=None):
|
||||
"""Sends an email directly (no queuing).
|
||||
|
||||
No retries are done, the caller should handle MailDeliveryException in order to ensure that
|
||||
the mail is never lost.
|
||||
|
||||
If the mail_server_id is provided, sends using this mail server, ignoring other smtp_* arguments.
|
||||
If mail_server_id is None and smtp_server is None, use the default mail server (highest priority).
|
||||
If mail_server_id is None and smtp_server is not None, use the provided smtp_* arguments.
|
||||
If both mail_server_id and smtp_server are None, look for an 'smtp_server' value in server config,
|
||||
and fails if not found.
|
||||
|
||||
:param message: the email.message.Message to send. The envelope sender will be extracted from the
|
||||
``Return-Path`` (if present), or will be set to the default bounce address.
|
||||
The envelope recipients will be extracted from the combined list of ``To``,
|
||||
``CC`` and ``BCC`` headers.
|
||||
:param smtp_session: optional pre-established SMTP session. When provided,
|
||||
overrides `mail_server_id` and all the `smtp_*` parameters.
|
||||
Passing the matching `mail_server_id` may yield better debugging/log
|
||||
messages. The caller is in charge of disconnecting the session.
|
||||
:param mail_server_id: optional id of ir.mail_server to use for sending. overrides other smtp_* arguments.
|
||||
:param smtp_server: optional hostname of SMTP server to use
|
||||
:param smtp_encryption: optional TLS mode, one of 'none', 'starttls' or 'ssl' (see ir.mail_server fields for explanation)
|
||||
:param smtp_port: optional SMTP port, if mail_server_id is not passed
|
||||
:param smtp_user: optional SMTP user, if mail_server_id is not passed
|
||||
:param smtp_password: optional SMTP password to use, if mail_server_id is not passed
|
||||
:param smtp_ssl_certificate: filename of the SSL certificate used for authentication
|
||||
:param smtp_ssl_private_key: filename of the SSL private key used for authentication
|
||||
:param smtp_debug: optional SMTP debug flag, if mail_server_id is not passed
|
||||
:return: the Message-ID of the message that was just sent, if successfully sent, otherwise raises
|
||||
MailDeliveryException and logs root cause.
|
||||
"""
|
||||
smtp = smtp_session
|
||||
if not smtp:
|
||||
smtp = self.connect(
|
||||
smtp_server, smtp_port, smtp_user, smtp_password, smtp_encryption,
|
||||
smtp_from=message['From'], ssl_certificate=smtp_ssl_certificate, ssl_private_key=smtp_ssl_private_key,
|
||||
smtp_debug=smtp_debug, mail_server_id=mail_server_id,)
|
||||
|
||||
smtp_from, smtp_to_list, message = self._prepare_email_message(message, smtp)
|
||||
|
||||
# Do not actually send emails in testing mode!
|
||||
if self._is_test_mode():
|
||||
_test_logger.info("skip sending email in test mode")
|
||||
return message['Message-Id']
|
||||
|
||||
try:
|
||||
message_id = message['Message-Id']
|
||||
|
||||
if sys.version_info < (3, 7, 4):
|
||||
# header folding code is buggy and adds redundant carriage
|
||||
# returns, it got fixed in 3.7.4 thanks to bpo-34424
|
||||
message_str = message.as_string()
|
||||
message_str = re.sub('\r+(?!\n)', '', message_str)
|
||||
|
||||
mail_options = []
|
||||
if any((not is_ascii(addr) for addr in smtp_to_list + [smtp_from])):
|
||||
# non ascii email found, require SMTPUTF8 extension,
|
||||
# the relay may reject it
|
||||
mail_options.append("SMTPUTF8")
|
||||
smtp.sendmail(smtp_from, smtp_to_list, message_str, mail_options=mail_options)
|
||||
else:
|
||||
smtp.send_message(message, smtp_from, smtp_to_list)
|
||||
|
||||
# do not quit() a pre-established smtp_session
|
||||
if not smtp_session:
|
||||
smtp.quit()
|
||||
except smtplib.SMTPServerDisconnected:
|
||||
raise
|
||||
except Exception as e:
|
||||
params = (ustr(smtp_server), e.__class__.__name__, e)
|
||||
msg = _("Mail delivery failed via SMTP server '%s'.\n%s: %s", *params)
|
||||
_logger.info(msg)
|
||||
raise MailDeliveryException(_("Mail Delivery Failed"), msg)
|
||||
return message_id
|
||||
|
||||
def _find_mail_server(self, email_from, mail_servers=None):
|
||||
"""Find the appropriate mail server for the given email address.
|
||||
|
||||
Returns: Record<ir.mail_server>, email_from
|
||||
- Mail server to use to send the email (None if we use the odoo-bin arguments)
|
||||
- Email FROM to use to send the email (in some case, it might be impossible
|
||||
to use the given email address directly if no mail server is configured for)
|
||||
"""
|
||||
email_from_normalized = email_normalize(email_from)
|
||||
email_from_domain = email_domain_extract(email_from_normalized)
|
||||
notifications_email = email_normalize(self._get_default_from_address())
|
||||
notifications_domain = email_domain_extract(notifications_email)
|
||||
|
||||
if mail_servers is None:
|
||||
mail_servers = self.sudo().search([], order='sequence')
|
||||
# 0. Archived mail server should never be used
|
||||
mail_servers = mail_servers.filtered('active')
|
||||
|
||||
# 1. Try to find a mail server for the right mail from
|
||||
# Skip if passed email_from is False (example Odoobot has no email address)
|
||||
if email_from_normalized:
|
||||
mail_server = mail_servers.filtered(lambda m: email_normalize(m.from_filter) == email_from_normalized)
|
||||
if mail_server:
|
||||
return mail_server[0], email_from
|
||||
|
||||
mail_server = mail_servers.filtered(lambda m: email_domain_normalize(m.from_filter) == email_from_domain)
|
||||
if mail_server:
|
||||
return mail_server[0], email_from
|
||||
|
||||
# 2. Try to find a mail server for <notifications@domain.com>
|
||||
if notifications_email:
|
||||
mail_server = mail_servers.filtered(lambda m: email_normalize(m.from_filter) == notifications_email)
|
||||
if mail_server:
|
||||
return mail_server[0], notifications_email
|
||||
|
||||
mail_server = mail_servers.filtered(lambda m: email_domain_normalize(m.from_filter) == notifications_domain)
|
||||
if mail_server:
|
||||
return mail_server[0], notifications_email
|
||||
|
||||
# 3. Take the first mail server without "from_filter" because
|
||||
# nothing else has been found... Will spoof the FROM because
|
||||
# we have no other choices (will use the notification email if available
|
||||
# otherwise we will use the user email)
|
||||
mail_server = mail_servers.filtered(lambda m: not m.from_filter)
|
||||
if mail_server:
|
||||
return mail_server[0], notifications_email or email_from
|
||||
|
||||
# 4. Return the first mail server even if it was configured for another domain
|
||||
if mail_servers:
|
||||
_logger.warning(
|
||||
"No mail server matches the from_filter, using %s as fallback",
|
||||
notifications_email or email_from)
|
||||
return mail_servers[0], notifications_email or email_from
|
||||
|
||||
# 5: SMTP config in odoo-bin arguments
|
||||
from_filter = self.env['ir.config_parameter'].sudo().get_param(
|
||||
'mail.default.from_filter', tools.config.get('from_filter'))
|
||||
|
||||
if self._match_from_filter(email_from, from_filter):
|
||||
return None, email_from
|
||||
|
||||
if notifications_email and self._match_from_filter(notifications_email, from_filter):
|
||||
return None, notifications_email
|
||||
|
||||
_logger.warning(
|
||||
"The from filter of the CLI configuration does not match the notification email "
|
||||
"or the user email, using %s as fallback",
|
||||
notifications_email or email_from)
|
||||
return None, notifications_email or email_from
|
||||
|
||||
@api.model
|
||||
def _match_from_filter(self, email_from, from_filter):
|
||||
"""Return True is the given email address match the "from_filter" field.
|
||||
|
||||
The from filter can be Falsy (always match),
|
||||
a domain name or an full email address.
|
||||
"""
|
||||
if not from_filter:
|
||||
return True
|
||||
|
||||
normalized_mail_from = email_normalize(email_from)
|
||||
if '@' in from_filter:
|
||||
return email_normalize(from_filter) == normalized_mail_from
|
||||
|
||||
return email_domain_extract(normalized_mail_from) == email_domain_normalize(from_filter)
|
||||
|
||||
@api.onchange('smtp_encryption')
|
||||
def _onchange_encryption(self):
|
||||
result = {}
|
||||
if self.smtp_encryption == 'ssl':
|
||||
self.smtp_port = 465
|
||||
if not 'SMTP_SSL' in smtplib.__all__:
|
||||
result['warning'] = {
|
||||
'title': _('Warning'),
|
||||
'message': _('Your server does not seem to support SSL, you may want to try STARTTLS instead'),
|
||||
}
|
||||
else:
|
||||
self.smtp_port = 25
|
||||
return result
|
||||
|
||||
def _is_test_mode(self):
|
||||
"""Return True if we are running the tests, so we do not send real emails.
|
||||
|
||||
Can be overridden in tests after mocking the SMTP lib to test in depth the
|
||||
outgoing mail server.
|
||||
"""
|
||||
return getattr(threading.current_thread(), 'testing', False) or self.env.registry.in_test_mode()
|
||||
2451
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_model.py
Normal file
2451
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_model.py
Normal file
File diff suppressed because it is too large
Load diff
1162
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_module.py
Normal file
1162
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_module.py
Normal file
File diff suppressed because it is too large
Load diff
146
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_profile.py
Normal file
146
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_profile.py
Normal file
|
|
@ -0,0 +1,146 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import base64
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import fields, models, api, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.http import request
|
||||
from odoo.tools.profiler import make_session
|
||||
from odoo.tools.speedscope import Speedscope
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IrProfile(models.Model):
    """Storage for profiling results (see odoo.tools.profiler)."""
    _name = 'ir.profile'
    _description = 'Profiling results'
    _log_access = False  # avoid useless foreign key on res_user
    _order = 'session desc, id desc'
    _allow_sudo_commands = False

    create_date = fields.Datetime('Creation Date')

    session = fields.Char('Session', index=True)
    name = fields.Char('Description')
    duration = fields.Float('Duration')

    init_stack_trace = fields.Text('Initial stack trace', prefetch=False)

    sql = fields.Text('Sql', prefetch=False)
    sql_count = fields.Integer('Queries Count')
    traces_async = fields.Text('Traces Async', prefetch=False)
    traces_sync = fields.Text('Traces Sync', prefetch=False)
    qweb = fields.Text('Qweb', prefetch=False)
    entry_count = fields.Integer('Entry count')

    speedscope = fields.Binary('Speedscope', compute='_compute_speedscope')
    speedscope_url = fields.Text('Open', compute='_compute_speedscope_url')

    @api.autovacuum
    def _gc_profile(self):
        """Garbage-collect profiling results older than 30 days."""
        cutoff = fields.Datetime.now() - datetime.timedelta(days=30)
        return self.sudo().search([('create_date', '<', cutoff)]).unlink()

    def _compute_speedscope(self):
        """Build the base64-encoded speedscope JSON from the raw traces."""
        for execution in self:
            sp = Speedscope(init_stack_trace=json.loads(execution.init_stack_trace))
            # feed each available collector output into the profile
            if execution.sql:
                sp.add('sql', json.loads(execution.sql))
            if execution.traces_async:
                sp.add('frames', json.loads(execution.traces_async))
            if execution.traces_sync:
                sp.add('settrace', json.loads(execution.traces_sync))

            payload = json.dumps(sp.add_default().make())
            execution.speedscope = base64.b64encode(payload.encode('utf-8'))

    def _compute_speedscope_url(self):
        """Expose the URL opening this profile in the speedscope viewer."""
        for profile in self:
            profile.speedscope_url = f'/web/speedscope/{profile.id}'

    def _enabled_until(self):
        """
        If the profiling is enabled, return until when it is enabled.
        Otherwise return ``None``.
        """
        limit = self.env['ir.config_parameter'].sudo().get_param('base.profiling_enabled_until', '')
        # the parameter stores a datetime string; lexicographic comparison
        # works for the "YYYY-MM-DD HH:MM:SS" format
        return limit if str(fields.Datetime.now()) < limit else None

    @api.model
    def set_profiling(self, profile=None, collectors=None, params=None):
        """
        Enable or disable profiling for the current user.

        :param profile: ``True`` to enable profiling, ``False`` to disable it.
        :param list collectors: optional list of collectors to use (string)
        :param dict params: optional parameters set on the profiler object
        """
        # Note: parameters come from RPC calls or route params (public user),
        # meaning that the corresponding session variables are client-defined.
        # This allows to activate any profiler, but can be dangerous when
        # handling request.session.profile_collectors/profile_params.
        if profile:
            limit = self._enabled_until()
            _logger.info("User %s started profiling", self.env.user.name)
            if not limit:
                request.session.profile_session = None
                if self.env.user._is_system():
                    # let admins enable profiling through a wizard
                    return {
                        'type': 'ir.actions.act_window',
                        'view_mode': 'form',
                        'res_model': 'base.enable.profiling.wizard',
                        'target': 'new',
                        'views': [[False, 'form']],
                    }
                raise UserError(_('Profiling is not enabled on this database. Please contact an administrator.'))
            if not request.session.profile_session:
                request.session.profile_session = make_session(self.env.user.name)
                request.session.profile_expiration = limit
                if request.session.profile_collectors is None:
                    request.session.profile_collectors = []
                if request.session.profile_params is None:
                    request.session.profile_params = {}
        elif profile is not None:
            # profile is explicitly falsy: stop profiling
            request.session.profile_session = None

        if collectors is not None:
            request.session.profile_collectors = collectors

        if params is not None:
            request.session.profile_params = params

        return {
            'session': request.session.profile_session,
            'collectors': request.session.profile_collectors,
            'params': request.session.profile_params,
        }
|
||||
|
||||
|
||||
class EnableProfilingWizard(models.TransientModel):
    """Wizard letting an administrator enable profiling for a limited time."""
    _name = 'base.enable.profiling.wizard'
    _description = "Enable profiling for some time"

    duration = fields.Selection([
        ('minutes_5', "5 Minutes"),
        ('hours_1', "1 Hour"),
        ('days_1', "1 Day"),
        ('months_1', "1 Month"),
    ], string="Enable profiling for")
    expiration = fields.Datetime("Enable profiling until", compute='_compute_expiration', store=True, readonly=False)

    @api.depends('duration')
    def _compute_expiration(self):
        """Derive the expiration datetime from the selected duration."""
        for record in self:
            # selection keys encode "<unit>_<quantity>"; 'days_0' means "now"
            unit, quantity = (record.duration or 'days_0').split('_')
            record.expiration = fields.Datetime.now() + relativedelta(**{unit: int(quantity)})

    def submit(self):
        """Persist the profiling deadline and close the wizard."""
        self.env['ir.config_parameter'].set_param('base.profiling_enabled_until', self.expiration)
        return False
|
||||
|
|
@ -0,0 +1,459 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.osv.expression import TERM_OPERATORS_NEGATION
|
||||
from odoo.tools import ormcache
|
||||
|
||||
TYPE2FIELD = {
|
||||
'char': 'value_text',
|
||||
'float': 'value_float',
|
||||
'boolean': 'value_integer',
|
||||
'integer': 'value_integer',
|
||||
'text': 'value_text',
|
||||
'binary': 'value_binary',
|
||||
'many2one': 'value_reference',
|
||||
'date': 'value_datetime',
|
||||
'datetime': 'value_datetime',
|
||||
'selection': 'value_text',
|
||||
}
|
||||
|
||||
TYPE2CLEAN = {
|
||||
'boolean': bool,
|
||||
'integer': lambda val: val or False,
|
||||
'float': lambda val: val or False,
|
||||
'char': lambda val: val or False,
|
||||
'text': lambda val: val or False,
|
||||
'selection': lambda val: val or False,
|
||||
'binary': lambda val: val or False,
|
||||
'date': lambda val: val.date() if val else False,
|
||||
'datetime': lambda val: val or False,
|
||||
}
|
||||
|
||||
|
||||
class Property(models.Model):
|
||||
_name = 'ir.property'
|
||||
_description = 'Company Property'
|
||||
_allow_sudo_commands = False
|
||||
|
||||
name = fields.Char(index=True)
|
||||
res_id = fields.Char(string='Resource', index=True, help="If not set, acts as a default value for new resources",)
|
||||
company_id = fields.Many2one('res.company', string='Company', index=True)
|
||||
fields_id = fields.Many2one('ir.model.fields', string='Field', ondelete='cascade', required=True)
|
||||
value_float = fields.Float()
|
||||
value_integer = fields.Integer()
|
||||
value_text = fields.Text() # will contain (char, text)
|
||||
value_binary = fields.Binary(attachment=False)
|
||||
value_reference = fields.Char()
|
||||
value_datetime = fields.Datetime()
|
||||
type = fields.Selection([('char', 'Char'),
|
||||
('float', 'Float'),
|
||||
('boolean', 'Boolean'),
|
||||
('integer', 'Integer'),
|
||||
('text', 'Text'),
|
||||
('binary', 'Binary'),
|
||||
('many2one', 'Many2One'),
|
||||
('date', 'Date'),
|
||||
('datetime', 'DateTime'),
|
||||
('selection', 'Selection'),
|
||||
],
|
||||
required=True,
|
||||
default='many2one',
|
||||
index=True)
|
||||
|
||||
def init(self):
|
||||
# Ensure there is at most one active variant for each combination.
|
||||
query = """
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS ir_property_unique_index
|
||||
ON %s (fields_id, COALESCE(company_id, 0), COALESCE(res_id, ''))
|
||||
"""
|
||||
self.env.cr.execute(query % self._table)
|
||||
|
||||
def _update_values(self, values):
|
||||
if 'value' not in values:
|
||||
return values
|
||||
value = values.pop('value')
|
||||
|
||||
prop = None
|
||||
type_ = values.get('type')
|
||||
if not type_:
|
||||
if self:
|
||||
prop = self[0]
|
||||
type_ = prop.type
|
||||
else:
|
||||
type_ = self._fields['type'].default(self)
|
||||
|
||||
field = TYPE2FIELD.get(type_)
|
||||
if not field:
|
||||
raise UserError(_('Invalid type'))
|
||||
|
||||
if field == 'value_reference':
|
||||
if not value:
|
||||
value = False
|
||||
elif isinstance(value, models.BaseModel):
|
||||
value = '%s,%d' % (value._name, value.id)
|
||||
elif isinstance(value, int):
|
||||
field_id = values.get('fields_id')
|
||||
if not field_id:
|
||||
if not prop:
|
||||
raise ValueError()
|
||||
field_id = prop.fields_id
|
||||
else:
|
||||
field_id = self.env['ir.model.fields'].browse(field_id)
|
||||
|
||||
value = '%s,%d' % (field_id.sudo().relation, value)
|
||||
|
||||
values[field] = value
|
||||
return values
|
||||
|
||||
def write(self, values):
|
||||
# if any of the records we're writing on has a res_id=False *or*
|
||||
# we're writing a res_id=False on any record
|
||||
default_set = False
|
||||
if self._ids:
|
||||
default_set = values.get('res_id') is False or any(not p.res_id for p in self)
|
||||
r = super(Property, self).write(self._update_values(values))
|
||||
if default_set:
|
||||
# DLE P44: test `test_27_company_dependent`
|
||||
# Easy solution, need to flush write when changing a property.
|
||||
# Maybe it would be better to be able to compute all impacted cache value and update those instead
|
||||
# Then clear_caches must be removed as well.
|
||||
self.env.flush_all()
|
||||
self.clear_caches()
|
||||
return r
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
vals_list = [self._update_values(vals) for vals in vals_list]
|
||||
created_default = any(not v.get('res_id') for v in vals_list)
|
||||
r = super(Property, self).create(vals_list)
|
||||
if created_default:
|
||||
# DLE P44: test `test_27_company_dependent`
|
||||
self.env.flush_all()
|
||||
self.clear_caches()
|
||||
return r
|
||||
|
||||
def unlink(self):
|
||||
default_deleted = any(not p.res_id for p in self)
|
||||
r = super().unlink()
|
||||
if default_deleted:
|
||||
self.clear_caches()
|
||||
return r
|
||||
|
||||
def get_by_record(self):
|
||||
self.ensure_one()
|
||||
if self.type in ('char', 'text', 'selection'):
|
||||
return self.value_text
|
||||
elif self.type == 'float':
|
||||
return self.value_float
|
||||
elif self.type == 'boolean':
|
||||
return bool(self.value_integer)
|
||||
elif self.type == 'integer':
|
||||
return self.value_integer
|
||||
elif self.type == 'binary':
|
||||
return self.value_binary
|
||||
elif self.type == 'many2one':
|
||||
if not self.value_reference:
|
||||
return False
|
||||
model, resource_id = self.value_reference.split(',')
|
||||
return self.env[model].browse(int(resource_id)).exists()
|
||||
elif self.type == 'datetime':
|
||||
return self.value_datetime
|
||||
elif self.type == 'date':
|
||||
if not self.value_datetime:
|
||||
return False
|
||||
return fields.Date.to_string(fields.Datetime.from_string(self.value_datetime))
|
||||
return False
|
||||
|
||||
@api.model
|
||||
def _set_default(self, name, model, value, company=False):
|
||||
""" Set the given field's generic value for the given company.
|
||||
|
||||
:param name: the field's name
|
||||
:param model: the field's model name
|
||||
:param value: the field's value
|
||||
:param company: the company (record or id)
|
||||
"""
|
||||
field_id = self.env['ir.model.fields']._get(model, name).id
|
||||
company_id = int(company) if company else False
|
||||
prop = self.sudo().search([
|
||||
('fields_id', '=', field_id),
|
||||
('company_id', '=', company_id),
|
||||
('res_id', '=', False),
|
||||
])
|
||||
if prop:
|
||||
prop.write({'value': value})
|
||||
else:
|
||||
prop.create({
|
||||
'fields_id': field_id,
|
||||
'company_id': company_id,
|
||||
'res_id': False,
|
||||
'name': name,
|
||||
'value': value,
|
||||
'type': self.env[model]._fields[name].type,
|
||||
})
|
||||
|
||||
@api.model
|
||||
def _get(self, name, model, res_id=False):
|
||||
""" Get the given field's generic value for the record.
|
||||
|
||||
:param name: the field's name
|
||||
:param model: the field's model name
|
||||
:param res_id: optional resource, format: "<id>" (int) or
|
||||
"<model>,<id>" (str)
|
||||
"""
|
||||
if not res_id:
|
||||
t, v = self._get_default_property(name, model)
|
||||
if not v or t != 'many2one':
|
||||
return v
|
||||
return self.env[v[0]].browse(v[1])
|
||||
|
||||
p = self._get_property(name, model, res_id=res_id)
|
||||
if p:
|
||||
return p.get_by_record()
|
||||
return False
|
||||
|
||||
# only cache Property._get(res_id=False) as that's
|
||||
# sub-optimally.
|
||||
COMPANY_KEY = "self.env.company.id"
|
||||
@ormcache(COMPANY_KEY, 'name', 'model')
|
||||
def _get_default_property(self, name, model):
|
||||
prop = self._get_property(name, model, res_id=False)
|
||||
if not prop:
|
||||
return None, False
|
||||
v = prop.get_by_record()
|
||||
if prop.type != 'many2one':
|
||||
return prop.type, v
|
||||
return 'many2one', v and (v._name, v.id)
|
||||
|
||||
def _get_property(self, name, model, res_id):
|
||||
domain = self._get_domain(name, model)
|
||||
if domain is not None:
|
||||
if res_id and isinstance(res_id, int):
|
||||
res_id = "%s,%s" % (model, res_id)
|
||||
domain = [('res_id', '=', res_id)] + domain
|
||||
#make the search with company_id asc to make sure that properties specific to a company are given first
|
||||
return self.sudo().search(domain, limit=1, order='company_id')
|
||||
return self.sudo().browse(())
|
||||
|
||||
def _get_domain(self, prop_name, model):
|
||||
field_id = self.env['ir.model.fields']._get(model, prop_name).id
|
||||
if not field_id:
|
||||
return None
|
||||
company_id = self.env.company.id
|
||||
return [('fields_id', '=', field_id), ('company_id', 'in', [company_id, False])]
|
||||
|
||||
@api.model
|
||||
def _get_multi(self, name, model, ids):
|
||||
""" Read the property field `name` for the records of model `model` with
|
||||
the given `ids`, and return a dictionary mapping `ids` to their
|
||||
corresponding value.
|
||||
"""
|
||||
if not ids:
|
||||
return {}
|
||||
|
||||
field = self.env[model]._fields[name]
|
||||
field_id = self.env['ir.model.fields']._get(model, name).id
|
||||
company_id = self.env.company.id or None
|
||||
|
||||
if field.type == 'many2one':
|
||||
comodel = self.env[field.comodel_name]
|
||||
model_pos = len(model) + 2
|
||||
value_pos = len(comodel._name) + 2
|
||||
# retrieve values: both p.res_id and p.value_reference are formatted
|
||||
# as "<rec._name>,<rec.id>"; the purpose of the LEFT JOIN is to
|
||||
# return the value id if it exists, NULL otherwise
|
||||
query = """
|
||||
SELECT substr(p.res_id, %s)::integer, r.id
|
||||
FROM ir_property p
|
||||
LEFT JOIN {} r ON substr(p.value_reference, %s)::integer=r.id
|
||||
WHERE p.fields_id=%s
|
||||
AND (p.company_id=%s OR p.company_id IS NULL)
|
||||
AND (p.res_id IN %s OR p.res_id IS NULL)
|
||||
ORDER BY p.company_id NULLS FIRST
|
||||
""".format(comodel._table)
|
||||
params = [model_pos, value_pos, field_id, company_id]
|
||||
clean = comodel.browse
|
||||
|
||||
elif field.type in TYPE2FIELD:
|
||||
model_pos = len(model) + 2
|
||||
# retrieve values: p.res_id is formatted as "<rec._name>,<rec.id>"
|
||||
query = """
|
||||
SELECT substr(p.res_id, %s)::integer, p.{}
|
||||
FROM ir_property p
|
||||
WHERE p.fields_id=%s
|
||||
AND (p.company_id=%s OR p.company_id IS NULL)
|
||||
AND (p.res_id IN %s OR p.res_id IS NULL)
|
||||
ORDER BY p.company_id NULLS FIRST
|
||||
""".format(TYPE2FIELD[field.type])
|
||||
params = [model_pos, field_id, company_id]
|
||||
clean = TYPE2CLEAN[field.type]
|
||||
|
||||
else:
|
||||
return dict.fromkeys(ids, False)
|
||||
|
||||
# retrieve values
|
||||
self.flush_model()
|
||||
cr = self.env.cr
|
||||
result = {}
|
||||
refs = {"%s,%s" % (model, id) for id in ids}
|
||||
for sub_refs in cr.split_for_in_conditions(refs):
|
||||
cr.execute(query, params + [sub_refs])
|
||||
result.update(cr.fetchall())
|
||||
|
||||
# determine all values and format them
|
||||
default = result.get(None, None)
|
||||
return {
|
||||
id: clean(result.get(id, default))
|
||||
for id in ids
|
||||
}
|
||||
|
||||
    @api.model
    def _set_multi(self, name, model, values, default_value=None):
        """ Assign the property field `name` for the records of model `model`
        with `values` (dictionary mapping record ids to their value).

        If the value for a given record is the same as the default
        value, the property entry will not be stored, to avoid bloating
        the database.

        If `default_value` is provided, that value will be used instead
        of the computed default value, to determine whether the value
        for a record should be stored or not.

        :param str name: name of the property field
        :param str model: technical name of the model the records belong to
        :param dict values: mapping of record id -> value to assign
        :param default_value: optional override of the company default value
        """
        def clean(value):
            # Normalize recordset values to plain ids so they can be compared
            # with the stored/default values.
            return value.id if isinstance(value, models.BaseModel) else value

        if not values:
            return

        if default_value is None:
            domain = self._get_domain(name, model)
            if domain is None:
                raise Exception()
            # retrieve the default value for the field
            default_value = clean(self._get(name, model))

        # retrieve the properties corresponding to the given record ids
        field_id = self.env['ir.model.fields']._get(model, name).id
        company_id = self.env.company.id
        # ir_property.res_id is stored as "<model>,<id>"; map refs back to ids
        refs = {('%s,%s' % (model, id)): id for id in values}
        props = self.sudo().search([
            ('fields_id', '=', field_id),
            ('company_id', '=', company_id),
            ('res_id', 'in', list(refs)),
        ])

        # modify existing properties
        for prop in props:
            id = refs.pop(prop.res_id)
            value = clean(values[id])
            if value == default_value:
                # avoid prop.unlink(), as it clears the record cache that can
                # contain the value of other properties to set on record!
                self._cr.execute("DELETE FROM ir_property WHERE id=%s", [prop.id])
            elif value != clean(prop.get_by_record()):
                prop.write({'value': value})

        # create new properties for records that do not have one yet
        # (refs now only contains records without an existing property)
        vals_list = []
        for ref, id in refs.items():
            value = clean(values[id])
            if value != default_value:
                vals_list.append({
                    'fields_id': field_id,
                    'company_id': company_id,
                    'res_id': ref,
                    'name': name,
                    'value': value,
                    'type': self.env[model]._fields[name].type,
                })
        self.sudo().create(vals_list)
|
||||
|
||||
    @api.model
    def search_multi(self, name, model, operator, value):
        """ Return a domain for the records that match the given condition.

        :param str name: name of the property field
        :param str model: technical name of the model being searched
        :param str operator: a standard domain operator
        :param value: the comparison value
        :returns: a domain on 'id' selecting the matching records of `model`
        """
        default_matches = False
        # `negate` means: compute the records matching the *negated* condition
        # and return the complement ('not in' instead of 'in') at the end.
        negate = False

        # For "is set" and "is not set", same logic for all types
        if operator == 'in' and False in value:
            operator = 'not in'
            negate = True
        elif operator == 'not in' and False not in value:
            operator = 'in'
            negate = True
        elif operator in ('!=', 'not like', 'not ilike') and value:
            operator = TERM_OPERATORS_NEGATION[operator]
            negate = True
        elif operator == '=' and not value:
            operator = '!='
            negate = True

        field = self.env[model]._fields[name]

        if field.type == 'many2one':
            def makeref(value):
                # many2one values are stored as "<comodel>,<id>" references
                return value and f'{field.comodel_name},{value}'

            if operator in ('=', '!=', '<=', '<', '>', '>='):
                value = makeref(value)
            elif operator in ('in', 'not in'):
                value = [makeref(v) for v in value]
            elif operator in ('=like', '=ilike', 'like', 'not like', 'ilike', 'not ilike'):
                # most probably inefficient... but correct
                target = self.env[field.comodel_name]
                target_names = target.name_search(value, operator=operator, limit=None)
                target_ids = [n[0] for n in target_names]
                operator, value = 'in', [makeref(v) for v in target_ids]

        elif field.type in ('integer', 'float'):
            # No record is created in ir.property if the field's type is float or integer with a value
            # equal to 0. Then to match with the records that are linked to a property field equal to 0,
            # the negation of the operator must be taken to compute the goods and the domain returned
            # to match the searched records is just the opposite.
            value = float(value) if field.type == 'float' else int(value)
            if operator == '>=' and value <= 0:
                operator = '<'
                negate = True
            elif operator == '>' and value < 0:
                operator = '<='
                negate = True
            elif operator == '<=' and value >= 0:
                operator = '>'
                negate = True
            elif operator == '<' and value > 0:
                operator = '>='
                negate = True

        elif field.type == 'boolean':
            # the value must be mapped to an integer value
            value = int(value)

        # retrieve the properties that match the condition
        domain = self._get_domain(name, model)
        if domain is None:
            raise Exception()
        props = self.search(domain + [(TYPE2FIELD[field.type], operator, value)])

        # retrieve the records corresponding to the properties that match
        good_ids = []
        for prop in props:
            if prop.res_id:
                # res_id has the form "<model>,<id>"
                __, res_id = prop.res_id.split(',')
                good_ids.append(int(res_id))
            else:
                # a property with no res_id is the company-level default value
                default_matches = True

        if default_matches:
            # exclude all records with a property that does not match
            props = self.search(domain + [('res_id', '!=', False)])
            all_ids = {int(res_id.split(',')[1]) for res_id in props.mapped('res_id')}
            bad_ids = list(all_ids - set(good_ids))
            if negate:
                return [('id', 'in', bad_ids)]
            else:
                return [('id', 'not in', bad_ids)]
        elif negate:
            return [('id', 'not in', good_ids)]
        else:
            return [('id', 'in', good_ids)]
|
||||
2669
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_qweb.py
Normal file
2669
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_qweb.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,811 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
from datetime import time
|
||||
import logging
|
||||
import re
|
||||
from io import BytesIO
|
||||
|
||||
import babel
|
||||
import babel.dates
|
||||
from markupsafe import Markup, escape
|
||||
from PIL import Image
|
||||
from lxml import etree, html
|
||||
|
||||
from odoo import api, fields, models, _, _lt, tools
|
||||
from odoo.tools import posix_to_ldml, float_utils, format_date, format_duration, pycompat
|
||||
from odoo.tools.mail import safe_attrs
|
||||
from odoo.tools.misc import get_lang, babel_locale_parse
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def nl2br(string):
    """Replace newlines in ``string`` with HTML ``<br>`` line breaks.

    The input is first coerced to text; each ``\\n`` is replaced by a
    markup-safe ``<br>`` tag followed by the original newline.

    :param str string: text to convert
    :rtype: unicode
    """
    text = pycompat.to_text(string)
    return text.replace('\n', Markup('<br>\n'))
|
||||
|
||||
#--------------------------------------------------------------------
|
||||
# QWeb Fields converters
|
||||
#--------------------------------------------------------------------
|
||||
|
||||
class FieldConverter(models.AbstractModel):
    """ Used to convert a t-field specification into an output HTML field.

    :meth:`~.to_html` is the entry point of this conversion from QWeb, it:

    * converts the record value to html using :meth:`~.record_to_html`
    * generates the metadata attributes (``data-oe-``) to set on the root
      result node
    * generates the root result node itself through :meth:`~.render_element`
    """
    _name = 'ir.qweb.field'
    _description = 'Qweb Field'

    @api.model
    def get_available_options(self):
        """
        Get the available option informations.

        Returns a dict of dict with:
        * key equal to the option key.
        * dict: type, params, name, description, default_value
        * type:
            'string'
            'integer'
            'float'
            'model' (e.g. 'res.partner')
            'array'
            'selection' (e.g. [key1, key2...])
        """
        return {}

    @api.model
    def attributes(self, record, field_name, options, values=None):
        """ attributes(record, field_name, options, values=None)

        Generates the metadata attributes (prefixed by ``data-oe-``) for the
        root node of the field conversion.

        The default attributes are:

        * ``model``, the name of the record's model
        * ``id`` the id of the record to which the field belongs
        * ``type`` the logical field type (widget, may not match the field's
          ``type``, may not be any Field subclass name)
        * ``translate``, a boolean flag (``0`` or ``1``) denoting whether the
          field is translatable
        * ``readonly``, has this attribute if the field is readonly
        * ``expression``, the original expression

        :returns: dict (attribute name, attribute value).
        """
        data = {}
        field = record._fields[field_name]

        # branding attributes are only needed for inline editing / translation
        if not options['inherit_branding'] and not options['translate']:
            return data

        data['data-oe-model'] = record._name
        data['data-oe-id'] = record.id
        data['data-oe-field'] = field.name
        data['data-oe-type'] = options.get('type')
        data['data-oe-expression'] = options.get('expression')
        if field.readonly:
            data['data-oe-readonly'] = 1
        return data

    @api.model
    def value_to_html(self, value, options):
        """ value_to_html(value, field, options=None)

        Converts a single value to its HTML version/output
        :rtype: unicode
        """
        return escape(pycompat.to_text(value))

    @api.model
    def record_to_html(self, record, field_name, options):
        """ record_to_html(record, field_name, options)

        Converts the specified field of the ``record`` to HTML

        :rtype: unicode
        """
        if not record:
            return False
        value = record.with_context(**self.env.context)[field_name]
        # a False field value renders as False (no output), not as "False"
        return False if value is False else self.value_to_html(value, options=options)

    @api.model
    def user_lang(self):
        """ user_lang()

        Fetches the res.lang record corresponding to the language code stored
        in the user's context.

        :returns: Model[res.lang]
        """
        return get_lang(self.env)
|
||||
|
||||
|
||||
class IntegerConverter(models.AbstractModel):
    """Render ``integer`` field values for QWeb, honouring the user locale."""
    _name = 'ir.qweb.field.integer'
    _description = 'Qweb Field Integer'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super().get_available_options()
        options.update(
            format_decimalized_number=dict(type='boolean', string=_('Decimalized number')),
            precision_digits=dict(type='integer', string=_('Precision Digits')),
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        # Compact human-readable form (e.g. "1.2k") when requested.
        if options.get('format_decimalized_number'):
            digits = options.get('precision_digits', 1)
            return tools.format_decimalized_number(value, digits)
        # Locale-aware grouping; glue a zero-width no-break space to the minus
        # sign so it cannot be wrapped away from the digits.
        formatted = self.user_lang().format('%d', value, grouping=True)
        formatted = formatted.replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')
        return pycompat.to_text(formatted)
|
||||
|
||||
|
||||
class FloatConverter(models.AbstractModel):
    """Render ``float`` field values with locale formatting and rounding."""
    _name = 'ir.qweb.field.float'
    _description = 'Qweb Field Float'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super().get_available_options()
        options.update(
            precision=dict(type='integer', string=_('Rounding precision')),
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        # The precision either comes from a named decimal.precision record or
        # from an explicit digit count in the options.
        if 'decimal_precision' in options:
            precision = self.env['decimal.precision'].precision_get(options['decimal_precision'])
        else:
            precision = options['precision']

        if precision is None:
            fmt = '%f'
        else:
            value = float_utils.float_round(value, precision_digits=precision)
            fmt = f'%.{precision}f'

        formatted = self.user_lang().format(fmt, value, grouping=True)
        formatted = formatted.replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')

        # %f does not strip trailing zeroes. %g does but its precision causes
        # it to switch to scientific notation starting at a million *and* to
        # strip decimals. So use %f and if no precision was specified manually
        # strip trailing 0.
        if precision is None:
            formatted = re.sub(r'(?:(0|\d+?)0+)$', r'\1', formatted)

        return pycompat.to_text(formatted)

    @api.model
    def record_to_html(self, record, field_name, options):
        # Default the precision from the field definition when neither option
        # was provided by the template.
        if 'precision' not in options and 'decimal_precision' not in options:
            digits = record._fields[field_name].get_digits(record.env)
            precision = digits[1] if digits else None
            options = dict(options, precision=precision)
        return super().record_to_html(record, field_name, options)
|
||||
|
||||
|
||||
class DateConverter(models.AbstractModel):
    """Render ``date`` field values using the user's (or an explicit) format."""
    _name = 'ir.qweb.field.date'
    _description = 'Qweb Field Date'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super().get_available_options()
        options.update(
            format=dict(type='string', string=_('Date format'))
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        # Delegate to the shared date formatter; None format means user default.
        date_format = options.get('format')
        return format_date(self.env, value, date_format=date_format)
|
||||
|
||||
|
||||
class DateTimeConverter(models.AbstractModel):
    # Renders ``datetime`` field values as localized, timezone-aware strings.
    _name = 'ir.qweb.field.datetime'
    _description = 'Qweb Field Datetime'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super(DateTimeConverter, self).get_available_options()
        options.update(
            format=dict(type='string', string=_('Pattern to format')),
            tz_name=dict(type='char', string=_('Optional timezone name')),
            time_only=dict(type='boolean', string=_('Display only the time')),
            hide_seconds=dict(type='boolean', string=_('Hide seconds')),
            date_only=dict(type='boolean', string=_('Display only the date')),
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        """Format a datetime (or its string form) per locale and options.

        The value is converted to the display timezone (from options or the
        user context) before formatting.
        """
        if not value:
            return ''

        lang = self.user_lang()
        locale = babel_locale_parse(lang.code)
        format_func = babel.dates.format_datetime
        if isinstance(value, str):
            value = fields.Datetime.from_string(value)

        if options.get('tz_name'):
            # rebind self so context_timestamp below uses the requested tz
            self = self.with_context(tz=options['tz_name'])
            tzinfo = babel.dates.get_timezone(options['tz_name'])
        else:
            tzinfo = None

        # convert from naive UTC to the context timezone
        value = fields.Datetime.context_timestamp(self, value)

        if 'format' in options:
            pattern = options['format']
        else:
            # build an LDML pattern from the lang's strftime-style formats
            if options.get('time_only'):
                strftime_pattern = ("%s" % (lang.time_format))
            elif options.get('date_only'):
                strftime_pattern = ("%s" % (lang.date_format))
            else:
                strftime_pattern = ("%s %s" % (lang.date_format, lang.time_format))

            pattern = posix_to_ldml(strftime_pattern, locale=locale)

        if options.get('hide_seconds'):
            pattern = pattern.replace(":ss", "").replace(":s", "")

        if options.get('time_only'):
            format_func = babel.dates.format_time
            return pycompat.to_text(format_func(value, format=pattern, tzinfo=tzinfo, locale=locale))
        if options.get('date_only'):
            # format_date takes no tzinfo argument: the tz shift already
            # happened via context_timestamp above
            format_func = babel.dates.format_date
            return pycompat.to_text(format_func(value, format=pattern, locale=locale))

        return pycompat.to_text(format_func(value, format=pattern, tzinfo=tzinfo, locale=locale))
|
||||
|
||||
|
||||
class TextConverter(models.AbstractModel):
    """Render ``text`` field values as escaped HTML."""
    _name = 'ir.qweb.field.text'
    _description = 'Qweb Field Text'
    _inherit = 'ir.qweb.field'

    @api.model
    def value_to_html(self, value, options):
        """HTML-escape *value* and turn its newlines into ``<br>`` tags;
        falsy values render as the empty string."""
        if not value:
            return ''
        return nl2br(escape(value))
|
||||
|
||||
|
||||
class SelectionConverter(models.AbstractModel):
    """Render ``selection`` field values as their human-readable label."""
    _name = 'ir.qweb.field.selection'
    _description = 'Qweb Field Selection'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        """Expose the ``selection`` option (mapping raw value -> label).

        Note: the previous implementation updated the ``selection`` key twice
        in a row; the first ``options.update(...)`` was dead code, immediately
        overwritten by the second. Only the effective definition is kept.
        """
        options = super(SelectionConverter, self).get_available_options()
        options.update(
            selection=dict(type='json', string=_('Json'), description=_('By default the widget uses the field information'), required=True)
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        """Return the escaped label for *value*, or '' for a falsy value."""
        if not value:
            return ''
        return escape(pycompat.to_text(options['selection'][value]) or '')

    @api.model
    def record_to_html(self, record, field_name, options):
        # Default the selection mapping from the field definition when the
        # template did not provide one explicitly.
        if 'selection' not in options:
            options = dict(options, selection=dict(record._fields[field_name].get_description(self.env)['selection']))
        return super(SelectionConverter, self).record_to_html(record, field_name, options)
|
||||
|
||||
|
||||
class ManyToOneConverter(models.AbstractModel):
    """Render ``many2one`` field values as the linked record's display name."""
    _name = 'ir.qweb.field.many2one'
    _description = 'Qweb Field Many to One'
    _inherit = 'ir.qweb.field'

    @api.model
    def value_to_html(self, value, options):
        # An empty recordset or an empty display name both render as False.
        if not value:
            return False
        display = value.sudo().display_name
        return nl2br(escape(display)) if display else False
|
||||
|
||||
|
||||
class ManyToManyConverter(models.AbstractModel):
    """Render ``many2many`` field values as a comma-separated name list."""
    _name = 'ir.qweb.field.many2many'
    _description = 'Qweb field many2many'
    _inherit = 'ir.qweb.field'

    @api.model
    def value_to_html(self, value, options):
        if not value:
            return False
        names = value.sudo().mapped('display_name')
        return nl2br(escape(', '.join(names)))
|
||||
|
||||
|
||||
class HTMLConverter(models.AbstractModel):
    # Renders ``html`` field values after QWeb attribute post-processing.
    _name = 'ir.qweb.field.html'
    _description = 'Qweb Field HTML'
    _inherit = 'ir.qweb.field'

    @api.model
    def value_to_html(self, value, options):
        """Parse *value* as HTML, post-process element attributes via
        ``ir.qweb._post_processing_att``, and return the result as Markup."""
        irQweb = self.env['ir.qweb']
        # wrap value inside a body and parse it as HTML
        body = etree.fromstring("<body>%s</body>" % value, etree.HTMLParser(encoding='utf-8'))[0]
        # use post-processing for all nodes with attributes
        for element in body.iter():
            if element.attrib:
                attrib = dict(element.attrib)
                attrib = irQweb._post_processing_att(element.tag, attrib)
                element.attrib.clear()
                element.attrib.update(attrib)
        # strip the wrapping <body>/</body> tags (6 and 7 characters) from the
        # serialized result
        return Markup(etree.tostring(body, encoding='unicode', method='html')[6:-7])
|
||||
|
||||
|
||||
class ImageConverter(models.AbstractModel):
    """ ``image`` widget rendering, inserts a data:uri-using image tag in the
    document. May be overridden by e.g. the website module to generate links
    instead.

    .. todo:: what happens if different output need different converters? e.g.
              reports may need embedded images or FS links whereas website
              needs website-aware
    """
    _name = 'ir.qweb.field.image'
    _description = 'Qweb Field Image'
    _inherit = 'ir.qweb.field'

    @api.model
    def _get_src_data_b64(self, value, options):
        """Validate *value* (base64-encoded image bytes) and return a
        ``data:`` URI embedding it.

        :raises ValueError: if the payload is not a valid image.
        """
        try: # FIXME: maaaaaybe it could also take raw bytes?
            image = Image.open(BytesIO(base64.b64decode(value)))
            image.verify()
        except IOError:
            raise ValueError("Non-image binary fields can not be converted to HTML")
        # image.verify() throws "suitable exceptions" of unspecified types;
        # catch Exception (not a bare except, which would also swallow
        # KeyboardInterrupt/SystemExit)
        except Exception:
            raise ValueError("Invalid image content")

        return "data:%s;base64,%s" % (Image.MIME[image.format], value.decode('ascii'))

    @api.model
    def value_to_html(self, value, options):
        # value is already base64; embed it directly in an <img> tag
        return Markup('<img src="%s">' % self._get_src_data_b64(value, options))
|
||||
|
||||
class ImageUrlConverter(models.AbstractModel):
    """ ``image_url`` widget rendering, inserts an image tag in the
    document.
    """
    _name = 'ir.qweb.field.image_url'
    _description = 'Qweb Field Image'
    _inherit = 'ir.qweb.field.image'

    @api.model
    def value_to_html(self, value, options):
        # Build the tag first, then mark it safe; the URL itself is trusted
        # by the caller and is not escaped here.
        tag = '<img src="%s">' % (value)
        return Markup(tag)
|
||||
|
||||
class MonetaryConverter(models.AbstractModel):
    """ ``monetary`` converter, has a mandatory option
    ``display_currency`` only if field is not of type Monetary.
    Otherwise, if we are in presence of a monetary field, the field definition must
    have a currency_field attribute set.

    The currency is used for formatting *and rounding* of the float value. It
    is assumed that the linked res_currency has a non-empty rounding value and
    res.currency's ``round`` method is used to perform rounding.

    .. note:: the monetary converter internally adds the qweb context to its
              options mapping, so that the context is available to callees.
              It's set under the ``_values`` key.
    """
    _name = 'ir.qweb.field.monetary'
    _description = 'Qweb Field Monetary'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super(MonetaryConverter, self).get_available_options()
        options.update(
            from_currency=dict(type='model', params='res.currency', string=_('Original currency')),
            display_currency=dict(type='model', params='res.currency', string=_('Display currency'), required="value_to_html"),
            date=dict(type='date', string=_('Date'), description=_('Date used for the original currency (only used for t-esc). by default use the current date.')),
            company_id=dict(type='model', params='res.company', string=_('Company'), description=_('Company used for the original currency (only used for t-esc). By default use the user company')),
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        """Format *value* as an amount in ``options['display_currency']``,
        optionally converting it from ``options['from_currency']`` first.

        :raises ValueError: if *value* is not a number.
        """
        display_currency = options['display_currency']

        if not isinstance(value, (int, float)):
            raise ValueError(_("The value send to monetary field is not a number."))

        # lang.format mandates a sprintf-style format. These formats are non-
        # minimal (they have a default fixed precision instead), and
        # lang.format will not set one by default. currency.round will not
        # provide one either. So we need to generate a precision value
        # (integer > 0) from the currency's rounding (a float generally < 1.0).
        fmt = "%.{0}f".format(display_currency.decimal_places)

        if options.get('from_currency'):
            # convert the amount into the display currency at the given date
            date = options.get('date') or fields.Date.today()
            company_id = options.get('company_id')
            if company_id:
                company = self.env['res.company'].browse(company_id)
            else:
                company = self.env.company
            value = options['from_currency']._convert(value, display_currency, company, date)

        lang = self.user_lang()
        # non-breaking spaces keep the amount on one line; the zero-width
        # no-break space keeps the minus sign glued to the digits
        formatted_amount = lang.format(fmt, display_currency.round(value),
                                grouping=True, monetary=True).replace(r' ', '\N{NO-BREAK SPACE}').replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')

        # currency symbol before or after the amount, per currency setting
        pre = post = ''
        if display_currency.position == 'before':
            pre = '{symbol}\N{NO-BREAK SPACE}'.format(symbol=display_currency.symbol or '')
        else:
            post = '\N{NO-BREAK SPACE}{symbol}'.format(symbol=display_currency.symbol or '')

        if options.get('label_price') and lang.decimal_point in formatted_amount:
            # render the decimal part in a smaller font ("price label" style)
            sep = lang.decimal_point
            integer_part, decimal_part = formatted_amount.split(sep)
            integer_part += sep
            return Markup('{pre}<span class="oe_currency_value">{0}</span><span class="oe_currency_value" style="font-size:0.5em">{1}</span>{post}').format(integer_part, decimal_part, pre=pre, post=post)

        return Markup('{pre}<span class="oe_currency_value">{0}</span>{post}').format(formatted_amount, pre=pre, post=post)

    @api.model
    def record_to_html(self, record, field_name, options):
        options = dict(options)
        #currency should be specified by monetary field
        field = record._fields[field_name]

        if not options.get('display_currency') and field.type == 'monetary' and field.get_currency_field(record):
            options['display_currency'] = record[field.get_currency_field(record)]
        if not options.get('display_currency'):
            # search on the model if they are a res.currency field to set as default
            fields = record._fields.items()
            currency_fields = [k for k, v in fields if v.type == 'many2one' and v.comodel_name == 'res.currency']
            if currency_fields:
                options['display_currency'] = record[currency_fields[0]]
        if 'date' not in options:
            options['date'] = record._context.get('date')
        if 'company_id' not in options:
            options['company_id'] = record._context.get('company_id')

        return super(MonetaryConverter, self).record_to_html(record, field_name, options)
|
||||
|
||||
|
||||
# (unit name, lazily-translated label, seconds per unit), ordered from the
# largest unit to the smallest; used by the duration converters below.
TIMEDELTA_UNITS = (
    ('year', _lt('year'), 3600 * 24 * 365),
    ('month', _lt('month'), 3600 * 24 * 30),
    ('week', _lt('week'), 3600 * 24 * 7),
    ('day', _lt('day'), 3600 * 24),
    ('hour', _lt('hour'), 3600),
    ('minute', _lt('minute'), 60),
    ('second', _lt('second'), 1)
)
|
||||
|
||||
|
||||
class FloatTimeConverter(models.AbstractModel):
    """ ``float_time`` converter, to display integral or fractional values as
    human-readable time spans (e.g. 1.5 as "01:30").

    Can be used on any numerical field.
    """
    _name = 'ir.qweb.field.float_time'
    _description = 'Qweb Field Float Time'
    _inherit = 'ir.qweb.field'

    @api.model
    def value_to_html(self, value, options):
        # Delegation: the shared helper already produces the "HH:MM" form.
        return format_duration(value)
|
||||
|
||||
|
||||
class TimeConverter(models.AbstractModel):
    """ ``time`` converter, to display integer or fractional value as
    human-readable time (e.g. 1.5 as "1:30 AM"). The unit of this value
    is in hours.

    Can be used on any numerical field between: 0 <= value < 24
    """
    _name = 'ir.qweb.field.time'
    _description = 'QWeb Field Time'
    _inherit = 'ir.qweb.field'

    @api.model
    def value_to_html(self, value, options):
        if value < 0:
            raise ValueError(_("The value (%s) passed should be positive", value))
        # split the fractional hour count into whole hours and minutes
        total_minutes = int(abs(value) * 60)
        hours, minutes = divmod(total_minutes, 60)
        if hours > 23:
            raise ValueError(_("The hour must be between 0 and 23"))
        t = time(hour=hours, minute=minutes)

        locale = babel_locale_parse(self.user_lang().code)
        pattern = options.get('format', 'short')

        return babel.dates.format_time(t, format=pattern, tzinfo=None, locale=locale)
|
||||
|
||||
|
||||
class DurationConverter(models.AbstractModel):
    """ ``duration`` converter, to display integral or fractional values as
    human-readable time spans (e.g. 1.5 as "1 hour 30 minutes").

    Can be used on any numerical field.

    Has an option ``unit`` which can be one of ``second``, ``minute``,
    ``hour``, ``day``, ``week`` or ``year``, used to interpret the numerical
    field value before converting it. By default use ``second``.

    Has an option ``round``. By default use ``second``.

    Has an option ``digital`` to display 01:00 instead of 1 hour

    Sub-second values will be ignored.
    """
    _name = 'ir.qweb.field.duration'
    _description = 'Qweb Field Duration'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super(DurationConverter, self).get_available_options()
        unit = [(value, str(label)) for value, label, ratio in TIMEDELTA_UNITS]
        options.update(
            digital=dict(type="boolean", string=_('Digital formatting')),
            unit=dict(type="selection", params=unit, string=_('Date unit'), description=_('Date unit used for comparison and formatting'), default_value='second', required=True),
            round=dict(type="selection", params=unit, string=_('Rounding unit'), description=_("Date unit used for the rounding. The value must be smaller than 'hour' if you use the digital formatting."), default_value='second'),
            format=dict(
                type="selection",
                params=[
                    ('long', _('Long')),
                    ('short', _('Short')),
                    ('narrow', _('Narrow'))],
                string=_('Format'),
                description=_("Formatting: long, short, narrow (not used for digital)"),
                default_value='long'
            ),
            add_direction=dict(
                type="boolean",
                string=_("Add direction"),
                description=_("Add directional information (not used for digital)")
            ),
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        """Render *value* (expressed in ``options['unit']``) as a duration,
        either digitally ("HH:MM") or in words via babel."""
        units = {unit: duration for unit, label, duration in TIMEDELTA_UNITS}

        locale = babel_locale_parse(self.user_lang().code)
        factor = units[options.get('unit', 'second')]
        round_to = units[options.get('round', 'second')]

        # digital display cannot round coarser than hours ("HH:MM" layout)
        if options.get('digital') and round_to > 3600:
            round_to = 3600

        # convert to seconds, then round to a multiple of the rounding unit
        r = round((value * factor) / round_to) * round_to

        sections = []
        sign = ''
        if value < 0:
            r = -r
            sign = '-'

        if options.get('digital'):
            # greedy decomposition into hours/minutes/seconds, zero-padded
            for unit, label, secs_per_unit in TIMEDELTA_UNITS:
                if secs_per_unit > 3600:
                    continue
                v, r = divmod(r, secs_per_unit)
                if not v and (secs_per_unit > factor or secs_per_unit < round_to):
                    # skip leading/trailing zero sections outside the
                    # requested unit/rounding range
                    continue
                sections.append(u"%02.0f" % int(round(v)))
            return sign + u':'.join(sections)

        # textual rendering: one babel-formatted section per non-zero unit
        for unit, label, secs_per_unit in TIMEDELTA_UNITS:
            v, r = divmod(r, secs_per_unit)
            if not v:
                continue
            section = babel.dates.format_timedelta(
                v*secs_per_unit,
                granularity=round_to,
                add_direction=options.get('add_direction'),
                format=options.get('format', 'long'),
                threshold=1,
                locale=locale)
            if section:
                sections.append(section)

        if sign:
            sections.insert(0, sign)
        return u' '.join(sections)
|
||||
|
||||
|
||||
class RelativeDatetimeConverter(models.AbstractModel):
    """Render a datetime as a human-readable delta from a reference date."""
    _name = 'ir.qweb.field.relative'
    _description = 'Qweb Field Relative'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super().get_available_options()
        options.update(
            now=dict(type='datetime', string=_('Reference date'), description=_('Date to compare with the field value, by default use the current date.'))
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        locale = babel_locale_parse(self.user_lang().code)

        if isinstance(value, str):
            value = fields.Datetime.from_string(value)

        # value should be a naive datetime in UTC. So is fields.Datetime.now()
        reference = fields.Datetime.from_string(options['now'])
        delta = value - reference

        return pycompat.to_text(babel.dates.format_timedelta(delta, add_direction=True, locale=locale))

    @api.model
    def record_to_html(self, record, field_name, options):
        # Default the reference instant to the field's notion of "now".
        if 'now' not in options:
            options = dict(options, now=record._fields[field_name].now())
        return super().record_to_html(record, field_name, options)
|
||||
|
||||
|
||||
class BarcodeConverter(models.AbstractModel):
    """ ``barcode`` widget rendering, inserts a data:uri-using image tag in the
    document. May be overridden by e.g. the website module to generate links
    instead.
    """
    _name = 'ir.qweb.field.barcode'
    _description = 'Qweb Field Barcode'
    _inherit = 'ir.qweb.field'

    @api.model
    def get_available_options(self):
        options = super().get_available_options()
        options.update(
            symbology=dict(type='string', string=_('Barcode symbology'), description=_('Barcode type, eg: UPCA, EAN13, Code128'), default_value='Code128'),
            width=dict(type='integer', string=_('Width'), default_value=600),
            height=dict(type='integer', string=_('Height'), default_value=100),
            humanreadable=dict(type='integer', string=_('Human Readable'), default_value=0),
            quiet=dict(type='integer', string='Quiet', default_value=1),
            mask=dict(type='string', string='Mask', default_value='')
        )
        return options

    @api.model
    def value_to_html(self, value, options=None):
        if not value:
            return ''

        symbology = options.get('symbology', 'Code128')
        # forward only the rendering parameters understood by the report barcode API
        render_keys = ('width', 'height', 'humanreadable', 'quiet', 'mask')
        render_params = {k: v for k, v in options.items() if k in render_keys}
        png_data = self.env['ir.actions.report'].barcode(symbology, value, **render_params)

        img = html.Element('img')
        # copy through whitelisted ``img_*`` options as <img> attributes
        for key, val in options.items():
            attr = key[4:]
            if key.startswith('img_') and attr in safe_attrs:
                img.set(attr, val)
        if not img.get('alt'):
            img.set('alt', _('Barcode %s') % value)
        img.set('src', 'data:image/png;base64,%s' % base64.b64encode(png_data).decode())
        return Markup(html.tostring(img, encoding='unicode'))
|
||||
|
||||
|
||||
class Contact(models.AbstractModel):
    """ Renders a many2one to a partner-like record as a contact card
    (name, address, phone, ...) through the ``base.contact`` QWeb template.
    """
    _name = 'ir.qweb.field.contact'
    _description = 'Qweb Field Contact'
    _inherit = 'ir.qweb.field.many2one'

    @api.model
    def get_available_options(self):
        """ Declare the widget options editable from the website/report editor:
        which contact fields to show, the address separator, and display flags.
        """
        options = super(Contact, self).get_available_options()
        # selectable contact fields; 'default' marks the ones shown when the
        # user did not customize the 'fields' option
        contact_fields = [
            {'field_name': 'name', 'label': _('Name'), 'default': True},
            {'field_name': 'address', 'label': _('Address'), 'default': True},
            {'field_name': 'phone', 'label': _('Phone'), 'default': True},
            {'field_name': 'mobile', 'label': _('Mobile'), 'default': True},
            {'field_name': 'email', 'label': _('Email'), 'default': True},
            {'field_name': 'vat', 'label': _('VAT')},
        ]
        separator_params = dict(
            type='selection',
            selection=[[" ", _("Space")], [",", _("Comma")], ["-", _("Dash")], ["|", _("Vertical bar")], ["/", _("Slash")]],
            placeholder=_('Linebreak'),
        )
        options.update(
            fields=dict(type='array', params=dict(type='selection', params=contact_fields), string=_('Displayed fields'), description=_('List of contact fields to display in the widget'), default_value=[param.get('field_name') for param in contact_fields if param.get('default')]),
            separator=dict(type='selection', params=separator_params, string=_('Address separator'), description=_('Separator use to split the address from the display_name.'), default_value=False),
            no_marker=dict(type='boolean', string=_('Hide badges'), description=_("Don't display the font awesome marker")),
            no_tag_br=dict(type='boolean', string=_('Use comma'), description=_("Use comma instead of the <br> tag to display the address")),
            phone_icons=dict(type='boolean', string=_('Display phone icons'), description=_("Display the phone icons even if no_marker is True")),
            country_image=dict(type='boolean', string=_('Display country image'), description=_("Display the country image if the field is present on the record")),
        )
        return options

    @api.model
    def value_to_html(self, value, options):
        """ Render the contact card for ``value`` (a partner-like record).

        Returns '' for an empty record; otherwise renders the
        ``base.contact`` template with the values extracted below.
        """
        if not value:
            return ''

        # fields the template should display, in order
        opf = options.get('fields') or ["name", "address", "phone", "mobile", "email"]
        sep = options.get('separator')
        # choose the joiner used between address lines; escaped joiners
        # auto-escape the joined parts, Markup('<br/>') keeps the line break
        if sep:
            opsep = escape(sep)
        elif options.get('no_tag_br'):
            # escaped joiners will auto-escape joined params
            opsep = escape(', ')
        else:
            opsep = Markup('<br/>')

        # sudo: the card may be rendered for users without partner access;
        # show_address makes name_get() append the formatted address lines
        value = value.sudo().with_context(show_address=True)
        name_get = value.name_get()[0][1]
        # Avoid having something like:
        # name_get = 'Foo\n \n' -> This is a res.partner with a name and no address
        # That would return markup('<br/>') as address. But there is no address set.
        if any(elem.strip() for elem in name_get.split("\n")[1:]):
            address = opsep.join(name_get.split("\n")[1:]).strip()
        else:
            address = ''
        # qcontext for the base.contact template
        val = {
            'name': name_get.split("\n")[0],
            'address': address,
            'phone': value.phone,
            'mobile': value.mobile,
            'city': value.city,
            'country_id': value.country_id.display_name,
            'website': value.website,
            'email': value.email,
            'vat': value.vat,
            'vat_label': value.country_id.vat_label or _('VAT'),
            'fields': opf,
            'object': value,
            'options': options
        }
        return self.env['ir.qweb']._render('base.contact', val, minimal_qcontext=True)
|
||||
|
||||
|
||||
class QwebView(models.AbstractModel):
    """ Renders a many2one pointing at an ``ir.ui.view`` by rendering the
    target view itself, with the qcontext taken from ``options['values']``.
    """
    _name = 'ir.qweb.field.qweb'
    _description = 'Qweb Field qweb'
    _inherit = 'ir.qweb.field.many2one'

    @api.model
    def record_to_html(self, record, field_name, options):
        target = record[field_name]
        if not target:
            return ''

        # refuse to render anything that is not a view record
        if target._name != "ir.ui.view":
            _logger.warning("%s.%s must be a 'ir.ui.view', got %r.", record, field_name, target._name)
            return ''

        qcontext = options.get('values', {})
        return self.env['ir.qweb']._render(target.id, qcontext)
|
||||
264
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_rule.py
Normal file
264
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_rule.py
Normal file
|
|
@ -0,0 +1,264 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import logging
|
||||
import warnings
|
||||
|
||||
from odoo import api, fields, models, tools, SUPERUSER_ID, _
|
||||
from odoo.exceptions import AccessError, ValidationError
|
||||
from odoo.osv import expression
|
||||
from odoo.tools import config
|
||||
from odoo.tools.safe_eval import safe_eval, time
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
class IrRule(models.Model):
    """ Record rules: row-level security. Each rule attaches a domain to a
    model for some of the read/write/create/unlink operations; rules without
    groups are "global" (AND-ed for everyone), rules with groups are OR-ed
    together for the users belonging to those groups.
    """
    _name = 'ir.rule'
    _description = 'Record Rule'
    _order = 'model_id DESC,id'
    # the four operations a rule can restrict
    _MODES = ('read', 'write', 'create', 'unlink')
    _allow_sudo_commands = False

    name = fields.Char(index=True)
    active = fields.Boolean(default=True, help="If you uncheck the active field, it will disable the record rule without deleting it (if you delete a native record rule, it may be re-created when you reload the module).")
    model_id = fields.Many2one('ir.model', string='Model', index=True, required=True, ondelete="cascade")
    # empty groups => global rule (see the 'global' computed field added
    # after the class definition, 'global' being a Python keyword)
    groups = fields.Many2many('res.groups', 'rule_group_rel', 'rule_group_id', 'group_id', ondelete='restrict')
    domain_force = fields.Text(string='Domain')
    perm_read = fields.Boolean(string='Apply for Read', default=True)
    perm_write = fields.Boolean(string='Apply for Write', default=True)
    perm_create = fields.Boolean(string='Apply for Create', default=True)
    perm_unlink = fields.Boolean(string='Apply for Delete', default=True)

    _sql_constraints = [
        ('no_access_rights',
         'CHECK (perm_read!=False or perm_write!=False or perm_create!=False or perm_unlink!=False)',
         'Rule must have at least one checked access right !'),
    ]

    @api.model
    def _eval_context(self):
        """Returns a dictionary to use as evaluation context for
        ir.rule domains.
        Note: company_ids contains the ids of the activated companies
        by the user with the switch company menu. These companies are
        filtered and trusted.
        """
        # use an empty context for 'user' to make the domain evaluation
        # independent from the context
        return {
            'user': self.env.user.with_context({}),
            'time': time,
            'company_ids': self.env.companies.ids,
            'company_id': self.env.company.id,
        }

    @api.depends('groups')
    def _compute_global(self):
        # a rule with no groups applies to everyone; 'global' is a Python
        # keyword so the field is accessed via item syntax
        for rule in self:
            rule['global'] = not rule.groups

    @api.constrains('model_id')
    def _check_model_name(self):
        # Don't allow rules on rules records (this model).
        if any(rule.model_id.model == self._name for rule in self):
            raise ValidationError(_('Rules can not be applied on the Record Rules model.'))

    @api.constrains('active', 'domain_force', 'model_id')
    def _check_domain(self):
        """ Validate that the rule's domain both evaluates and parses against
        its model, so broken domains are rejected at save time.
        """
        eval_context = self._eval_context()
        for rule in self:
            model = rule.model_id.model
            if rule.active and rule.domain_force and model in self.env:
                try:
                    domain = safe_eval(rule.domain_force, eval_context)
                    # building the expression checks the domain's validity
                    expression.expression(domain, self.env[model].sudo())
                except Exception as e:
                    raise ValidationError(_('Invalid domain: %s', e))

    def _compute_domain_keys(self):
        """ Return the list of context keys to use for caching ``_compute_domain``. """
        return ['allowed_company_ids']

    def _get_failing(self, for_records, mode='read'):
        """ Returns the rules for the mode for the current user which fail on
        the specified records.

        Can return any global rule and/or all local rules (since local rules
        are OR-ed together, the entire group succeeds or fails, while global
        rules get AND-ed and can each fail)
        """
        Model = for_records.browse(()).sudo()
        eval_context = self._eval_context()

        all_rules = self._get_rules(Model._name, mode=mode).sudo()

        # first check if the group rules fail for any record (aka if
        # searching on (records, group_rules) filters out some of the records)
        group_rules = all_rules.filtered(lambda r: r.groups and r.groups & self.env.user.groups_id)
        group_domains = expression.OR([
            safe_eval(r.domain_force, eval_context) if r.domain_force else []
            for r in group_rules
        ])
        # if all records get returned, the group rules are not failing
        if Model.search_count(expression.AND([[('id', 'in', for_records.ids)], group_domains])) == len(for_records):
            group_rules = self.browse(())

        # failing rules are previously selected group rules or any failing global rule
        def is_failing(r, ids=for_records.ids):
            # a rule fails if searching with its domain drops some of the ids
            dom = safe_eval(r.domain_force, eval_context) if r.domain_force else []
            return Model.search_count(expression.AND([
                [('id', 'in', ids)],
                expression.normalize_domain(dom)
            ])) < len(ids)

        return all_rules.filtered(lambda r: r in group_rules or (not r.groups and is_failing(r))).with_user(self.env.user)

    def _get_rules(self, model_name, mode='read'):
        """ Returns all the rules matching the model for the mode for the
        current user.
        """
        if mode not in self._MODES:
            raise ValueError('Invalid mode: %r' % (mode,))

        # superuser bypasses all record rules
        if self.env.su:
            return self.browse(())

        # NOTE: ``mode`` is interpolated via .format() into the column name;
        # this is safe because it was validated against _MODES above
        query = """ SELECT r.id FROM ir_rule r JOIN ir_model m ON (r.model_id=m.id)
                    WHERE m.model=%s AND r.active AND r.perm_{mode}
                    AND (r.id IN (SELECT rule_group_id FROM rule_group_rel rg
                                  JOIN res_groups_users_rel gu ON (rg.group_id=gu.gid)
                                  WHERE gu.uid=%s)
                         OR r.global)
                    ORDER BY r.id
                """.format(mode=mode)
        self._cr.execute(query, (model_name, self._uid))
        return self.browse(row[0] for row in self._cr.fetchall())

    @api.model
    @tools.conditional(
        # caching is disabled in 'xml' dev mode so rule edits apply immediately
        'xml' not in config['dev_mode'],
        tools.ormcache('self.env.uid', 'self.env.su', 'model_name', 'mode',
                       'tuple(self._compute_domain_context_values())'),
    )
    def _compute_domain(self, model_name, mode="read"):
        """ Return the combined rule domain for ``model_name``/``mode`` for
        the current user, or ``None`` when no rule applies.
        """
        rules = self._get_rules(model_name, mode=mode)
        if not rules:
            return

        # browse user and rules as SUPERUSER_ID to avoid access errors!
        eval_context = self._eval_context()
        user_groups = self.env.user.groups_id
        global_domains = []                     # list of domains
        group_domains = []                      # list of domains
        for rule in rules.sudo():
            # evaluate the domain for the current user
            dom = safe_eval(rule.domain_force, eval_context) if rule.domain_force else []
            dom = expression.normalize_domain(dom)
            if not rule.groups:
                global_domains.append(dom)
            elif rule.groups & user_groups:
                group_domains.append(dom)

        # combine global domains and group domains
        if not group_domains:
            return expression.AND(global_domains)
        return expression.AND(global_domains + [expression.OR(group_domains)])

    def _compute_domain_context_values(self):
        """ Yield the context values used as extra ormcache keys for
        ``_compute_domain`` (see ``_compute_domain_keys``). """
        for k in self._compute_domain_keys():
            v = self._context.get(k)
            if isinstance(v, list):
                # currently this could be a frozenset (to avoid depending on
                # the order of allowed_company_ids) but it seems safer if
                # possibly slightly more miss-y to use a tuple
                v = tuple(v)
            yield v

    @api.model
    def clear_cache(self):
        # kept for backward compatibility only
        warnings.warn("Deprecated IrRule.clear_cache(), use IrRule.clear_caches() instead", DeprecationWarning)
        self.clear_caches()

    def unlink(self):
        # invalidate cached rule domains after removal
        res = super(IrRule, self).unlink()
        self.clear_caches()
        return res

    @api.model_create_multi
    def create(self, vals_list):
        res = super(IrRule, self).create(vals_list)
        # DLE P33: tests
        self.env.flush_all()
        self.clear_caches()
        return res

    def write(self, vals):
        res = super(IrRule, self).write(vals)
        # DLE P33: tests
        # - odoo/addons/test_access_rights/tests/test_feedback.py
        # - odoo/addons/test_access_rights/tests/test_ir_rules.py
        # - odoo/addons/base/tests/test_orm.py (/home/dle/src/odoo/master-nochange-fp/odoo/addons/base/tests/test_orm.py)
        self.env.flush_all()
        self.clear_caches()
        return res

    def _make_access_error(self, operation, records):
        """ Build the AccessError raised when ``operation`` on ``records`` is
        denied by record rules. In debug mode (group_no_one internal users)
        the error additionally lists the failing records and rules.
        """
        _logger.info('Access Denied by record rules for operation: %s on record ids: %r, uid: %s, model: %s', operation, records.ids[:6], self._uid, records._name)
        # switch to the user's context (e.g. lang) so the message is translated
        self = self.with_context(self.env.user.context_get())

        model = records._name
        description = self.env['ir.model']._get(model).name or model
        msg_heads = {
            # Messages are declared in extenso so they are properly exported in translation terms
            'read': _("Due to security restrictions, you are not allowed to access '%(document_kind)s' (%(document_model)s) records.", document_kind=description, document_model=model),
            'write': _("Due to security restrictions, you are not allowed to modify '%(document_kind)s' (%(document_model)s) records.", document_kind=description, document_model=model),
            'create': _("Due to security restrictions, you are not allowed to create '%(document_kind)s' (%(document_model)s) records.", document_kind=description, document_model=model),
            'unlink': _("Due to security restrictions, you are not allowed to delete '%(document_kind)s' (%(document_model)s) records.", document_kind=description, document_model=model)
        }
        operation_error = msg_heads[operation]
        resolution_info = _("Contact your administrator to request access if necessary.")

        if not self.user_has_groups('base.group_no_one') or not self.env.user.has_group('base.group_user'):
            return AccessError(f"{operation_error}\n\n{resolution_info}")

        # This extended AccessError is only displayed in debug mode.
        # Note that by default, public and portal users do not have
        # the group "base.group_no_one", even if debug mode is enabled,
        # so it is relatively safe here to include the list of rules and record names.
        rules = self._get_failing(records, mode=operation).sudo()

        records_sudo = records[:6].sudo()
        company_related = any('company_id' in (r.domain_force or '') for r in rules)

        def get_record_description(rec):
            # If the user has access to the company of the record, add this
            # information in the description to help them to change company
            if company_related and 'company_id' in rec and rec.company_id in self.env.user.company_ids:
                return f'{rec.display_name} (id={rec.id}, company={rec.company_id.display_name})'
            return f'{rec.display_name} (id={rec.id})'

        records_description = ', '.join(get_record_description(rec) for rec in records_sudo)
        failing_records = _("Records: %s", records_description)

        user_description = f'{self.env.user.name} (id={self.env.user.id})'
        failing_user = _("User: %s", user_description)

        rules_description = '\n'.join(f'- {rule.name}' for rule in rules)
        failing_rules = _("This restriction is due to the following rules:\n%s", rules_description)
        if company_related:
            failing_rules += "\n\n" + _('Note: this might be a multi-company issue.')

        # clean up the cache of records prefetched with display_name above
        records_sudo.invalidate_recordset()

        msg = f"{operation_error}\n\n{failing_records}\n{failing_user}\n\n{failing_rules}\n\n{resolution_info}"
        return AccessError(msg)
|
||||
|
||||
|
||||
#
# Hack for field 'global': this field cannot be defined like others, because
# 'global' is a Python keyword. Therefore, we add it to the class by assignment.
# Note that the attribute '_module' is normally added by the class' metaclass.
#
global_ = fields.Boolean(compute='_compute_global', store=True,
                         help="If no group is specified the rule is global and applied to everyone")
setattr(IrRule, 'global', global_)
# tell the field descriptor its owner class and attribute name, as the
# metaclass would have done for a regular class-body assignment
global_.__set_name__(IrRule, 'global')
|
||||
|
|
@ -0,0 +1,400 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
import pytz
|
||||
from psycopg2 import sql
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _create_sequence(cr, seq_name, number_increment, number_next):
|
||||
""" Create a PostreSQL sequence. """
|
||||
if number_increment == 0:
|
||||
raise UserError(_('Step must not be zero.'))
|
||||
sql = "CREATE SEQUENCE %s INCREMENT BY %%s START WITH %%s" % seq_name
|
||||
cr.execute(sql, (number_increment, number_next))
|
||||
|
||||
|
||||
def _drop_sequences(cr, seq_names):
    """ Drop the PostgreSQL sequences if they exist. """
    identifiers = [sql.Identifier(name) for name in seq_names]
    joined_names = sql.SQL(',').join(identifiers)
    # RESTRICT is the default; it prevents dropping the sequence if an
    # object depends on it.
    cr.execute(sql.SQL("DROP SEQUENCE IF EXISTS {} RESTRICT").format(joined_names))
|
||||
|
||||
|
||||
def _alter_sequence(cr, seq_name, number_increment=None, number_next=None):
    """ Alter a PostgreSQL sequence: change its step and/or its restart value.

    Parameters left to ``None`` are not modified. Does nothing when the
    sequence does not exist yet.
    """
    if number_increment == 0:
        raise UserError(_("Step must not be zero."))
    # check existence first ('S' is the pg_class relkind for sequences)
    cr.execute("SELECT relname FROM pg_class WHERE relkind=%s AND relname=%s", ('S', seq_name))
    if not cr.fetchone():
        # sequence is not created yet, we're inside create() so ignore it, will be set later
        return
    # build the statement incrementally from composable psycopg2 sql parts
    statement = sql.SQL("ALTER SEQUENCE") + sql.Identifier(seq_name)
    params = []
    if number_increment is not None:
        statement += sql.SQL("INCREMENT BY") + sql.Placeholder()
        params.append(number_increment)
    if number_next is not None:
        statement += sql.SQL("RESTART WITH") + sql.Placeholder()
        params.append(number_next)
    # Composed.join(' ') re-assembles the accumulated parts separated by spaces
    cr.execute(statement.join(' '), params)
|
||||
|
||||
|
||||
def _select_nextval(cr, seq_name):
|
||||
cr.execute("SELECT nextval(%s)", [seq_name])
|
||||
return cr.fetchone()
|
||||
|
||||
|
||||
def _update_nogap(self, number_increment):
    """ Allocate the current ``number_next`` of a 'no_gap' sequence and bump
    the counter stored on the ir_sequence row itself.

    The row is locked with FOR UPDATE NOWAIT, so a concurrent transaction
    trying to allocate from the same sequence fails fast instead of waiting.
    Returns the value reserved for the caller.
    """
    # push any pending ORM value of number_next to the database first
    self.flush_recordset(['number_next'])
    number_next = self.number_next
    # take the row lock; NOWAIT raises immediately on concurrent access
    self._cr.execute("SELECT number_next FROM %s WHERE id=%%s FOR UPDATE NOWAIT" % self._table, [self.id])
    self._cr.execute("UPDATE %s SET number_next=number_next+%%s WHERE id=%%s " % self._table, (number_increment, self.id))
    # the raw UPDATE bypassed the ORM: drop the now-stale cached value
    self.invalidate_recordset(['number_next'])
    return number_next
|
||||
|
||||
def _predict_nextval(self, seq_id):
    """Predict next value for PostgreSQL sequence without consuming it"""
    # Cannot use currval() as it requires prior call to nextval()
    seqname = 'ir_sequence_%s' % seq_id
    seqtable = sql.Identifier(seqname)
    # PostgreSQL >= 10: increment_by lives in the pg_sequences catalog view
    query = sql.SQL("""SELECT last_value,
                      (SELECT increment_by
                       FROM pg_sequences
                       WHERE sequencename = %s),
                      is_called
               FROM {}""")
    params = [seqname]
    if self.env.cr._cnx.server_version < 100000:
        # PostgreSQL < 10 exposes increment_by directly on the sequence relation
        query = sql.SQL("SELECT last_value, increment_by, is_called FROM {}")
        params = []
    self.env.cr.execute(query.format(seqtable), params)
    (last_value, increment_by, is_called) = self.env.cr.fetchone()
    if is_called:
        # nextval() was already called at least once: next value is one step ahead
        return last_value + increment_by
    # sequence has just been RESTARTed to return last_value next time
    return last_value
|
||||
|
||||
|
||||
class IrSequence(models.Model):
|
||||
""" Sequence model.
|
||||
|
||||
The sequence model allows to define and use so-called sequence objects.
|
||||
Such objects are used to generate unique identifiers in a transaction-safe
|
||||
way.
|
||||
|
||||
"""
|
||||
_name = 'ir.sequence'
|
||||
_description = 'Sequence'
|
||||
_order = 'name'
|
||||
_allow_sudo_commands = False
|
||||
|
||||
def _get_number_next_actual(self):
|
||||
'''Return number from ir_sequence row when no_gap implementation,
|
||||
and number from postgres sequence when standard implementation.'''
|
||||
for seq in self:
|
||||
if not seq.id:
|
||||
seq.number_next_actual = 0
|
||||
elif seq.implementation != 'standard':
|
||||
seq.number_next_actual = seq.number_next
|
||||
else:
|
||||
seq_id = "%03d" % seq.id
|
||||
seq.number_next_actual = _predict_nextval(self, seq_id)
|
||||
|
||||
def _set_number_next_actual(self):
|
||||
for seq in self:
|
||||
seq.write({'number_next': seq.number_next_actual or 1})
|
||||
|
||||
@api.model
|
||||
def _get_current_sequence(self, sequence_date=None):
|
||||
'''Returns the object on which we can find the number_next to consider for the sequence.
|
||||
It could be an ir.sequence or an ir.sequence.date_range depending if use_date_range is checked
|
||||
or not. This function will also create the ir.sequence.date_range if none exists yet for today
|
||||
'''
|
||||
if not self.use_date_range:
|
||||
return self
|
||||
sequence_date = sequence_date or fields.Date.today()
|
||||
seq_date = self.env['ir.sequence.date_range'].search(
|
||||
[('sequence_id', '=', self.id), ('date_from', '<=', sequence_date), ('date_to', '>=', sequence_date)], limit=1)
|
||||
if seq_date:
|
||||
return seq_date[0]
|
||||
#no date_range sequence was found, we create a new one
|
||||
return self._create_date_range_seq(sequence_date)
|
||||
|
||||
name = fields.Char(required=True)
|
||||
code = fields.Char(string='Sequence Code')
|
||||
implementation = fields.Selection([('standard', 'Standard'), ('no_gap', 'No gap')],
|
||||
string='Implementation', required=True, default='standard',
|
||||
help="While assigning a sequence number to a record, the 'no gap' sequence implementation ensures that each previous sequence number has been assigned already. "
|
||||
"While this sequence implementation will not skip any sequence number upon assignment, there can still be gaps in the sequence if records are deleted. "
|
||||
"The 'no gap' implementation is slower than the standard one.")
|
||||
active = fields.Boolean(default=True)
|
||||
prefix = fields.Char(help="Prefix value of the record for the sequence", trim=False)
|
||||
suffix = fields.Char(help="Suffix value of the record for the sequence", trim=False)
|
||||
number_next = fields.Integer(string='Next Number', required=True, default=1, help="Next number of this sequence")
|
||||
number_next_actual = fields.Integer(compute='_get_number_next_actual', inverse='_set_number_next_actual',
|
||||
string='Actual Next Number',
|
||||
help="Next number that will be used. This number can be incremented "
|
||||
"frequently so the displayed value might already be obsolete")
|
||||
number_increment = fields.Integer(string='Step', required=True, default=1,
|
||||
help="The next number of the sequence will be incremented by this number")
|
||||
padding = fields.Integer(string='Sequence Size', required=True, default=0,
|
||||
help="Odoo will automatically adds some '0' on the left of the "
|
||||
"'Next Number' to get the required padding size.")
|
||||
company_id = fields.Many2one('res.company', string='Company',
|
||||
default=lambda s: s.env.company)
|
||||
use_date_range = fields.Boolean(string='Use subsequences per date_range')
|
||||
date_range_ids = fields.One2many('ir.sequence.date_range', 'sequence_id', string='Subsequences')
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
""" Create a sequence, in implementation == standard a fast gaps-allowed PostgreSQL sequence is used.
|
||||
"""
|
||||
seqs = super().create(vals_list)
|
||||
for seq in seqs:
|
||||
if seq.implementation == 'standard':
|
||||
_create_sequence(self._cr, "ir_sequence_%03d" % seq.id, seq.number_increment or 1, seq.number_next or 1)
|
||||
return seqs
|
||||
|
||||
def unlink(self):
|
||||
_drop_sequences(self._cr, ["ir_sequence_%03d" % x.id for x in self])
|
||||
return super(IrSequence, self).unlink()
|
||||
|
||||
def write(self, values):
|
||||
new_implementation = values.get('implementation')
|
||||
for seq in self:
|
||||
# 4 cases: we test the previous impl. against the new one.
|
||||
i = values.get('number_increment', seq.number_increment)
|
||||
n = values.get('number_next', seq.number_next)
|
||||
if seq.implementation == 'standard':
|
||||
if new_implementation in ('standard', None):
|
||||
# Implementation has NOT changed.
|
||||
# Only change sequence if really requested.
|
||||
if values.get('number_next'):
|
||||
_alter_sequence(self._cr, "ir_sequence_%03d" % seq.id, number_next=n)
|
||||
if seq.number_increment != i:
|
||||
_alter_sequence(self._cr, "ir_sequence_%03d" % seq.id, number_increment=i)
|
||||
seq.date_range_ids._alter_sequence(number_increment=i)
|
||||
else:
|
||||
_drop_sequences(self._cr, ["ir_sequence_%03d" % seq.id])
|
||||
for sub_seq in seq.date_range_ids:
|
||||
_drop_sequences(self._cr, ["ir_sequence_%03d_%03d" % (seq.id, sub_seq.id)])
|
||||
else:
|
||||
if new_implementation in ('no_gap', None):
|
||||
pass
|
||||
else:
|
||||
_create_sequence(self._cr, "ir_sequence_%03d" % seq.id, i, n)
|
||||
for sub_seq in seq.date_range_ids:
|
||||
_create_sequence(self._cr, "ir_sequence_%03d_%03d" % (seq.id, sub_seq.id), i, n)
|
||||
res = super(IrSequence, self).write(values)
|
||||
# DLE P179
|
||||
self.flush_model(values.keys())
|
||||
return res
|
||||
|
||||
def _next_do(self):
|
||||
if self.implementation == 'standard':
|
||||
number_next = _select_nextval(self._cr, 'ir_sequence_%03d' % self.id)
|
||||
else:
|
||||
number_next = _update_nogap(self, self.number_increment)
|
||||
return self.get_next_char(number_next)
|
||||
|
||||
def _get_prefix_suffix(self, date=None, date_range=None):
|
||||
def _interpolate(s, d):
|
||||
return (s % d) if s else ''
|
||||
|
||||
def _interpolation_dict():
|
||||
now = range_date = effective_date = datetime.now(pytz.timezone(self._context.get('tz') or 'UTC'))
|
||||
if date or self._context.get('ir_sequence_date'):
|
||||
effective_date = fields.Datetime.from_string(date or self._context.get('ir_sequence_date'))
|
||||
if date_range or self._context.get('ir_sequence_date_range'):
|
||||
range_date = fields.Datetime.from_string(date_range or self._context.get('ir_sequence_date_range'))
|
||||
|
||||
sequences = {
|
||||
'year': '%Y', 'month': '%m', 'day': '%d', 'y': '%y', 'doy': '%j', 'woy': '%W',
|
||||
'weekday': '%w', 'h24': '%H', 'h12': '%I', 'min': '%M', 'sec': '%S'
|
||||
}
|
||||
res = {}
|
||||
for key, format in sequences.items():
|
||||
res[key] = effective_date.strftime(format)
|
||||
res['range_' + key] = range_date.strftime(format)
|
||||
res['current_' + key] = now.strftime(format)
|
||||
|
||||
return res
|
||||
|
||||
self.ensure_one()
|
||||
d = _interpolation_dict()
|
||||
try:
|
||||
interpolated_prefix = _interpolate(self.prefix, d)
|
||||
interpolated_suffix = _interpolate(self.suffix, d)
|
||||
except (ValueError, TypeError):
|
||||
raise UserError(_('Invalid prefix or suffix for sequence \'%s\'') % self.name)
|
||||
return interpolated_prefix, interpolated_suffix
|
||||
|
||||
def get_next_char(self, number_next):
|
||||
interpolated_prefix, interpolated_suffix = self._get_prefix_suffix()
|
||||
return interpolated_prefix + '%%0%sd' % self.padding % number_next + interpolated_suffix
|
||||
|
||||
def _create_date_range_seq(self, date):
|
||||
year = fields.Date.from_string(date).strftime('%Y')
|
||||
date_from = '{}-01-01'.format(year)
|
||||
date_to = '{}-12-31'.format(year)
|
||||
date_range = self.env['ir.sequence.date_range'].search([('sequence_id', '=', self.id), ('date_from', '>=', date), ('date_from', '<=', date_to)], order='date_from desc', limit=1)
|
||||
if date_range:
|
||||
date_to = date_range.date_from + timedelta(days=-1)
|
||||
date_range = self.env['ir.sequence.date_range'].search([('sequence_id', '=', self.id), ('date_to', '>=', date_from), ('date_to', '<=', date)], order='date_to desc', limit=1)
|
||||
if date_range:
|
||||
date_from = date_range.date_to + timedelta(days=1)
|
||||
seq_date_range = self.env['ir.sequence.date_range'].sudo().create({
|
||||
'date_from': date_from,
|
||||
'date_to': date_to,
|
||||
'sequence_id': self.id,
|
||||
})
|
||||
return seq_date_range
|
||||
|
||||
def _next(self, sequence_date=None):
|
||||
""" Returns the next number in the preferred sequence in all the ones given in self."""
|
||||
if not self.use_date_range:
|
||||
return self._next_do()
|
||||
# date mode
|
||||
dt = sequence_date or self._context.get('ir_sequence_date', fields.Date.today())
|
||||
seq_date = self.env['ir.sequence.date_range'].search([('sequence_id', '=', self.id), ('date_from', '<=', dt), ('date_to', '>=', dt)], limit=1)
|
||||
if not seq_date:
|
||||
seq_date = self._create_date_range_seq(dt)
|
||||
return seq_date.with_context(ir_sequence_date_range=seq_date.date_from)._next()
|
||||
|
||||
def next_by_id(self, sequence_date=None):
|
||||
""" Draw an interpolated string using the specified sequence."""
|
||||
self.check_access_rights('read')
|
||||
return self._next(sequence_date=sequence_date)
|
||||
|
||||
@api.model
def next_by_code(self, sequence_code, sequence_date=None):
    """ Draw an interpolated string using a sequence with the requested code.

    If several sequences with the correct code are available to the user
    (multi-company cases), the one from the user's current company will
    be used.

    :param sequence_code: the ``code`` of the ir.sequence to draw from
    :param sequence_date: optional date forwarded for date-range selection
    :return: the next formatted value, or False if no sequence matches
    """
    self.check_access_rights('read')
    company_id = self.env.company.id
    # order='company_id' puts company-specific sequences before shared
    # ones (NULL company_id sorts last), so seq_ids[0] prefers the
    # current company's sequence.
    seq_ids = self.search([('code', '=', sequence_code), ('company_id', 'in', [company_id, False])], order='company_id')
    if not seq_ids:
        # lazy %-style arguments: interpolation only happens if the
        # debug record is actually emitted
        _logger.debug("No ir.sequence has been found for code '%s'. Please make sure a sequence is set for current company.", sequence_code)
        return False
    seq_id = seq_ids[0]
    return seq_id._next(sequence_date=sequence_date)
@api.model
def get_id(self, sequence_code_or_id, code_or_id='id'):
    """ Draw an interpolated string using the specified sequence.

    The sequence to use is specified by the ``sequence_code_or_id``
    argument, which can be a code or an id (as controlled by the
    ``code_or_id`` argument). This method is deprecated.
    """
    _logger.warning("ir_sequence.get() and ir_sequence.get_id() are deprecated. "
                    "Please use ir_sequence.next_by_code() or ir_sequence.next_by_id().")
    # dispatch on the lookup mode: anything other than 'id' is a code
    if code_or_id != 'id':
        return self.next_by_code(sequence_code_or_id)
    return self.browse(sequence_code_or_id).next_by_id()
@api.model
def get(self, code):
    """ Draw an interpolated string using the specified sequence.

    The sequence to use is specified by its code. This method is
    deprecated.
    """
    # delegate to the (also deprecated) get_id() in 'code' mode
    return self.get_id(code, 'code')
||||
class IrSequenceDateRange(models.Model):
    """Per-period sub-sequence of an ``ir.sequence``.

    Each record holds its own counter for the [date_from, date_to]
    interval of its main sequence. In 'standard' implementation the
    counter is backed by a PostgreSQL sequence named
    ``ir_sequence_<seq_id>_<range_id>``.
    """
    _name = 'ir.sequence.date_range'
    _description = 'Sequence Date Range'
    _rec_name = "sequence_id"
    _allow_sudo_commands = False

    _sql_constraints = [
        (
            'unique_range_per_sequence',
            'UNIQUE(sequence_id, date_from, date_to)',
            "You cannot create two date ranges for the same sequence with the same date range.",
        ),
    ]

    def _get_number_next_actual(self):
        '''Return number from ir_sequence row when no_gap implementation,
        and number from postgres sequence when standard implementation.'''
        for seq in self:
            if seq.sequence_id.implementation != 'standard':
                seq.number_next_actual = seq.number_next
            else:
                # suffix of the backing PostgreSQL sequence name
                seq_id = "%03d_%03d" % (seq.sequence_id.id, seq.id)
                seq.number_next_actual = _predict_nextval(self, seq_id)

    def _set_number_next_actual(self):
        # inverse of _get_number_next_actual: persist the edited value,
        # never storing less than 1
        for seq in self:
            seq.write({'number_next': seq.number_next_actual or 1})

    @api.model
    def default_get(self, fields):
        """Default the displayed next number to 1 for new ranges."""
        result = super(IrSequenceDateRange, self).default_get(fields)
        result['number_next_actual'] = 1
        return result

    date_from = fields.Date(string='From', required=True)
    date_to = fields.Date(string='To', required=True)
    sequence_id = fields.Many2one("ir.sequence", string='Main Sequence', required=True, ondelete='cascade')
    number_next = fields.Integer(string='Next Number', required=True, default=1, help="Next number of this sequence")
    number_next_actual = fields.Integer(compute='_get_number_next_actual', inverse='_set_number_next_actual',
                                        string='Actual Next Number',
                                        help="Next number that will be used. This number can be incremented "
                                             "frequently so the displayed value might already be obsolete")

    def _next(self):
        """Draw the next formatted value from this range's counter."""
        if self.sequence_id.implementation == 'standard':
            number_next = _select_nextval(self._cr, 'ir_sequence_%03d_%03d' % (self.sequence_id.id, self.id))
        else:
            number_next = _update_nogap(self, self.sequence_id.number_increment)
        return self.sequence_id.get_next_char(number_next)

    def _alter_sequence(self, number_increment=None, number_next=None):
        # adjust the backing PostgreSQL sequences (standard implementation)
        for seq in self:
            _alter_sequence(self._cr, "ir_sequence_%03d_%03d" % (seq.sequence_id.id, seq.id), number_increment=number_increment, number_next=number_next)

    @api.model_create_multi
    def create(self, vals_list):
        """ Create a sequence, in implementation == standard a fast gaps-allowed PostgreSQL sequence is used.
        """
        seqs = super().create(vals_list)
        for seq in seqs:
            main_seq = seq.sequence_id
            if main_seq.implementation == 'standard':
                _create_sequence(self._cr, "ir_sequence_%03d_%03d" % (main_seq.id, seq.id), main_seq.number_increment, seq.number_next_actual or 1)
        return seqs

    def unlink(self):
        # drop the backing PostgreSQL sequences along with the records
        _drop_sequences(self._cr, ["ir_sequence_%03d_%03d" % (x.sequence_id.id, x.id) for x in self])
        return super(IrSequenceDateRange, self).unlink()

    def write(self, values):
        if values.get('number_next'):
            # keep the PostgreSQL sequences in sync (standard implementation only)
            seq_to_alter = self.filtered(lambda seq: seq.sequence_id.implementation == 'standard')
            seq_to_alter._alter_sequence(number_next=values.get('number_next'))
        # DLE P179: `test_in_invoice_line_onchange_sequence_number_1`
        # _update_nogap do a select to get the next sequence number_next
        # When changing (writing) the number next of a sequence, the number next must be flushed before doing the select.
        # Normally in such a case, we flush just above the execute, but for the sake of performance
        # I believe this is better to flush directly in the write:
        # - Changing the number next of a sequence is really really rare,
        # - But selecting the number next happens a lot,
        # Therefore, if I chose to put the flush just above the select, it would check the flush most of the time for no reason.
        res = super(IrSequenceDateRange, self).write(values)
        self.flush_model(values.keys())
        return res
319
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_ui_menu.py
Normal file
319
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_ui_menu.py
Normal file
|
|
@ -0,0 +1,319 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import base64
|
||||
from collections import defaultdict
|
||||
import operator
|
||||
import re
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.http import request
|
||||
from odoo.modules import get_module_resource
|
||||
from odoo.osv import expression
|
||||
|
||||
MENU_ITEM_SEPARATOR = "/"
|
||||
NUMBER_PARENS = re.compile(r"\(([0-9]+)\)")
|
||||
|
||||
|
||||
class IrUiMenu(models.Model):
    """Menu items of the web client, organized as a tree via parent_id.

    Visibility is computed per user from group membership and from read
    access on the model behind each menu's action; results are cached
    (ormcache) and the cache is cleared on every create/write/unlink.
    """
    _name = 'ir.ui.menu'
    _description = 'Menu'
    _order = "sequence,id"
    _parent_store = True
    _allow_sudo_commands = False

    name = fields.Char(string='Menu', required=True, translate=True)
    active = fields.Boolean(default=True)
    sequence = fields.Integer(default=10)
    child_id = fields.One2many('ir.ui.menu', 'parent_id', string='Child IDs')
    parent_id = fields.Many2one('ir.ui.menu', string='Parent Menu', index=True, ondelete="restrict")
    parent_path = fields.Char(index=True, unaccent=False)
    groups_id = fields.Many2many('res.groups', 'ir_ui_menu_group_rel',
                                 'menu_id', 'gid', string='Groups',
                                 help="If you have groups, the visibility of this menu will be based on these groups. "\
                                      "If this field is empty, Odoo will compute visibility based on the related object's read access.")
    complete_name = fields.Char(string='Full Path', compute='_compute_complete_name', recursive=True)
    web_icon = fields.Char(string='Web Icon File')
    action = fields.Reference(selection=[('ir.actions.report', 'ir.actions.report'),
                                         ('ir.actions.act_window', 'ir.actions.act_window'),
                                         ('ir.actions.act_url', 'ir.actions.act_url'),
                                         ('ir.actions.server', 'ir.actions.server'),
                                         ('ir.actions.client', 'ir.actions.client')])

    web_icon_data = fields.Binary(string='Web Icon Image', attachment=True)

    @api.depends('name', 'parent_id.complete_name')
    def _compute_complete_name(self):
        # "Parent/Child" path, truncated by _get_full_name's level limit
        for menu in self:
            menu.complete_name = menu._get_full_name()

    def _get_full_name(self, level=6):
        """ Return the full name of ``self`` (up to a certain level). """
        if level <= 0:
            return '...'
        if self.parent_id:
            return self.parent_id._get_full_name(level - 1) + MENU_ITEM_SEPARATOR + (self.name or "")
        else:
            return self.name

    def read_image(self, path):
        """Read a 'module,path' icon file and return it base64-encoded,
        or False when the path is empty or cannot be resolved."""
        if not path:
            return False
        path_info = path.split(',')
        icon_path = get_module_resource(path_info[0], path_info[1])
        icon_image = False
        if icon_path:
            # filter_ext: refuse to serve anything that is not an image
            with tools.file_open(icon_path, 'rb', filter_ext=(
                '.gif', '.ico', '.jfif', '.jpeg', '.jpg', '.png', '.svg', '.webp',
            )) as icon_file:
                icon_image = base64.encodebytes(icon_file.read())
        return icon_image

    @api.constrains('parent_id')
    def _check_parent_id(self):
        # forbid cycles in the menu tree (required by _parent_store)
        if not self._check_recursion():
            raise ValidationError(_('Error! You cannot create recursive menus.'))

    @api.model
    @tools.ormcache('frozenset(self.env.user.groups_id.ids)', 'debug')
    def _visible_menu_ids(self, debug=False):
        """ Return the ids of the menu items visible to the user. """
        # retrieve all menus, and determine which ones are visible
        context = {'ir.ui.menu.full_list': True}
        menus = self.with_context(context).search([]).sudo()

        groups = self.env.user.groups_id
        if not debug:
            # outside debug mode, group_no_one does not grant visibility
            groups = groups - self.env.ref('base.group_no_one')
        # first discard all menus with groups the user does not have
        menus = menus.filtered(
            lambda menu: not menu.groups_id or menu.groups_id & groups)

        # take apart menus that have an action
        actions_by_model = defaultdict(set)
        for action in menus.mapped('action'):
            if action:
                actions_by_model[action._name].add(action.id)
        existing_actions = {
            action
            for model_name, action_ids in actions_by_model.items()
            for action in self.env[model_name].browse(action_ids).exists()
        }
        action_menus = menus.filtered(lambda m: m.action and m.action in existing_actions)
        folder_menus = menus - action_menus
        visible = self.browse()

        # process action menus, check whether their action is allowed
        access = self.env['ir.model.access']
        # field of each action type that names the model it operates on
        MODEL_BY_TYPE = {
            'ir.actions.act_window': 'res_model',
            'ir.actions.report': 'model',
            'ir.actions.server': 'model_name',
        }

        # performance trick: determine the ids to prefetch by type
        prefetch_ids = defaultdict(list)
        for action in action_menus.mapped('action'):
            prefetch_ids[action._name].append(action.id)

        for menu in action_menus:
            action = menu.action
            action = action.with_prefetch(prefetch_ids[action._name])
            model_name = action._name in MODEL_BY_TYPE and action[MODEL_BY_TYPE[action._name]]
            if not model_name or access.check(model_name, 'read', False):
                # make menu visible, and its folder ancestors, too
                visible += menu
                menu = menu.parent_id
                while menu and menu in folder_menus and menu not in visible:
                    visible += menu
                    menu = menu.parent_id

        return set(visible.ids)

    @api.returns('self')
    def _filter_visible_menus(self):
        """ Filter `self` to only keep the menu items that should be visible in
            the menu hierarchy of the current user.
            Uses a cache for speeding up the computation.
        """
        visible_ids = self._visible_menu_ids(request.session.debug if request else False)
        return self.filtered(lambda menu: menu.id in visible_ids)

    @api.model
    def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
        # fetch everything first (offset=0, limit=None): offset/limit must
        # be applied AFTER the visibility filtering below, or pages would
        # be shorter than requested
        menu_ids = super(IrUiMenu, self)._search(args, offset=0, limit=None, order=order, count=False, access_rights_uid=access_rights_uid)
        menus = self.browse(menu_ids)
        if menus:
            # menu filtering is done only on main menu tree, not other menu lists
            if not self._context.get('ir.ui.menu.full_list'):
                menus = menus._filter_visible_menus()
            if offset:
                menus = menus[offset:]
            if limit:
                menus = menus[:limit]
        return len(menus) if count else menus.ids

    def name_get(self):
        # display the truncated full path instead of the bare name
        return [(menu.id, menu._get_full_name()) for menu in self]

    @api.model_create_multi
    def create(self, vals_list):
        """Create menus, resolving 'web_icon' into its binary icon data."""
        self.clear_caches()
        for values in vals_list:
            if 'web_icon' in values:
                values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
        return super(IrUiMenu, self).create(vals_list)

    def write(self, values):
        """Write menus, resolving 'web_icon' into its binary icon data."""
        self.clear_caches()
        if 'web_icon' in values:
            values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
        return super(IrUiMenu, self).write(values)

    def _compute_web_icon_data(self, web_icon):
        """ Returns the image associated to `web_icon`.
            `web_icon` can either be:
              - an image icon [module, path]
              - a built icon [icon_class, icon_color, background_color]
            and it only has to call `read_image` if it's an image.
        """
        # returns None (falsy) for built icons
        if web_icon and len(web_icon.split(',')) == 2:
            return self.read_image(web_icon)

    def unlink(self):
        # Detach children and promote them to top-level, because it would be unwise to
        # cascade-delete submenus blindly. We also can't use ondelete=set null because
        # that is not supported when _parent_store is used (would silently corrupt it).
        # TODO: ideally we should move them under a generic "Orphans" menu somewhere?
        extra = {'ir.ui.menu.full_list': True,
                 'active_test': False}
        direct_children = self.with_context(**extra).search([('parent_id', 'in', self.ids)])
        direct_children.write({'parent_id': False})

        self.clear_caches()
        return super(IrUiMenu, self).unlink()

    def copy(self, default=None):
        """Duplicate the menu, appending/incrementing a "(n)" counter in
        the copy's name to keep names distinguishable."""
        record = super(IrUiMenu, self).copy(default=default)
        match = NUMBER_PARENS.search(record.name)
        if match:
            next_num = int(match.group(1)) + 1
            record.name = NUMBER_PARENS.sub('(%d)' % next_num, record.name)
        else:
            record.name = record.name + '(1)'
        return record

    @api.model
    @api.returns('self')
    def get_user_roots(self):
        """ Return all root menu ids visible for the user.

        :return: the root menu ids
        :rtype: list(int)
        """
        return self.search([('parent_id', '=', False)])

    def _load_menus_blacklist(self):
        # hook: other modules may return menu ids to hide from load_menus()
        return []

    @api.model
    @tools.ormcache_context('self._uid', keys=('lang',))
    def load_menus_root(self):
        """Return the root pseudo-menu with the user's root menus as
        children (cached per user and language)."""
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []

        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots_data,
            'all_menu_ids': menu_roots.ids,
        }

        xmlids = menu_roots._get_menuitems_xmlids()
        for menu in menu_roots_data:
            menu['xmlid'] = xmlids.get(menu['id'], '')

        return menu_root

    @api.model
    @tools.ormcache_context('self._uid', 'debug', keys=('lang',))
    def load_menus(self, debug):
        """ Loads all menu items (all applications and their sub-menus).

        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon', 'web_icon_data']
        menu_roots = self.get_user_roots()
        menu_roots_data = menu_roots.read(fields) if menu_roots else []
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': [menu['id'] for menu in menu_roots_data],
        }

        all_menus = {'root': menu_root}

        if not menu_roots_data:
            return all_menus

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menus_domain = [('id', 'child_of', menu_roots.ids)]
        blacklisted_menu_ids = self._load_menus_blacklist()
        if blacklisted_menu_ids:
            menus_domain = expression.AND([menus_domain, [('id', 'not in', blacklisted_menu_ids)]])
        menus = self.search(menus_domain)
        menu_items = menus.read(fields)
        xmlids = (menu_roots + menus)._get_menuitems_xmlids()

        # add roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots_data)

        # set children ids and xmlids
        menu_items_map = {menu_item["id"]: menu_item for menu_item in menu_items}
        for menu_item in menu_items:
            menu_item.setdefault('children', [])
            parent = menu_item['parent_id'] and menu_item['parent_id'][0]
            menu_item['xmlid'] = xmlids.get(menu_item['id'], "")
            if parent in menu_items_map:
                menu_items_map[parent].setdefault(
                    'children', []).append(menu_item['id'])
        all_menus.update(menu_items_map)

        # sort by sequence
        for menu_id in all_menus:
            all_menus[menu_id]['children'].sort(key=lambda id: all_menus[id]['sequence'])

        # recursively set app ids to related children
        def _set_app_id(app_id, menu):
            menu['app_id'] = app_id
            for child_id in menu['children']:
                _set_app_id(app_id, all_menus[child_id])

        for app in menu_roots_data:
            app_id = app['id']
            _set_app_id(app_id, all_menus[app_id])

        # filter out menus not related to an app (+ keep root menu)
        all_menus = {menu['id']: menu for menu in all_menus.values() if menu.get('app_id')}
        all_menus['root'] = menu_root

        return all_menus

    def _get_menuitems_xmlids(self):
        """Map each menu id in ``self`` to its external id (complete XML id)."""
        # sudo(): ir.model.data is a technical model, not readable by all users
        menuitems = self.env['ir.model.data'].sudo().search([
            ('res_id', 'in', self.ids),
            ('model', '=', 'ir.ui.menu')
        ])

        return {
            menu.res_id: menu.complete_name
            for menu in menuitems
        }
3066
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_ui_view.py
Normal file
3066
odoo-bringout-oca-ocb-base/odoo/addons/base/models/ir_ui_view.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,17 @@
|
|||
# -*- coding: ascii -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
|
||||
|
||||
class ReportLayout(models.Model):
    """A selectable document layout: a QWeb view plus preview assets."""
    _name = "report.layout"
    _description = 'Report Layout'
    _order = 'sequence'

    # the ir.ui.view holding the layout's document template
    view_id = fields.Many2one('ir.ui.view', 'Document Template', required=True)
    # source URLs of the layout's preview image / preview PDF
    image = fields.Char(string="Preview image src")
    pdf = fields.Char(string="Preview pdf src")

    sequence = fields.Integer(default=50)
    name = fields.Char()
|
|
@ -0,0 +1,213 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
# see http://doc.qt.io/archives/qt-4.8/qprinter.html#PaperSize-enum
|
||||
PAPER_SIZES = [
    # Each entry: 'key' (selection value), 'description' (label, which
    # embeds the Qt QPrinter enum number), and the page 'height'/'width'
    # in millimetres. The 'custom' entry has no fixed size.
    {
        'description': 'A0  5   841 x 1189 mm',
        'key': 'A0',
        'height': 1189.0,
        'width': 841.0,
    }, {
        'key': 'A1',
        'description': 'A1  6   594 x 841 mm',
        'height': 841.0,
        'width': 594.0,
    }, {
        'key': 'A2',
        'description': 'A2  7   420 x 594 mm',
        'height': 594.0,
        'width': 420.0,
    }, {
        'key': 'A3',
        'description': 'A3  8   297 x 420 mm',
        'height': 420.0,
        'width': 297.0,
    }, {
        'key': 'A4',
        'description': 'A4  0   210 x 297 mm, 8.26 x 11.69 inches',
        'height': 297.0,
        'width': 210.0,
    }, {
        'key': 'A5',
        'description': 'A5  9   148 x 210 mm',
        'height': 210.0,
        'width': 148.0,
    }, {
        'key': 'A6',
        'description': 'A6  10  105 x 148 mm',
        'height': 148.0,
        'width': 105.0,
    }, {
        'key': 'A7',
        'description': 'A7  11  74 x 105 mm',
        'height': 105.0,
        'width': 74.0,
    }, {
        'key': 'A8',
        'description': 'A8  12  52 x 74 mm',
        'height': 74.0,
        'width': 52.0,
    }, {
        'key': 'A9',
        'description': 'A9  13  37 x 52 mm',
        'height': 52.0,
        'width': 37.0,
    }, {
        'key': 'B0',
        'description': 'B0  14  1000 x 1414 mm',
        'height': 1414.0,
        'width': 1000.0,
    }, {
        'key': 'B1',
        'description': 'B1  15  707 x 1000 mm',
        'height': 1000.0,
        'width': 707.0,
    }, {
        'key': 'B2',
        'description': 'B2  17  500 x 707 mm',
        'height': 707.0,
        'width': 500.0,
    }, {
        'key': 'B3',
        'description': 'B3  18  353 x 500 mm',
        'height': 500.0,
        'width': 353.0,
    }, {
        'key': 'B4',
        'description': 'B4  19  250 x 353 mm',
        'height': 353.0,
        'width': 250.0,
    }, {
        'key': 'B5',
        'description': 'B5  1   176 x 250 mm, 6.93 x 9.84 inches',
        'height': 250.0,
        'width': 176.0,
    }, {
        'key': 'B6',
        'description': 'B6  20  125 x 176 mm',
        'height': 176.0,
        'width': 125.0,
    }, {
        'key': 'B7',
        'description': 'B7  21  88 x 125 mm',
        'height': 125.0,
        'width': 88.0,
    }, {
        'key': 'B8',
        'description': 'B8  22  62 x 88 mm',
        'height': 88.0,
        'width': 62.0,
    }, {
        'key': 'B9',
        'description': 'B9  23  33 x 62 mm',
        'height': 62.0,
        'width': 33.0,
    }, {
        'key': 'B10',
        'description': 'B10 16  31 x 44 mm',
        'height': 44.0,
        'width': 31.0,
    }, {
        'key': 'C5E',
        'description': 'C5E 24  163 x 229 mm',
        'height': 229.0,
        'width': 163.0,
    }, {
        'key': 'Comm10E',
        'description': 'Comm10E 25  105 x 241 mm, U.S. Common 10 Envelope',
        'height': 241.0,
        'width': 105.0,
    }, {
        'key': 'DLE',
        'description': 'DLE 26 110 x 220 mm',
        'height': 220.0,
        'width': 110.0,
    }, {
        'key': 'Executive',
        'description': 'Executive 4   7.5 x 10 inches, 190.5 x 254 mm',
        'height': 254.0,
        'width': 190.5,
    }, {
        'key': 'Folio',
        'description': 'Folio 27  210 x 330 mm',
        'height': 330.0,
        'width': 210.0,
    }, {
        'key': 'Ledger',
        'description': 'Ledger  28  431.8 x 279.4 mm',
        'height': 279.4,
        'width': 431.8,
    }, {
        'key': 'Legal',
        'description': 'Legal    3   8.5 x 14 inches, 215.9 x 355.6 mm',
        'height': 355.6,
        'width': 215.9,
    }, {
        'key': 'Letter',
        'description': 'Letter 2 8.5 x 11 inches, 215.9 x 279.4 mm',
        'height': 279.4,
        'width': 215.9,
    }, {
        'key': 'Tabloid',
        'description': 'Tabloid 29 279.4 x 431.8 mm',
        'height': 431.8,
        'width': 279.4,
    }, {
        'key': 'custom',
        'description': 'Custom',
    },
]
|
||||
|
||||
class report_paperformat(models.Model):
    """Paper format configuration used when rendering reports.

    A format is either a named PAPER_SIZES entry or 'custom' with
    explicit page_width/page_height (mm); the two are mutually exclusive.
    """
    _name = "report.paperformat"
    _description = "Paper Format Config"

    name = fields.Char('Name', required=True)
    default = fields.Boolean('Default paper format ?')
    format = fields.Selection([(ps['key'], ps['description']) for ps in PAPER_SIZES], 'Paper size', default='A4', help="Select Proper Paper size")
    margin_top = fields.Float('Top Margin (mm)', default=40)
    margin_bottom = fields.Float('Bottom Margin (mm)', default=20)
    margin_left = fields.Float('Left Margin (mm)', default=7)
    margin_right = fields.Float('Right Margin (mm)', default=7)
    page_height = fields.Integer('Page height (mm)', default=False)
    page_width = fields.Integer('Page width (mm)', default=False)
    orientation = fields.Selection([
        ('Landscape', 'Landscape'),
        ('Portrait', 'Portrait')
    ], 'Orientation', default='Landscape')
    header_line = fields.Boolean('Display a header line', default=False)
    header_spacing = fields.Integer('Header spacing', default=35)
    disable_shrinking = fields.Boolean('Disable smart shrinking')
    dpi = fields.Integer('Output DPI', required=True, default=90)
    report_ids = fields.One2many('ir.actions.report', 'paperformat_id', 'Associated reports', help="Explicitly associated reports")
    print_page_width = fields.Float('Print page width (mm)', compute='_compute_print_page_size')
    print_page_height = fields.Float('Print page height (mm)', compute='_compute_print_page_size')

    @api.constrains('format')
    def _check_format_or_page(self):
        # explicit page dimensions are only allowed with format == 'custom'
        if self.filtered(lambda x: x.format != 'custom' and (x.page_width or x.page_height)):
            raise ValidationError(_('You can select either a format or a specific page width/height, but not both.'))

    def _compute_print_page_size(self):
        """Resolve the effective printed page size (mm), taking the
        named/custom format and the orientation into account."""
        for record in self:
            width = height = 0.0
            if record.format:
                if record.format == 'custom':
                    width = record.page_width
                    height = record.page_height
                else:
                    # look up the named format in the PAPER_SIZES table
                    paper_size = next(ps for ps in PAPER_SIZES if ps['key'] == record.format)
                    width = paper_size['width']
                    height = paper_size['height']

                if record.orientation == 'Landscape':
                    # swap sizes
                    width, height = height, width

            record.print_page_width = width
            record.print_page_height = height
131
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_bank.py
Normal file
131
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_bank.py
Normal file
|
|
@ -0,0 +1,131 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import re
|
||||
|
||||
from collections.abc import Iterable
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.osv import expression
|
||||
|
||||
def sanitize_account_number(acc_number):
    """Normalize a bank account number for storage and comparison.

    Drops every non-word character (spaces, dashes, dots, ...) and
    uppercases the rest; returns False for a falsy input.
    """
    if not acc_number:
        return False
    return re.sub(r'\W+', '', acc_number).upper()
|
||||
|
||||
class Bank(models.Model):
    """A bank (the institution itself, not a partner's account)."""
    _description = 'Bank'
    _name = 'res.bank'
    _order = 'name'

    name = fields.Char(required=True)
    street = fields.Char()
    street2 = fields.Char()
    zip = fields.Char()
    city = fields.Char()
    state = fields.Many2one('res.country.state', 'Fed. State', domain="[('country_id', '=?', country)]")
    country = fields.Many2one('res.country')
    email = fields.Char()
    phone = fields.Char()
    active = fields.Boolean(default=True)
    bic = fields.Char('Bank Identifier Code', index=True, help="Sometimes called BIC or Swift.")

    def name_get(self):
        # display "<name> - <bic>" when a BIC is set, else just the name
        result = []
        for bank in self:
            name = bank.name + (bank.bic and (' - ' + bank.bic) or '')
            result.append((bank.id, name))
        return result

    @api.model
    def _name_search(self, name, args=None, operator='ilike', limit=100, name_get_uid=None):
        """Search banks by name or by BIC prefix."""
        args = args or []
        domain = []
        if name:
            domain = ['|', ('bic', '=ilike', name + '%'), ('name', operator, name)]
            if operator in expression.NEGATIVE_TERM_OPERATORS:
                # negative operators ('not like', '!=', ...) need AND
                # semantics: the record must fail BOTH criteria
                domain = ['&'] + domain
        return self._search(domain + args, limit=limit, access_rights_uid=name_get_uid)

    @api.onchange('country')
    def _onchange_country_id(self):
        # reset the state when it does not belong to the chosen country
        if self.country and self.country != self.state.country_id:
            self.state = False

    @api.onchange('state')
    def _onchange_state(self):
        # align the country with the chosen state's country
        if self.state.country_id:
            self.country = self.state.country_id
||||
|
||||
class ResPartnerBank(models.Model):
    """A bank account owned by a partner.

    The raw acc_number is kept as entered; a sanitized copy (stored,
    unique per partner) is used for searching and deduplication.
    """
    _name = 'res.partner.bank'
    _rec_name = 'acc_number'
    _description = 'Bank Accounts'
    _order = 'sequence, id'

    @api.model
    def get_supported_account_types(self):
        # public wrapper kept for the acc_type selection lambda below
        return self._get_supported_account_types()

    @api.model
    def _get_supported_account_types(self):
        """Hook: subclasses add other account types (e.g. IBAN)."""
        return [('bank', _('Normal'))]

    active = fields.Boolean(default=True)
    acc_type = fields.Selection(selection=lambda x: x.env['res.partner.bank'].get_supported_account_types(), compute='_compute_acc_type', string='Type', help='Bank account type: Normal or IBAN. Inferred from the bank account number.')
    acc_number = fields.Char('Account Number', required=True)
    sanitized_acc_number = fields.Char(compute='_compute_sanitized_acc_number', string='Sanitized Account Number', readonly=True, store=True)
    acc_holder_name = fields.Char(string='Account Holder Name', help="Account holder name, in case it is different than the name of the Account Holder")
    partner_id = fields.Many2one('res.partner', 'Account Holder', ondelete='cascade', index=True, domain=['|', ('is_company', '=', True), ('parent_id', '=', False)], required=True)
    allow_out_payment = fields.Boolean('Send Money', help='This account can be used for outgoing payments', default=False, copy=False, readonly=False)
    bank_id = fields.Many2one('res.bank', string='Bank')
    bank_name = fields.Char(related='bank_id.name', readonly=False)
    bank_bic = fields.Char(related='bank_id.bic', readonly=False)
    sequence = fields.Integer(default=10)
    currency_id = fields.Many2one('res.currency', string='Currency')
    company_id = fields.Many2one('res.company', 'Company', related='partner_id.company_id', store=True, readonly=True)

    _sql_constraints = [(
        'unique_number',
        'unique(sanitized_acc_number, partner_id)',
        'The combination Account Number/Partner must be unique.'
    )]

    @api.depends('acc_number')
    def _compute_sanitized_acc_number(self):
        for bank in self:
            bank.sanitized_acc_number = sanitize_account_number(bank.acc_number)

    @api.depends('acc_number')
    def _compute_acc_type(self):
        for bank in self:
            bank.acc_type = self.retrieve_acc_type(bank.acc_number)

    @api.model
    def retrieve_acc_type(self, acc_number):
        """ To be overridden by subclasses in order to support other account_types.
        """
        return 'bank'

    def name_get(self):
        # display "<number> - <bank>" when a bank is set, else the number
        return [(acc.id, '{} - {}'.format(acc.acc_number, acc.bank_id.name) if acc.bank_id else acc.acc_number)
                for acc in self]

    @api.model
    def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
        """Rewrite 'acc_number' search terms to match against the stored
        sanitized number, sanitizing the searched value(s) the same way."""
        pos = 0
        while pos < len(args):
            # DLE P14
            if args[pos][0] == 'acc_number':
                op = args[pos][1]
                value = args[pos][2]
                if not isinstance(value, str) and isinstance(value, Iterable):
                    # e.g. ('acc_number', 'in', [...]): sanitize each item
                    value = [sanitize_account_number(i) for i in value]
                else:
                    value = sanitize_account_number(value)
                if 'like' in op:
                    # substring operators: allow a partial sanitized match
                    value = '%' + value + '%'
                args[pos] = ('sanitized_acc_number', op, value)
            pos += 1
        return super(ResPartnerBank, self)._search(args, offset, limit, order, count=count, access_rights_uid=access_rights_uid)
|
|
@ -0,0 +1,347 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import base64
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
from odoo import api, fields, models, tools, _, Command
|
||||
from odoo.exceptions import ValidationError, UserError
|
||||
from odoo.modules.module import get_resource_path
|
||||
from odoo.tools import html2plaintext
|
||||
from random import randrange
|
||||
from PIL import Image
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Company(models.Model):
    """A company: legal entity whose contact data lives on a linked res.partner.

    Most contact fields (name, email, address, ...) are related to
    ``partner_id``; the address fields are computed/inverse pairs that proxy
    the partner's 'contact' address.
    """
    _name = "res.company"
    _description = 'Companies'
    _order = 'sequence, name'

    def copy(self, default=None):
        # Companies are deliberately not duplicable.
        raise UserError(_('Duplicating a company is not allowed. Please create a new company instead.'))

    def _get_logo(self):
        """Return the default company logo, base64-encoded."""
        # Use a context manager so the file handle is closed deterministically;
        # the previous implementation left the handle open for the GC to reclaim.
        logo_path = os.path.join(tools.config['root_path'], 'addons', 'base', 'static', 'img', 'res_company_logo.png')
        with open(logo_path, 'rb') as logo_file:
            return base64.b64encode(logo_file.read())

    def _default_currency_id(self):
        # Default to the current user's company currency.
        return self.env.user.company_id.currency_id

    def _get_default_favicon(self, original=False):
        """Return the default favicon, base64-encoded.

        :param original: when True, return the stock icon unchanged; otherwise
            a randomly colored bar is drawn on its bottom row so each company
            gets a visually distinct favicon.
        """
        img_path = get_resource_path('web', 'static/img/favicon.ico')
        with tools.file_open(img_path, 'rb') as f:
            if original:
                return base64.b64encode(f.read())
            # Modify the source image to add a colored bar on the bottom
            # This could seem overkill to modify the pixels 1 by 1, but
            # Pillow doesn't provide an easy way to do it, and this
            # is acceptable for a 16x16 image.
            color = (randrange(32, 224, 24), randrange(32, 224, 24), randrange(32, 224, 24))
            original = Image.open(f)
            new_image = Image.new('RGBA', original.size)
            height = original.size[1]
            width = original.size[0]
            bar_size = 1
            for y in range(height):
                for x in range(width):
                    pixel = original.getpixel((x, y))
                    if height - bar_size <= y + 1 <= height:
                        new_image.putpixel((x, y), (color[0], color[1], color[2], 255))
                    else:
                        new_image.putpixel((x, y), (pixel[0], pixel[1], pixel[2], pixel[3]))
            stream = io.BytesIO()
            new_image.save(stream, format="ICO")
            return base64.b64encode(stream.getvalue())

    name = fields.Char(related='partner_id.name', string='Company Name', required=True, store=True, readonly=False)
    active = fields.Boolean(default=True)
    sequence = fields.Integer(help='Used to order Companies in the company switcher', default=10)
    parent_id = fields.Many2one('res.company', string='Parent Company', index=True)
    child_ids = fields.One2many('res.company', 'parent_id', string='Child Companies')
    partner_id = fields.Many2one('res.partner', string='Partner', required=True)
    report_header = fields.Html(string='Company Tagline', help="Appears by default on the top right corner of your printed documents (report header).")
    report_footer = fields.Html(string='Report Footer', translate=True, help="Footer text displayed at the bottom of all reports.")
    company_details = fields.Html(string='Company Details', help="Header text displayed at the top of all reports.")
    is_company_details_empty = fields.Boolean(compute='_compute_empty_company_details')
    logo = fields.Binary(related='partner_id.image_1920', default=_get_logo, string="Company Logo", readonly=False)
    # logo_web: do not store in attachments, since the image is retrieved in SQL for
    # performance reasons (see addons/web/controllers/main.py, Binary.company_logo)
    logo_web = fields.Binary(compute='_compute_logo_web', store=True, attachment=False)
    currency_id = fields.Many2one('res.currency', string='Currency', required=True, default=lambda self: self._default_currency_id())
    user_ids = fields.Many2many('res.users', 'res_company_users_rel', 'cid', 'user_id', string='Accepted Users')
    # Address fields proxy the partner's 'contact' address (see _compute_address).
    street = fields.Char(compute='_compute_address', inverse='_inverse_street')
    street2 = fields.Char(compute='_compute_address', inverse='_inverse_street2')
    zip = fields.Char(compute='_compute_address', inverse='_inverse_zip')
    city = fields.Char(compute='_compute_address', inverse='_inverse_city')
    state_id = fields.Many2one(
        'res.country.state', compute='_compute_address', inverse='_inverse_state',
        string="Fed. State", domain="[('country_id', '=?', country_id)]"
    )
    bank_ids = fields.One2many(related='partner_id.bank_ids', readonly=False)
    country_id = fields.Many2one('res.country', compute='_compute_address', inverse='_inverse_country', string="Country")
    email = fields.Char(related='partner_id.email', store=True, readonly=False)
    phone = fields.Char(related='partner_id.phone', store=True, readonly=False)
    mobile = fields.Char(related='partner_id.mobile', store=True, readonly=False)
    website = fields.Char(related='partner_id.website', readonly=False)
    vat = fields.Char(related='partner_id.vat', string="Tax ID", readonly=False)
    company_registry = fields.Char(related='partner_id.company_registry', string="Company ID", readonly=False)
    paperformat_id = fields.Many2one('report.paperformat', 'Paper format', default=lambda self: self.env.ref('base.paperformat_euro', raise_if_not_found=False))
    external_report_layout_id = fields.Many2one('ir.ui.view', 'Document Template')
    base_onboarding_company_state = fields.Selection([
        ('not_done', "Not done"), ('just_done', "Just done"), ('done', "Done")], string="State of the onboarding company step", default='not_done')
    favicon = fields.Binary(string="Company Favicon", help="This field holds the image used to display a favicon for a given company.", default=_get_default_favicon)
    font = fields.Selection([("Lato", "Lato"), ("Roboto", "Roboto"), ("Open_Sans", "Open Sans"), ("Montserrat", "Montserrat"), ("Oswald", "Oswald"), ("Raleway", "Raleway"), ('Tajawal', 'Tajawal')], default="Lato")
    primary_color = fields.Char()
    secondary_color = fields.Char()
    layout_background = fields.Selection([('Blank', 'Blank'), ('Geometric', 'Geometric'), ('Custom', 'Custom')], default="Blank", required=True)
    layout_background_image = fields.Binary("Background Image")
    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The company name must be unique !')
    ]

    def init(self):
        # Backfill a paper format on companies that lack one (e.g. data
        # created before the field existed).
        for company in self.search([('paperformat_id', '=', False)]):
            paperformat_euro = self.env.ref('base.paperformat_euro', False)
            if paperformat_euro:
                company.write({'paperformat_id': paperformat_euro.id})
        sup = super(Company, self)
        if hasattr(sup, 'init'):
            sup.init()

    def _get_company_address_field_names(self):
        """ Return a list of fields coming from the address partner to match
        on company address fields. Fields are labeled same on both models. """
        return ['street', 'street2', 'city', 'zip', 'state_id', 'country_id']

    def _get_company_address_update(self, partner):
        # Map each address field name to the partner's current value.
        return dict((fname, partner[fname])
                    for fname in self._get_company_address_field_names())

    # TODO @api.depends(): currently now way to formulate the dependency on the
    # partner's contact address
    def _compute_address(self):
        for company in self.filtered(lambda company: company.partner_id):
            address_data = company.partner_id.sudo().address_get(adr_pref=['contact'])
            if address_data['contact']:
                partner = company.partner_id.browse(address_data['contact']).sudo()
                company.update(company._get_company_address_update(partner))

    def _inverse_street(self):
        for company in self:
            company.partner_id.street = company.street

    def _inverse_street2(self):
        for company in self:
            company.partner_id.street2 = company.street2

    def _inverse_zip(self):
        for company in self:
            company.partner_id.zip = company.zip

    def _inverse_city(self):
        for company in self:
            company.partner_id.city = company.city

    def _inverse_state(self):
        for company in self:
            company.partner_id.state_id = company.state_id

    def _inverse_country(self):
        for company in self:
            company.partner_id.country_id = company.country_id

    @api.depends('partner_id.image_1920')
    def _compute_logo_web(self):
        # Resize the partner image to a 180px-wide web logo.
        for company in self:
            img = company.partner_id.image_1920
            company.logo_web = img and base64.b64encode(tools.image_process(base64.b64decode(img), size=(180, 0)))

    @api.onchange('state_id')
    def _onchange_state(self):
        if self.state_id.country_id:
            self.country_id = self.state_id.country_id

    @api.onchange('country_id')
    def _onchange_country_id(self):
        if self.country_id:
            self.currency_id = self.country_id.currency_id

    @api.model
    def _name_search(self, name, args=None, operator='ilike', limit=100, name_get_uid=None):
        context = dict(self.env.context)
        newself = self
        if context.pop('user_preference', None):
            # We browse as superuser. Otherwise, the user would be able to
            # select only the currently visible companies (according to rules,
            # which are probably to allow to see the child companies) even if
            # she belongs to some other companies.
            companies = self.env.user.company_ids
            args = (args or []) + [('id', 'in', companies.ids)]
            newself = newself.sudo()
        return super(Company, newself.with_context(context))._name_search(name=name, args=args, operator=operator, limit=limit, name_get_uid=name_get_uid)

    @api.model
    @api.returns('self', lambda value: value.id)
    def _company_default_get(self, object=False, field=False):
        """ Returns the user's company
          - Deprecated
        """
        _logger.warning("The method '_company_default_get' on res.company is deprecated and shouldn't be used anymore")
        return self.env.company

    @api.depends('company_details')
    def _compute_empty_company_details(self):
        # In recent change when an html field is empty a <p> balise remains with a <br> in it,
        # but when company details is empty we want to put the info of the company
        for record in self:
            record.is_company_details_empty = not html2plaintext(record.company_details or '')

    # deprecated, use clear_caches() instead
    def cache_restart(self):
        self.clear_caches()

    @api.model_create_multi
    def create(self, vals_list):
        """Create companies, auto-creating a partner when none is given and
        linking the new companies to the current user."""
        # add default favicon
        for vals in vals_list:
            if not vals.get('favicon'):
                vals['favicon'] = self._get_default_favicon()

        # create missing partners
        no_partner_vals_list = [
            vals
            for vals in vals_list
            if vals.get('name') and not vals.get('partner_id')
        ]
        if no_partner_vals_list:
            partners = self.env['res.partner'].create([
                {
                    'name': vals['name'],
                    'is_company': True,
                    'image_1920': vals.get('logo'),
                    'email': vals.get('email'),
                    'phone': vals.get('phone'),
                    'website': vals.get('website'),
                    'vat': vals.get('vat'),
                    'country_id': vals.get('country_id'),
                }
                for vals in no_partner_vals_list
            ])
            # compute stored fields, for example address dependent fields
            partners.flush_model()
            for vals, partner in zip(no_partner_vals_list, partners):
                vals['partner_id'] = partner.id

        self.clear_caches()
        companies = super().create(vals_list)

        # The write is made on the user to set it automatically in the multi company group.
        if companies:
            self.env.user.write({
                'company_ids': [Command.link(company.id) for company in companies],
            })

        # Make sure that the selected currencies are enabled
        companies.currency_id.sudo().filtered(lambda c: not c.active).active = True

        return companies

    def write(self, values):
        self.clear_caches()
        # Make sure that the selected currency is enabled
        if values.get('currency_id'):
            currency = self.env['res.currency'].browse(values['currency_id'])
            if not currency.active:
                currency.write({'active': True})

        res = super(Company, self).write(values)

        # invalidate company cache to recompute address based on updated partner
        company_address_fields = self._get_company_address_field_names()
        company_address_fields_upd = set(company_address_fields) & set(values.keys())
        if company_address_fields_upd:
            self.invalidate_model(company_address_fields)
        return res

    @api.constrains('active')
    def _check_active(self):
        for company in self:
            if not company.active:
                company_active_users = self.env['res.users'].search_count([
                    ('company_id', '=', company.id),
                    ('active', '=', True),
                ])
                if company_active_users:
                    # You cannot disable companies with active users
                    raise ValidationError(_(
                        'The company %(company_name)s cannot be archived because it is still used '
                        'as the default company of %(active_users)s users.',
                        company_name=company.name,
                        active_users=company_active_users,
                    ))

    @api.constrains('parent_id')
    def _check_parent_id(self):
        if not self._check_recursion():
            raise ValidationError(_('You cannot create recursive companies.'))

    def open_company_edit_report(self):
        self.ensure_one()
        return self.env['res.config.settings'].open_company()

    def write_company_and_print_report(self):
        # Triggered from the document-layout wizard: re-print the report the
        # user was configuring, identified via context keys.
        context = self.env.context
        report_name = context.get('default_report_name')
        active_ids = context.get('active_ids')
        active_model = context.get('active_model')
        if report_name and active_ids and active_model:
            docids = self.env[active_model].browse(active_ids)
            return (self.env['ir.actions.report'].search([('report_name', '=', report_name)], limit=1)
                    .report_action(docids))

    @api.model
    def action_open_base_onboarding_company(self):
        """ Onboarding step for company basic information. """
        action = self.env["ir.actions.actions"]._for_xml_id("base.action_open_base_onboarding_company")
        action['res_id'] = self.env.company.id
        return action

    def set_onboarding_step_done(self, step_name):
        # 'just_done' triggers the one-time confetti animation client-side.
        if self[step_name] == 'not_done':
            self[step_name] = 'just_done'

    def _get_and_update_onboarding_state(self, onboarding_state, steps_states):
        """ Needed to display onboarding animations only one time. """
        old_values = {}
        all_done = True
        for step_state in steps_states:
            old_values[step_state] = self[step_state]
            if self[step_state] == 'just_done':
                self[step_state] = 'done'
            all_done = all_done and self[step_state] == 'done'

        if all_done:
            if self[onboarding_state] == 'not_done':
                # string `onboarding_state` instead of variable name is not an error
                old_values['onboarding_state'] = 'just_done'
            else:
                old_values['onboarding_state'] = 'done'
            self[onboarding_state] = 'done'
        return old_values

    def action_save_onboarding_company_step(self):
        if bool(self.street):
            self.set_onboarding_step_done('base_onboarding_company_state')

    @api.model
    def _get_main_company(self):
        # Prefer the canonical xml-id; fall back to the oldest company when
        # 'base.main_company' was deleted.
        try:
            main_company = self.sudo().env.ref('base.main_company')
        except ValueError:
            main_company = self.env['res.company'].sudo().search([], limit=1, order="id")

        return main_company
|
||||
767
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_config.py
Normal file
767
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_config.py
Normal file
|
|
@ -0,0 +1,767 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
from ast import literal_eval
|
||||
from lxml import etree
|
||||
|
||||
from odoo import api, models, _
|
||||
from odoo.exceptions import AccessError, RedirectWarning, UserError
|
||||
from odoo.tools import ustr
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ResConfigModuleInstallationMixin(object):
    __slots__ = ()

    @api.model
    def _install_modules(self, modules):
        """ Trigger the installation of the requested modules, ignoring the
        ones that are not currently uninstalled.

        :param modules: a recordset of ir.module.module records
        :return: the next action to execute, or None when nothing was installed
        """
        pending = modules.filtered(lambda module: module.state == 'uninstalled')
        if not pending:
            return None
        return pending.button_immediate_install()
|
||||
|
||||
|
||||
class ResConfigConfigurable(models.TransientModel):
    """Base class for new-style configuration items.

    Concrete configuration items inherit from this model, implement
    :meth:`execute` (and optionally :meth:`cancel`) and have their view
    inherit from the related ``res_config_view_base`` view.
    """
    _name = 'res.config'
    _description = 'Config'

    def start(self):
        """Entry point of the wizard: go straight to the next step."""
        # pylint: disable=next-method-called
        return self.next()

    def next(self):
        """Build the client action that reloads the settings page."""
        return {
            'type': 'ir.actions.client',
            'tag': 'reload',
        }

    def execute(self):
        """Hook called when the user clicks the ``Next`` button.

        Subclasses *must* override this (unless they override
        ``action_next``, which is rarely needed). When the override
        returns an action dictionary, that action is executed instead of
        simply moving on to the next configuration item.
        """
        raise NotImplementedError(
            'Configuration items need to implement execute')

    def cancel(self):
        """Hook called when the user clicks the ``Skip`` button.

        Override this rather than ``action_skip``. As with ``execute``,
        a returned action dictionary is executed instead of the default
        step forward. ``action_cancel`` also routes through this hook.
        The base implementation does nothing.
        """

    def action_next(self):
        """Handler for the ``next`` event: run ``execute`` then advance.

        The return value of ``execute`` wins when it is an action
        dictionary; otherwise we fall back to ``next``.
        """
        # pylint: disable=next-method-called
        outcome = self.execute()
        return outcome or self.next()

    def action_skip(self):
        """Handler for the ``skip`` event: run ``cancel`` then advance.

        The return value of ``cancel`` wins when it is an action
        dictionary; otherwise we fall back to ``next``.
        """
        # pylint: disable=next-method-called
        outcome = self.cancel()
        return outcome or self.next()

    def action_cancel(self):
        """Handler for the ``cancel`` event.

        That event isn't generated by the inheritable
        ``res.config.view.base`` view; an inherited view has to overload
        one of the buttons (or add one) to emit it. Behaves exactly like
        ``action_skip``.
        """
        # pylint: disable=next-method-called
        outcome = self.cancel()
        return outcome or self.next()
|
||||
|
||||
|
||||
class ResConfigInstaller(models.TransientModel, ResConfigModuleInstallationMixin):
    """ New-style configuration base specialized for addons selection
    and installation.

    Basic usage
    -----------

    Subclasses can simply define a number of boolean fields. The field names
    should be the names of the addons to install (when selected). Upon action
    execution, selected boolean fields (and those only) will be interpreted as
    addons to install, and batch-installed.

    Additional addons
    -----------------

    It is also possible to require the installation of an additional
    addon set when a specific preset of addons has been marked for
    installation (in the basic usage only, additionals can't depend on
    one another).

    These additionals are defined through the ``_install_if``
    property. This property is a mapping of a collection of addons (by
    name) to a collection of addons (by name) [#]_, and if all the *key*
    addons are selected for installation, then the *value* ones will
    be selected as well. For example::

        _install_if = {
            ('sale','crm'): ['sale_crm'],
        }

    This will install the ``sale_crm`` addon if and only if both the
    ``sale`` and ``crm`` addons are selected for installation.

    You can define as many additionals as you wish, and additionals
    can overlap in key and value. For instance::

        _install_if = {
            ('sale','crm'): ['sale_crm'],
            ('sale','project'): ['sale_service'],
        }

    will install both ``sale_crm`` and ``sale_service`` if all of
    ``sale``, ``crm`` and ``project`` are selected for installation.

    Hook methods
    ------------

    Subclasses might also need to express dependencies more complex
    than that provided by additionals. In this case, it's possible to
    define methods of the form ``_if_%(name)s`` where ``name`` is the
    name of a boolean field. If the field is selected, then the
    corresponding module will be marked for installation *and* the
    hook method will be executed.

    Hook methods take the usual set of parameters (cr, uid, ids,
    context) and can return a collection of additional addons to
    install (if they return anything, otherwise they should not return
    anything, though returning any "falsy" value such as None or an
    empty collection will have the same effect).

    Complete control
    ----------------

    The last hook is to simply overload the ``modules_to_install``
    method, which implements all the mechanisms above. This method
    takes the usual set of parameters (cr, uid, ids, context) and
    returns a ``set`` of addons to install (addons selected by the
    above methods minus addons from the *basic* set which are already
    installed) [#]_ so an overloader can simply manipulate the ``set``
    returned by ``ResConfigInstaller.modules_to_install`` to add or
    remove addons.

    Skipping the installer
    ----------------------

    Unless it is removed from the view, installers have a *skip*
    button which invokes ``action_skip`` (and the ``cancel`` hook from
    ``res.config``). Hooks and additionals *are not run* when skipping
    installation, even for already installed addons.

    Again, setup your hooks accordingly.

    .. [#] note that since a mapping key needs to be hashable, it's
           possible to use a tuple or a frozenset, but not a list or a
           regular set

    .. [#] because the already-installed modules are only pruned at
           the very end of ``modules_to_install``, additionals and
           hooks depending on them *are guaranteed to execute*. Setup
           your hooks accordingly.
    """
    _name = 'res.config.installer'
    _inherit = 'res.config'
    _description = 'Config Installer'

    # Mapping {iterable of addon names: iterable of addon names}; see the
    # "Additional addons" section of the class docstring.
    _install_if = {}

    def already_installed(self):
        """ For each module, check if it's already installed and if it
        is return its name

        :returns: a list of the already installed modules in this
                  installer
        :rtype: [str]
        """
        return [m.name for m in self._already_installed()]

    def _already_installed(self):
        """ For each module (boolean fields in a res.config.installer),
        check if it's already installed (either 'to install', 'to upgrade'
        or 'installed') and if it is return the module's record

        :returns: a list of all installed modules in this installer
        :rtype: recordset (collection of Record)
        """
        # Every boolean field on the installer is, by convention, named after
        # the addon it toggles.
        selectable = [name for name, field in self._fields.items()
                      if field.type == 'boolean']
        return self.env['ir.module.module'].search([('name', 'in', selectable),
                                                    ('state', 'in', ['to install', 'installed', 'to upgrade'])])

    def modules_to_install(self):
        """ selects all modules to install:

        * checked boolean fields
        * return values of hook methods. Hook methods are of the form
          ``_if_%(addon_name)s``, and are called if the corresponding
          addon is marked for installation. They take the arguments
          cr, uid, ids and context, and return an iterable of addon
          names
        * additionals, additionals are setup through the ``_install_if``
          class variable. ``_install_if`` is a dict of {iterable:iterable}
          where key and value are iterables of addon names

          If all the addons in the key are selected for installation
          (warning: addons added through hooks don't count), then the
          addons in the value are added to the set of modules to install
        * not already installed
        """
        # Checked boolean fields = base set of addons to install.
        base = set(module_name
                   for installer in self.read()
                   for module_name, to_install in installer.items()
                   if self._fields[module_name].type == 'boolean' and to_install)

        # Run the optional ``_if_<addon>`` hook for each selected addon.
        hooks_results = set()
        for module in base:
            hook = getattr(self, '_if_%s'% module, None)
            if hook:
                hooks_results.update(hook() or set())

        # Addons implied by ``_install_if`` presets fully contained in ``base``.
        additionals = set(module
                          for requirements, consequences in self._install_if.items()
                          if base.issuperset(requirements)
                          for module in consequences)

        # Prune already-installed addons only at the very end, so hooks and
        # additionals depending on them still run (see class docstring).
        return (base | hooks_results | additionals) - set(self.already_installed())

    @api.model
    def default_get(self, fields_list):
        ''' If an addon is already installed, check it by default
        '''
        defaults = super(ResConfigInstaller, self).default_get(fields_list)
        return dict(defaults, **dict.fromkeys(self.already_installed(), True))

    @api.model
    def fields_get(self, allfields=None, attributes=None):
        """ If an addon is already installed, set it to readonly as
        res.config.installer doesn't handle uninstallations of already
        installed addons
        """
        fields = super().fields_get(allfields=allfields, attributes=attributes)

        for name in self.already_installed():
            if name not in fields:
                continue
            fields[name].update(
                readonly=True,
                help= ustr(fields[name].get('help', '')) +
                    _('\n\nThis addon is already installed on your system'))
        return fields

    def execute(self):
        # Resolve the selected addon names into module records and install
        # them; names with no matching module are handed to the Apps client
        # action so the user can fetch them from the app store.
        to_install = list(self.modules_to_install())
        _logger.info('Selecting addons %s to install', to_install)

        IrModule = self.env['ir.module.module']
        modules = IrModule.search([('name', 'in', to_install)])
        module_names = {module.name for module in modules}
        to_install_missing_names = [name for name in to_install if name not in module_names]

        result = self._install_modules(modules)
        #FIXME: if result is not none, the corresponding todo will be skipped because it was just marked done
        if to_install_missing_names:
            return {
                'type': 'ir.actions.client',
                'tag': 'apps',
                'params': {'modules': to_install_missing_names},
            }
        return result
|
||||
|
||||
|
||||
class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin):
|
||||
""" Base configuration wizard for application settings. It provides support for setting
|
||||
default values, assigning groups to employee users, and installing modules.
|
||||
To make such a 'settings' wizard, define a model like::
|
||||
|
||||
class MyConfigWizard(models.TransientModel):
|
||||
_name = 'my.settings'
|
||||
_inherit = 'res.config.settings'
|
||||
|
||||
default_foo = fields.type(..., default_model='my.model'),
|
||||
group_bar = fields.Boolean(..., group='base.group_user', implied_group='my.group'),
|
||||
module_baz = fields.Boolean(...),
|
||||
config_qux = fields.Char(..., config_parameter='my.parameter')
|
||||
other_field = fields.type(...),
|
||||
|
||||
The method ``execute`` provides some support based on a naming convention:
|
||||
|
||||
* For a field like 'default_XXX', ``execute`` sets the (global) default value of
|
||||
the field 'XXX' in the model named by ``default_model`` to the field's value.
|
||||
|
||||
* For a boolean field like 'group_XXX', ``execute`` adds/removes 'implied_group'
|
||||
to/from the implied groups of 'group', depending on the field's value.
|
||||
By default 'group' is the group Employee. Groups are given by their xml id.
|
||||
The attribute 'group' may contain several xml ids, separated by commas.
|
||||
|
||||
* For a selection field like 'group_XXX' composed of 2 string values ('0' and '1'),
|
||||
``execute`` adds/removes 'implied_group' to/from the implied groups of 'group',
|
||||
depending on the field's value.
|
||||
By default 'group' is the group Employee. Groups are given by their xml id.
|
||||
The attribute 'group' may contain several xml ids, separated by commas.
|
||||
|
||||
* For a boolean field like 'module_XXX', ``execute`` triggers the immediate
|
||||
installation of the module named 'XXX' if the field has value ``True``.
|
||||
|
||||
* For a selection field like 'module_XXX' composed of 2 string values ('0' and '1'),
|
||||
``execute`` triggers the immediate installation of the module named 'XXX'
|
||||
if the field has the value ``'1'``.
|
||||
|
||||
* For a field with no specific prefix BUT an attribute 'config_parameter',
|
||||
``execute``` will save its value in an ir.config.parameter (global setting for the
|
||||
database).
|
||||
|
||||
* For the other fields, the method ``execute`` invokes `set_values`.
|
||||
Override it to implement the effect of those fields.
|
||||
|
||||
The method ``default_get`` retrieves values that reflect the current status of the
|
||||
fields like 'default_XXX', 'group_XXX', 'module_XXX' and config_XXX.
|
||||
It also invokes all methods with a name that starts with 'get_default_';
|
||||
such methods can be defined to provide current values for other fields.
|
||||
"""
|
||||
_name = 'res.config.settings'
|
||||
_description = 'Config Settings'
|
||||
|
||||
def _valid_field_parameter(self, field, name):
|
||||
return (
|
||||
name in ('default_model', 'config_parameter')
|
||||
or field.type in ('boolean', 'selection') and name in ('group', 'implied_group')
|
||||
or super()._valid_field_parameter(field, name)
|
||||
)
|
||||
|
||||
def copy(self, default=None):
|
||||
raise UserError(_("Cannot duplicate configuration!"))
|
||||
|
||||
def onchange_module(self, field_value, module_name):
    """Onchange handler shared by all 'module_XXX' fields.

    :param field_value: new value of the module field ('0'/'1' or boolean)
    :param module_name: technical field name, e.g. 'module_sale'
    :return: an onchange warning dict when unchecking the field would also
        uninstall dependent modules, otherwise an empty dict
    """
    # strip the 'module_' prefix (7 chars) to get the module's technical name
    module_sudo = self.env['ir.module.module']._get(module_name[7:])
    if not int(field_value) and module_sudo.state in ('to install', 'installed', 'to upgrade'):
        # list the modules that would be uninstalled along with this one
        deps = module_sudo.downstream_dependencies()
        dep_names = (deps | module_sudo).mapped('shortdesc')
        message = '\n'.join(dep_names)
        return {
            'warning': {
                'title': _('Warning!'),
                'message': _('Disabling this option will also uninstall the following modules \n%s', message),
            }
        }
    return {}
|
||||
|
||||
def _register_hook(self):
    """ Add an onchange method for each module field. """
    def make_method(name):
        # bind 'name' via the enclosing function; a bare lambda created in
        # the loop below would late-bind and all methods would share the
        # last field name
        return lambda self: self.onchange_module(self[name], name)

    for name in self._fields:
        if name.startswith('module_'):
            method = make_method(name)
            self._onchange_methods[name].append(method)
|
||||
|
||||
@api.model
def _get_classified_fields(self, fnames=None):
    """ return a dictionary with the fields classified by category::

        { 'default': [('default_foo', 'model', 'foo'), ...],
          'group':   [('group_bar', [browse_group], browse_implied_group), ...],
          'module':  [('module_baz', browse_module), ...],
          'config':  [('config_qux', 'my.parameter'), ...],
          'other':   ['other_field', ...],
        }
    """
    IrModule = self.env['ir.module.module']
    IrModelData = self.env['ir.model.data']
    Groups = self.env['res.groups']

    def ref(xml_id):
        # resolve an xml id to a browse record of the proper model
        res_model, res_id = IrModelData._xmlid_to_res_model_res_id(xml_id)
        return self.env[res_model].browse(res_id)

    if fnames is None:
        # no restriction: classify every field of the model
        fnames = self._fields.keys()

    defaults, groups, configs, others = [], [], [], []
    modules = IrModule  # accumulated as a recordset, not a list
    for name in fnames:
        field = self._fields[name]
        if name.startswith('default_'):
            # 'default_XXX' sets the default of field XXX on 'default_model'
            if not hasattr(field, 'default_model'):
                raise Exception("Field %s without attribute 'default_model'" % field)
            defaults.append((name, field.default_model, name[8:]))
        elif name.startswith('group_'):
            # 'group_XXX' toggles 'implied_group' on the groups in 'group'
            if field.type not in ('boolean', 'selection'):
                raise Exception("Field %s must have type 'boolean' or 'selection'" % field)
            if not hasattr(field, 'implied_group'):
                raise Exception("Field %s without attribute 'implied_group'" % field)
            # 'group' may hold several comma-separated xml ids; defaults to Employee
            field_group_xmlids = getattr(field, 'group', 'base.group_user').split(',')
            field_groups = Groups.concat(*(ref(it) for it in field_group_xmlids))
            groups.append((name, field_groups, ref(field.implied_group)))
        elif name.startswith('module_'):
            # 'module_XXX' installs/uninstalls module XXX
            if field.type not in ('boolean', 'selection'):
                raise Exception("Field %s must have type 'boolean' or 'selection'" % field)
            modules += IrModule._get(name[7:])
        elif hasattr(field, 'config_parameter') and field.config_parameter:
            # field stored as a global ir.config_parameter
            if field.type not in ('boolean', 'integer', 'float', 'char', 'selection', 'many2one', 'datetime'):
                raise Exception("Field %s must have type 'boolean', 'integer', 'float', 'char', 'selection', 'many2one' or 'datetime'" % field)
            configs.append((name, field.config_parameter))
        else:
            others.append(name)

    return {'default': defaults, 'group': groups, 'module': modules, 'config': configs, 'other': others}
|
||||
|
||||
def get_values(self):
    """
    Return values for the fields other that `default`, `group` and `module`
    """
    # extension hook: modules override this to feed their own setting values
    values = {}
    return values
|
||||
|
||||
@api.model
def default_get(self, fields):
    """Compute the wizard's defaults from the current global configuration:
    ir.default values, implied groups, module states, ir.config_parameter
    values (converted back to the field's type), and ``get_values()``."""
    IrDefault = self.env['ir.default']
    IrConfigParameter = self.env['ir.config_parameter'].sudo()
    classified = self._get_classified_fields(fields)

    res = super(ResConfigSettings, self).default_get(fields)

    # defaults: take the corresponding default value they set
    for name, model, field in classified['default']:
        value = IrDefault.get(model, field)
        if value is not None:
            res[name] = value

    # groups: which groups are implied by the group Employee
    for name, groups, implied_group in classified['group']:
        # checked iff the implied group is implied by ALL listed groups
        res[name] = all(implied_group in group.implied_ids for group in groups)
        if self._fields[name].type == 'selection':
            res[name] = str(int(res[name]))  # True, False -> '1', '0'

    # modules: which modules are installed/to install
    for module in classified['module']:
        res[f'module_{module.name}'] = module.state in ('installed', 'to install', 'to upgrade')

    # config: get & convert stored ir.config_parameter (or default)
    WARNING_MESSAGE = "Error when converting value %r of field %s for ir.config.parameter %r"
    for name, icp in classified['config']:
        field = self._fields[name]
        value = IrConfigParameter.get_param(icp, field.default(self) if field.default else False)
        if value is not False:
            # parameters are stored as strings; convert back to the
            # field's type, falling back on a safe value on bad data
            if field.type == 'many2one':
                try:
                    # Special case when value is the id of a deleted record, we do not want to
                    # block the settings screen
                    value = self.env[field.comodel_name].browse(int(value)).exists().id
                except (ValueError, TypeError):
                    _logger.warning(WARNING_MESSAGE, value, field, icp)
                    value = False
            elif field.type == 'integer':
                try:
                    value = int(value)
                except (ValueError, TypeError):
                    _logger.warning(WARNING_MESSAGE, value, field, icp)
                    value = 0
            elif field.type == 'float':
                try:
                    value = float(value)
                except (ValueError, TypeError):
                    _logger.warning(WARNING_MESSAGE, value, field, icp)
                    value = 0.0
            elif field.type == 'boolean':
                value = bool(value)
        res[name] = value

    res.update(self.get_values())

    return res
|
||||
|
||||
def set_values(self):
    """
    Set values for the fields other that `default`, `group` and `module`
    """
    self = self.with_context(active_test=False)
    classified = self._get_classified_fields()
    # snapshot of the stored configuration, used to write only what changed
    current_settings = self.default_get(list(self.fields_get()))

    # default values fields
    IrDefault = self.env['ir.default'].sudo()
    for name, model, field in classified['default']:
        if isinstance(self[name], models.BaseModel):
            # relational fields are stored by id(s), not as recordsets
            if self._fields[name].type == 'many2one':
                value = self[name].id
            else:
                value = self[name].ids
        else:
            value = self[name]
        if name not in current_settings or value != current_settings[name]:
            IrDefault.set(model, field, value)

    # group fields: modify group / implied groups
    with self.env.norecompute():
        # process removals (falsy values) before additions so that a group
        # removed by one field is not left over when another adds it back
        for name, groups, implied_group in sorted(classified['group'], key=lambda k: self[k[0]]):
            groups = groups.sudo()
            implied_group = implied_group.sudo()
            if self[name] == current_settings[name]:
                continue
            if int(self[name]):
                groups._apply_group(implied_group)
            else:
                groups._remove_group(implied_group)

    # config fields: store ir.config_parameters
    IrConfigParameter = self.env['ir.config_parameter'].sudo()
    for name, icp in classified['config']:
        field = self._fields[name]
        value = self[name]
        current_value = IrConfigParameter.get_param(icp)

        if field.type == 'char':
            # storing developer keys as ir.config_parameter may lead to nasty
            # bugs when users leave spaces around them
            value = (value or "").strip() or False
        elif field.type in ('integer', 'float'):
            value = repr(value) if value else False
        elif field.type == 'many2one':
            # value is a (possibly empty) recordset
            value = value.id

        # compare against both str and raw form: parameters are stored as text
        if current_value == str(value) or current_value == value:
            continue
        IrConfigParameter.set_param(icp, value)
|
||||
|
||||
def execute(self):
    """
    Called when settings are saved.

    This method will call `set_values` and will install/uninstall any modules defined by
    `module_` Boolean fields and then trigger a web client reload.

    .. warning::

        This method **SHOULD NOT** be overridden, in most cases what you want to override is
        `~set_values()` since `~execute()` does little more than simply call `~set_values()`.

        The part that installs/uninstalls modules **MUST ALWAYS** be at the end of the
        transaction, otherwise there's a big risk of registry <-> database desynchronisation.
    """
    self.ensure_one()
    if not self.env.is_admin():
        raise AccessError(_("Only administrators can change the settings"))

    self = self.with_context(active_test=False)
    classified = self._get_classified_fields()

    self.set_values()

    # module fields: install/uninstall the selected modules
    to_install = classified['module'].filtered(
        lambda m: self[f'module_{m.name}'] and m.state != 'installed')
    to_uninstall = classified['module'].filtered(
        lambda m: not self[f'module_{m.name}'] and m.state in ('installed', 'to upgrade'))

    if to_install or to_uninstall:
        # flush pending ORM writes before (un)installation alters the registry
        self.env.flush_all()

    if to_uninstall:
        to_uninstall.button_immediate_uninstall()

    installation_status = self._install_modules(to_install)

    if installation_status or to_uninstall:
        # After the uninstall/install calls, the registry and environments
        # are no longer valid. So we reset the environment.
        self.env.reset()
        self = self.env()[self._name]

    # pylint: disable=next-method-called
    config = self.env['res.config'].next() or {}
    if config.get('type') not in ('ir.actions.act_window_close',):
        # a follow-up configuration action takes precedence over the reload
        return config

    # force client-side reload (update user menu and current view)
    return {
        'type': 'ir.actions.client',
        'tag': 'reload',
    }
|
||||
|
||||
def cancel(self):
    """Discard the transient record and reopen the settings action, if one exists."""
    action = self.env['ir.actions.act_window'].search(
        [('res_model', '=', self._name)], limit=1)
    return action.read()[0] if action else {}
|
||||
|
||||
def name_get(self):
    """ Override name_get method to return an appropriate configuration wizard
    name, and not the generated name."""
    act = self.env['ir.actions.act_window'].search(
        [('res_model', '=', self._name)], limit=1)
    label = act.name or self._name
    return [(rec.id, label) for rec in self]
|
||||
|
||||
@api.model
def get_option_path(self, menu_xml_id):
    """
    Fetch the path to a specified configuration view and the action id to access it.

    :param string menu_xml_id: the xml id of the menuitem where the view is located,
        structured as follows: module_name.menuitem_xml_id (e.g.: "sales_team.menu_sale_config")
    :return tuple:
        - t[0]: string: full path to the menuitem (e.g.: "Settings/Configuration/Sales")
        - t[1]: int or long: id of the menuitem's action
    """
    # env.ref raises if the xml id does not exist
    ir_ui_menu = self.env.ref(menu_xml_id)
    return (ir_ui_menu.complete_name, ir_ui_menu.action.id)
|
||||
|
||||
@api.model
def get_option_name(self, full_field_name):
    """
    Fetch the human readable name of a specified configuration option.

    :param string full_field_name: the full name of the field, structured as follows:
        model_name.field_name (e.g.: "sale.config.settings.fetchmail_lead")
    :return string: human readable name of the field (e.g.: "Create leads from incoming mails")
    """
    # split on the last dot only: the model name itself contains dots
    model_name, field_name = full_field_name.rsplit('.', 1)
    field_info = self.env[model_name].fields_get([field_name])
    return field_info[field_name]['string']
|
||||
|
||||
@api.model
def get_config_warning(self, msg):
    """
    Helper: return a Warning exception with the given message where the %(field:xxx)s
    and/or %(menu:yyy)s are replaced by the human readable field's name and/or menuitem's
    full path.

    Usage:
    ------
    Just include in your error message %(field:model_name.field_name)s to obtain the human
    readable field's name, and/or %(menu:module_name.menuitem_xml_id)s to obtain the menuitem's
    full path.

    Example of use:
    ---------------
    from odoo.addons.base.models.res_config import get_warning_config
    raise get_warning_config(cr, _("Error: this action is prohibited. You should check the field %(field:sale.config.settings.fetchmail_lead)s in %(menu:sales_team.menu_sale_config)s."), context=context)

    This will return an exception containing the following message:
        Error: this action is prohibited. You should check the field Create leads from incoming mails in Settings/Configuration/Sales.

    What if there is another substitution in the message already?
    -------------------------------------------------------------
    You could have a situation where the error message you want to upgrade already contains a substitution. Example:
        Cannot find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration\\Journals\\Journals.
    What you want to do here is simply to replace the path by %menu:account.menu_account_config)s, and leave the rest alone.
    In order to do that, you can use the double percent (%%) to escape your new substitution, like so:
        Cannot find any account journal of %s type for this company.\n\nYou can create one in the %%(menu:account.menu_account_config)s.
    """
    self = self.sudo()

    # Process the message
    # 1/ find the menu and/or field references, put them in a list
    regex_path = r'%\(((?:menu|field):[a-z_\.]*)\)s'
    references = re.findall(regex_path, msg, flags=re.I)

    # 2/ fetch the menu and/or field replacement values (full path and
    #    human readable field's name) and the action_id if any
    values = {}
    action_id = None
    for item in references:
        ref_type, ref = item.split(':')
        if ref_type == 'menu':
            # a menu reference also provides the action to redirect to
            values[item], action_id = self.get_option_path(ref)
        elif ref_type == 'field':
            values[item] = self.get_option_name(ref)

    # 3/ substitute and return the result
    if (action_id):
        # with a target action, offer a clickable redirection instead
        return RedirectWarning(msg % values, action_id, _('Go to the configuration panel'))
    return UserError(msg % values)
|
||||
|
||||
@api.model_create_multi
def create(self, vals_list):
    # Optimisation: saving a res.config.settings even without changing any
    # values will trigger the write of all related values. This in turn may
    # trigger chain of further recomputation. To avoid it, delete values
    # that were not changed.
    for vals in vals_list:
        for field in self._fields.values():
            # only writable related fields can echo unchanged values back
            if not (field.name in vals and field.related and not field.readonly):
                continue
            # we write on a related field like
            # qr_code = fields.Boolean(related='company_id.qr_code', readonly=False)
            fname0, *fnames = field.related.split(".")
            if fname0 not in vals:
                continue

            # determine the current value
            field0 = self._fields[fname0]
            old_value = field0.convert_to_record(
                field0.convert_to_cache(vals[fname0], self), self)
            # walk the related chain; next(iter(...)) follows x2many hops
            for fname in fnames:
                old_value = next(iter(old_value), old_value)[fname]

            # determine the new value
            new_value = field.convert_to_record(
                field.convert_to_cache(vals[field.name], self), self)

            # drop if the value is the same
            if old_value == new_value:
                vals.pop(field.name)

    return super().create(vals_list)
|
||||
|
||||
def action_open_template_user(self):
    """Open the form view of the portal template user.

    :raises UserError: if the configured template user no longer exists
    :return: an act_window action dict targeting that user
    """
    action = self.env["ir.actions.actions"]._for_xml_id("base.action_res_users")
    # the template user id is stored as a Python literal in a system parameter
    template_user_id = literal_eval(self.env['ir.config_parameter'].sudo().get_param('base.template_portal_user_id', 'False'))
    template_user = self.env['res.users'].browse(template_user_id)
    if not template_user.exists():
        raise UserError(_('Invalid template user. It seems it has been deleted.'))
    action['res_id'] = template_user_id
    action['views'] = [[self.env.ref('base.view_users_form').id, 'form']]
    return action
|
||||
|
|
@ -0,0 +1,223 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import re
|
||||
import logging
|
||||
from odoo import api, fields, models, tools
|
||||
from odoo.osv import expression
|
||||
from odoo.exceptions import UserError
|
||||
from psycopg2 import IntegrityError
|
||||
from odoo.tools.translate import _
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Territories rendered with another jurisdiction's flag image
# (keys are ISO country codes, values are the flag file to use instead).
FLAG_MAPPING = {
    "GF": "fr",
    "BV": "no",
    "BQ": "nl",
    "GP": "fr",
    "HM": "au",
    "YT": "fr",
    "RE": "fr",
    "MF": "fr",
    "UM": "us",
}

# Countries for which no flag image is available at all.
NO_FLAG_COUNTRIES = [
    "AQ", #Antarctica
    "SJ", #Svalbard + Jan Mayen : separate jurisdictions : no dedicated flag
]
|
||||
|
||||
|
||||
class Country(models.Model):
    """ISO country, with address formatting rules and flag image."""
    _name = 'res.country'
    _description = 'Country'
    _order = 'name'

    name = fields.Char(
        string='Country Name', required=True, translate=True)
    code = fields.Char(
        string='Country Code', size=2,
        help='The ISO country code in two chars. \nYou can use this field for quick search.')
    address_format = fields.Text(string="Layout in Reports",
        help="Display format to use for addresses belonging to this country.\n\n"
             "You can use python-style string pattern with all the fields of the address "
             "(for example, use '%(street)s' to display the field 'street') plus"
             "\n%(state_name)s: the name of the state"
             "\n%(state_code)s: the code of the state"
             "\n%(country_name)s: the name of the country"
             "\n%(country_code)s: the code of the country",
        default='%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s')
    address_view_id = fields.Many2one(
        comodel_name='ir.ui.view', string="Input View",
        domain=[('model', '=', 'res.partner'), ('type', '=', 'form')],
        help="Use this field if you want to replace the usual way to encode a complete address. "
             "Note that the address_format field is used to modify the way to display addresses "
             "(in reports for example), while this field is used to modify the input form for "
             "addresses.")
    currency_id = fields.Many2one('res.currency', string='Currency')
    image_url = fields.Char(
        compute="_compute_image_url", string="Flag",
        help="Url of static flag image",
    )
    phone_code = fields.Integer(string='Country Calling Code')
    country_group_ids = fields.Many2many('res.country.group', 'res_country_res_country_group_rel',
                                         'res_country_id', 'res_country_group_id', string='Country Groups')
    state_ids = fields.One2many('res.country.state', 'country_id', string='States')
    name_position = fields.Selection([
        ('before', 'Before Address'),
        ('after', 'After Address'),
    ], string="Customer Name Position", default="before",
        help="Determines where the customer/company name should be placed, i.e. after or before the address.")
    vat_label = fields.Char(string='Vat Label', translate=True, prefetch=True, help="Use this field if you want to change vat label.")

    # address validation flags used by partner forms
    state_required = fields.Boolean(default=False)
    zip_required = fields.Boolean(default=True)

    _sql_constraints = [
        ('name_uniq', 'unique (name)',
            'The name of the country must be unique !'),
        ('code_uniq', 'unique (code)',
            'The code of the country must be unique !')
    ]

    def _name_search(self, name='', args=None, operator='ilike', limit=100, name_get_uid=None):
        """Search countries by name; a 2-letter term also matches the ISO code
        first, so code matches appear before name matches."""
        if args is None:
            args = []

        ids = []
        if len(name) == 2:
            # exactly two characters: likely an ISO code, try that first
            ids = list(self._search([('code', 'ilike', name)] + args, limit=limit))

        search_domain = [('name', operator, name)]
        if ids:
            # avoid returning the code matches twice
            search_domain.append(('id', 'not in', ids))
        ids += list(self._search(search_domain + args, limit=limit))

        return ids

    @api.model
    @tools.ormcache('code')
    def _phone_code_for(self, code):
        """Return the calling code of the country with the given ISO code (cached)."""
        return self.search([('code', '=', code)]).phone_code

    @api.model_create_multi
    def create(self, vals_list):
        # ISO codes are stored uppercase
        for vals in vals_list:
            if vals.get('code'):
                vals['code'] = vals['code'].upper()
        return super(Country, self).create(vals_list)

    def write(self, vals):
        # ISO codes are stored uppercase
        if vals.get('code'):
            vals['code'] = vals['code'].upper()
        res = super().write(vals)
        if ('code' in vals or 'phone_code' in vals):
            # Intentionally simplified by not clearing the cache in create and unlink.
            self.clear_caches()
        if 'address_view_id' in vals:
            # Changing the address view of the company must invalidate the view cached for res.partner
            # because of _view_get_address
            self.env['res.partner'].clear_caches()
        return res

    def get_address_fields(self):
        """Return the %(...)s placeholder names used in this country's address format."""
        self.ensure_one()
        return re.findall(r'\((.+?)\)', self.address_format)

    @api.depends('code')
    def _compute_image_url(self):
        # map dependent territories onto their parent's flag; some countries
        # have no flag asset at all
        for country in self:
            if not country.code or country.code in NO_FLAG_COUNTRIES:
                country.image_url = False
            else:
                code = FLAG_MAPPING.get(country.code, country.code.lower())
                country.image_url = "/base/static/img/country_flags/%s.png" % code

    @api.constrains('address_format')
    def _check_address_format(self):
        """Reject address formats whose placeholders are not known address fields."""
        for record in self:
            if record.address_format:
                address_fields = self.env['res.partner']._formatting_address_fields() + ['state_code', 'state_name', 'country_code', 'country_name', 'company_name']
                try:
                    # dry-run the %-formatting with dummy values
                    record.address_format % {i: 1 for i in address_fields}
                except (ValueError, KeyError):
                    raise UserError(_('The layout contains an invalid format key'))

    @api.constrains('code')
    def _check_country_code(self):
        """The ISO code is mandatory (the SQL column itself is not NOT NULL)."""
        for record in self:
            if not record.code:
                raise UserError(_('Country code cannot be empty'))
|
||||
|
||||
class CountryGroup(models.Model):
    """Named set of countries (e.g. for pricelists or fiscal rules)."""
    _description = "Country Group"
    _name = 'res.country.group'

    name = fields.Char(required=True, translate=True)
    # inverse of res.country.country_group_ids (same relation table)
    country_ids = fields.Many2many('res.country', 'res_country_res_country_group_rel',
                                   'res_country_group_id', 'res_country_id', string='Countries')
|
||||
|
||||
|
||||
class CountryState(models.Model):
    """First-level administrative division of a country."""
    _description = "Country state"
    _name = 'res.country.state'
    _order = 'code'

    country_id = fields.Many2one('res.country', string='Country', required=True)
    name = fields.Char(string='State Name', required=True,
                       help='Administrative divisions of a country. E.g. Fed. State, Departement, Canton')
    code = fields.Char(string='State Code', help='The state code.', required=True)

    _sql_constraints = [
        ('name_code_uniq', 'unique(country_id, code)', 'The code of the state must be unique by country !')
    ]

    @api.model
    def _name_search(self, name, args=None, operator='ilike', limit=100, name_get_uid=None):
        """Search states by code (exact-ish) then by name, optionally limited
        to context['country_id']; falls back to parsing "Name (Country)"
        display names when nothing matches."""
        args = args or []
        if self.env.context.get('country_id'):
            args = expression.AND([args, [('country_id', '=', self.env.context.get('country_id'))]])

        if operator == 'ilike' and not (name or '').strip():
            # empty search term: return everything matching args
            first_domain = []
            domain = []
        else:
            first_domain = [('code', '=ilike', name)]
            domain = [('name', operator, name)]

        # fallback: the term may be a full display name like "Bavaria (DE)"
        fallback_domain = None
        if name and operator in ['ilike', '=']:
            fallback_domain = self._get_name_search_domain(name, operator)

        if name and operator in ['in', 'any']:
            fallback_domain = expression.OR([self._get_name_search_domain(n, '=') for n in name])

        first_state_ids = self._search(expression.AND([first_domain, args]), limit=limit, access_rights_uid=name_get_uid) if first_domain else []
        # code matches first, then name matches (deduplicated); if neither
        # found anything, try the "Name (Country)" fallback domain
        return list(first_state_ids) + [
            state_id
            for state_id in self._search(expression.AND([domain, args]),
                                         limit=limit, access_rights_uid=name_get_uid)
            if state_id not in first_state_ids
        ] or (
            list(self._search(expression.AND([fallback_domain, args]), limit=limit))
            if fallback_domain
            else []
        )

    def _get_name_search_domain(self, name, operator):
        """Parse a "State Name (Country)" string into a search domain, or
        return None when the string does not have that shape."""
        m = re.fullmatch(r"(?P<name>.+)\((?P<country>.+)\)", name)
        if m:
            # the parenthesised part may be a country name or an ISO code
            return [
                ('name', operator, m['name'].strip()),
                '|', ('country_id.name', 'ilike', m['country'].strip()),
                ('country_id.code', '=', m['country'].strip()),
            ]
        return None


    def name_get(self):
        """Display states as "Name (CC)" with the country's ISO code."""
        result = []
        for record in self:
            result.append((record.id, "{} ({})".format(record.name, record.country_id.code)))
        return result
|
||||
|
|
@ -0,0 +1,495 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import logging
|
||||
import math
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools import parse_date
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# num2words is optional: amount-to-text degrades gracefully without it
try:
    from num2words import num2words
except ImportError:
    _logger.warning("The num2words python library is not installed, amount-to-text features won't be fully available.")
    num2words = None
|
||||
|
||||
|
||||
class Currency(models.Model):
    """Currency with its conversion rates against the reference currency."""
    _name = "res.currency"
    _description = "Currency"
    _rec_names_search = ['name', 'full_name']
    _order = 'active desc, name'

    # Note: 'code' column was removed as of v6.0, the 'name' should now hold the ISO code.
    name = fields.Char(string='Currency', size=3, required=True, help="Currency Code (ISO 4217)")
    full_name = fields.Char(string='Name')
    symbol = fields.Char(help="Currency sign, to be used when printing amounts.", required=True)
    rate = fields.Float(compute='_compute_current_rate', string='Current Rate', digits=0,
                        help='The rate of the currency to the currency of rate 1.')
    inverse_rate = fields.Float(compute='_compute_current_rate', digits=0, readonly=True,
                                help='The currency of rate 1 to the rate of the currency.')
    rate_string = fields.Char(compute='_compute_current_rate')
    rate_ids = fields.One2many('res.currency.rate', 'currency_id', string='Rates')
    rounding = fields.Float(string='Rounding Factor', digits=(12, 6), default=0.01,
                            help='Amounts in this currency are rounded off to the nearest multiple of the rounding factor.')
    decimal_places = fields.Integer(compute='_compute_decimal_places', store=True,
                                    help='Decimal places taken into account for operations on amounts in this currency. It is determined by the rounding factor.')
    active = fields.Boolean(default=True)
    position = fields.Selection([('after', 'After Amount'), ('before', 'Before Amount')], default='after',
                                string='Symbol Position', help="Determines where the currency symbol should be placed after or before the amount.")
    date = fields.Date(compute='_compute_date')
    currency_unit_label = fields.Char(string="Currency Unit")
    currency_subunit_label = fields.Char(string="Currency Subunit")
    is_current_company_currency = fields.Boolean(compute='_compute_is_current_company_currency')

    _sql_constraints = [
        ('unique_name', 'unique (name)', 'The currency code must be unique!'),
        ('rounding_gt_zero', 'CHECK (rounding>0)', 'The rounding factor must be greater than 0!')
    ]
|
||||
|
||||
@api.model_create_multi
def create(self, vals_list):
    """Create currencies, then re-evaluate the multi-currency user group."""
    records = super().create(vals_list)
    self._toggle_group_multi_currency()
    return records
|
||||
|
||||
def unlink(self):
    """Delete currencies, then re-evaluate the multi-currency user group."""
    result = super().unlink()
    self._toggle_group_multi_currency()
    return result
|
||||
|
||||
def write(self, vals):
|
||||
res = super().write(vals)
|
||||
if 'active' not in vals:
|
||||
return res
|
||||
self._toggle_group_multi_currency()
|
||||
return res
|
||||
|
||||
@api.model
def _toggle_group_multi_currency(self):
    """
    Automatically activate group_multi_currency if there is more than 1 active currency; deactivate it otherwise
    """
    if self.search_count([('active', '=', True)]) > 1:
        self._activate_group_multi_currency()
    else:
        self._deactivate_group_multi_currency()
|
||||
|
||||
@api.model
def _activate_group_multi_currency(self):
    """Add the multi-currency group to internal users' implied groups.

    No-op when either xml id is missing (e.g. during partial installs).
    """
    users_group = self.env.ref('base.group_user', raise_if_not_found=False)
    mc_group = self.env.ref('base.group_multi_currency', raise_if_not_found=False)
    if users_group and mc_group:
        users_group.sudo()._apply_group(mc_group)
|
||||
|
||||
@api.model
def _deactivate_group_multi_currency(self):
    """Remove the multi-currency group from internal users' implied groups.

    No-op when either xml id is missing (e.g. during partial installs).
    """
    users_group = self.env.ref('base.group_user', raise_if_not_found=False)
    mc_group = self.env.ref('base.group_multi_currency', raise_if_not_found=False)
    if users_group and mc_group:
        users_group.sudo()._remove_group(mc_group.sudo())
|
||||
|
||||
@api.constrains('active')
def _check_company_currency_stays_active(self):
    """Prevent deactivating a currency that is still used by a company."""
    if self._context.get('install_mode') or self._context.get('force_deactivate'):
        # install_mode : At install, when this check is run, the "active" field of a currency added to a company will
        # still be evaluated as False, despite it's automatically set at True when added to the company.
        # force_deactivate : Allows deactivation of a currency in tests to enable non multi_currency behaviors
        return

    currencies = self.filtered(lambda c: not c.active)
    if self.env['res.company'].search([('currency_id', 'in', currencies.ids)]):
        raise UserError(_("This currency is set on a company and therefore cannot be deactivated."))
|
||||
|
||||
def _get_rates(self, company, date):
    """Return {currency_id: rate} for the currencies in ``self``, as of
    ``date`` for ``company`` (company-specific rates win over shared ones;
    missing rates fall back to the earliest rate, then to 1.0)."""
    if not self.ids:
        return {}
    # make sure pending rate writes are visible to the raw SQL below
    self.env['res.currency.rate'].flush_model(['rate', 'currency_id', 'company_id', 'name'])
    query = """
        SELECT c.id,
               COALESCE(
                   ( -- take the first rate before the given date
                       SELECT r.rate
                       FROM res_currency_rate r
                       WHERE r.currency_id = c.id
                         AND r.name <= %(date)s
                         AND (r.company_id IS NULL OR r.company_id = %(company_id)s)
                       ORDER BY r.company_id, r.name DESC
                       LIMIT 1
                   ),
                   ( -- if no rate is found, take the rate for the very first date
                       SELECT r.rate
                       FROM res_currency_rate r
                       WHERE r.currency_id = c.id
                         AND (r.company_id IS NULL OR r.company_id = %(company_id)s)
                       ORDER BY r.company_id, r.name ASC
                       LIMIT 1
                   ),
                   1.0 -- fallback to 1
               ) AS rate
        FROM res_currency c
        WHERE c.id IN %(currency_ids)s
    """
    self._cr.execute(query, {
        'date': date,
        'company_id': company.id,
        'currency_ids': tuple(self.ids),
    })
    currency_rates = dict(self._cr.fetchall())
    return currency_rates
|
||||
|
||||
@api.depends_context('company')
def _compute_is_current_company_currency(self):
    """Flag the currency used by the environment's current company."""
    company_currency = self.env.company.currency_id
    for currency in self:
        currency.is_current_company_currency = currency == company_currency
|
||||
|
||||
@api.depends('rate_ids.rate')
def _compute_current_rate(self):
    """Compute ``rate``, ``inverse_rate`` and ``rate_string`` relative to the
    (context) company's currency on the (context) date.

    ``rate`` is normalized by the company's own last rate so that the
    company currency effectively has a rate of 1.
    """
    date = self._context.get('date') or fields.Date.context_today(self)
    company = self.env['res.company'].browse(self._context.get('company_id')) or self.env.company
    # the subquery selects the last rate before 'date' for the given currency/company
    currency_rates = self._get_rates(company, date)
    last_rate = self.env['res.currency.rate']._get_last_rates_for_companies(company)
    for currency in self:
        # Both factors are guaranteed non-zero (fallback 1.0 / "or 1"),
        # so the divisions below cannot raise ZeroDivisionError.
        currency.rate = (currency_rates.get(currency.id) or 1.0) / last_rate[company]
        currency.inverse_rate = 1 / currency.rate
        if currency != company.currency_id:
            currency.rate_string = '1 %s = %.6f %s' % (company.currency_id.name, currency.rate, currency.name)
        else:
            # No point displaying "1 X = 1.000000 X" for the company currency.
            currency.rate_string = ''
|
||||
|
||||
@api.depends('rounding')
def _compute_decimal_places(self):
    """Derive the number of decimal digits from the rounding factor
    (e.g. a rounding of 0.01 yields 2 decimal places)."""
    for record in self:
        rounding = record.rounding
        if 0 < rounding < 1:
            record.decimal_places = int(math.ceil(math.log10(1 / rounding)))
        else:
            # Rounding of 0, 1 or more means whole units only.
            record.decimal_places = 0
|
||||
|
||||
@api.depends('rate_ids.name')
def _compute_date(self):
    """Expose the date of the latest rate; rates are ordered by date desc,
    so the first one is the most recent (False when there is none)."""
    for record in self:
        latest_rate = record.rate_ids[:1]
        record.date = latest_rate.name
|
||||
|
||||
def name_get(self):
    """Display each currency by its ISO name alone."""
    result = []
    for record in self:
        result.append((record.id, tools.ustr(record.name)))
    return result
|
||||
|
||||
def amount_to_text(self, amount):
    """Return ``amount`` spelled out in words in the user's language,
    using the currency's unit and subunit labels
    (e.g. "One Hundred Euros and Five Cents").

    :param float amount: the amount to convert
    :return: the textual amount, or "" when num2words is unavailable
    """
    self.ensure_one()

    def _num2words(number, lang):
        # Fall back to English when num2words does not support the language.
        try:
            return num2words(number, lang=lang).title()
        except NotImplementedError:
            return num2words(number, lang='en').title()

    if num2words is None:
        logging.getLogger(__name__).warning("The library 'num2words' is missing, cannot render textual amounts.")
        return ""

    # Round to the currency precision, then split into units / subunits.
    formatted = "%.{0}f".format(self.decimal_places) % amount
    parts = formatted.partition('.')
    integer_value = int(parts[0])
    fractional_value = int(parts[2] or 0)

    lang = tools.get_lang(self.env)
    amount_words = tools.ustr('{amt_value} {amt_word}').format(
        amt_value=_num2words(integer_value, lang=lang.iso_code),
        amt_word=self.currency_unit_label,
    )
    # Only mention subunits when the fractional part is significant
    # at the currency's precision.
    if not self.is_zero(amount - integer_value):
        amount_words += ' ' + _('and') + tools.ustr(' {amt_value} {amt_word}').format(
            amt_value=_num2words(fractional_value, lang=lang.iso_code),
            amt_word=self.currency_subunit_label,
        )
    return amount_words
|
||||
|
||||
def format(self, amount):
    """Return ``amount`` formatted according to ``self``'s rounding rules, symbols and positions.

    Also take care of removing the minus sign when 0.0 is negative
    (``amount + 0.0`` normalizes -0.0 to 0.0).

    :param float amount: the amount to round
    :return: formatted str
    """
    self.ensure_one()
    return tools.format_amount(self.env, amount + 0.0, self)
|
||||
|
||||
def round(self, amount):
    """Return ``amount`` rounded according to ``self``'s rounding rules.

    :param float amount: the amount to round
    :return: rounded float
    """
    self.ensure_one()
    return tools.float_round(amount, precision_rounding=self.rounding)
|
||||
|
||||
def compare_amounts(self, amount1, amount2):
    """Compare ``amount1`` and ``amount2`` after rounding them according to the
    given currency's precision.

    An amount is considered lower/greater than another amount if their rounded
    value is different. This is not the same as having a non-zero difference!

    For example 1.432 and 1.431 are equal at 2 digits precision,
    so this method would return 0.
    However 0.006 and 0.002 are considered different (returns 1) because
    they respectively round to 0.01 and 0.0, even though
    0.006-0.002 = 0.004 which would be considered zero at 2 digits precision.

    :param float amount1: first amount to compare
    :param float amount2: second amount to compare
    :return: (resp.) -1, 0 or 1, if ``amount1`` is (resp.) lower than,
             equal to, or greater than ``amount2``, according to
             ``currency``'s rounding.

    With the new API, call it like: ``currency.compare_amounts(amount1, amount2)``.
    """
    self.ensure_one()
    return tools.float_compare(amount1, amount2, precision_rounding=self.rounding)
|
||||
|
||||
def is_zero(self, amount):
    """Returns true if ``amount`` is small enough to be treated as
    zero according to current currency's rounding rules.

    Warning: ``is_zero(amount1-amount2)`` is not always equivalent to
    ``compare_amounts(amount1,amount2) == 0``, as the former will round after
    computing the difference, while the latter will round before, giving
    different results for e.g. 0.006 and 0.002 at 2 digits precision.

    :param float amount: amount to compare with currency's zero

    With the new API, call it like: ``currency.is_zero(amount)``.
    """
    self.ensure_one()
    return tools.float_is_zero(amount, precision_rounding=self.rounding)
|
||||
|
||||
@api.model
def _get_conversion_rate(self, from_currency, to_currency, company, date):
    """Return the multiplicative factor converting ``from_currency`` amounts
    into ``to_currency`` amounts for ``company`` on ``date``.

    Both rates come from ``_get_rates``, which always yields a non-zero
    value (fallback 1.0), so the division is safe.
    """
    currency_rates = (from_currency + to_currency)._get_rates(company, date)
    res = currency_rates.get(to_currency.id) / currency_rates.get(from_currency.id)
    return res
|
||||
|
||||
def _convert(self, from_amount, to_currency, company, date, round=True):
    """Returns the converted amount of ``from_amount``` from the currency
    ``self`` to the currency ``to_currency`` for the given ``date`` and
    company.

    :param company: The company from which we retrieve the conversion rate
    :param date: The nearest date from which we retrieve the conversion rate.
    :param round: Round the result or not
    """
    # If one side is empty, convert within a single currency (no-op rate).
    self, to_currency = self or to_currency, to_currency or self
    assert self, "convert amount from unknown currency"
    assert to_currency, "convert amount to unknown currency"
    assert company, "convert amount from unknown company"
    assert date, "convert amount from unknown date"
    # apply conversion rate
    if self == to_currency:
        to_amount = from_amount
    elif from_amount:
        to_amount = from_amount * self._get_conversion_rate(self, to_currency, company, date)
    else:
        # Zero converts to zero; skip the rate lookup entirely.
        return 0.0

    # apply rounding
    return to_currency.round(to_amount) if round else to_amount
|
||||
|
||||
@api.model
def _compute(self, from_currency, to_currency, from_amount, round=True):
    """Deprecated wrapper around :meth:`_convert`; kept for backward
    compatibility. Company and date are taken from the context.

    Note: ``round`` is accepted but not forwarded to ``_convert``.
    """
    _logger.warning('The `_compute` method is deprecated. Use `_convert` instead')
    date = self._context.get('date') or fields.Date.today()
    company = self.env['res.company'].browse(self._context.get('company_id')) or self.env.company
    return from_currency._convert(from_amount, to_currency, company, date)
|
||||
|
||||
def compute(self, from_amount, to_currency, round=True):
    """Deprecated wrapper around :meth:`_convert`; kept for backward
    compatibility. Company and date are taken from the context.

    Note: ``round`` is accepted but not forwarded to ``_convert``.
    """
    _logger.warning('The `compute` method is deprecated. Use `_convert` instead')
    date = self._context.get('date') or fields.Date.today()
    company = self.env['res.company'].browse(self._context.get('company_id')) or self.env.company
    return self._convert(from_amount, to_currency, company, date)
|
||||
|
||||
def _select_companies_rates(self):
    """Return a SQL fragment selecting, per currency and company, each rate
    together with its validity window [date_start, date_end).

    ``date_end`` is the date of the next rate for the same currency/company
    (NULL for the latest rate). Company-less rates apply to every company.
    """
    return """
        SELECT
            r.currency_id,
            COALESCE(r.company_id, c.id) as company_id,
            r.rate,
            r.name AS date_start,
            (SELECT name FROM res_currency_rate r2
             WHERE r2.name > r.name AND
                   r2.currency_id = r.currency_id AND
                   (r2.company_id is null or r2.company_id = c.id)
             ORDER BY r2.name ASC
             LIMIT 1) AS date_end
        FROM res_currency_rate r
        JOIN res_company c ON (r.company_id is null or r.company_id = c.id)
    """
|
||||
|
||||
@api.model
def _get_view_cache_key(self, view_id=None, view_type='form', **options):
    """The override of _get_view changing the rate field labels according to the company currency
    makes the view cache dependent on the company currency"""
    key = super()._get_view_cache_key(view_id, view_type, **options)
    # Append the company currency name so each currency gets its own cache slot.
    return key + ((self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name,)
|
||||
|
||||
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
    """Relabel the embedded rate columns with the company currency name
    (e.g. "Unit per USD" / "USD per Unit") in tree and form views.
    """
    arch, view = super()._get_view(view_id, view_type, **options)
    if view_type in ('tree', 'form'):
        currency_name = (self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name
        for field in [['company_rate', _('Unit per %s', currency_name)],
                      ['inverse_company_rate', _('%s per Unit', currency_name)]]:
            # Only patch the label when the field is actually present in the arch.
            node = arch.xpath("//tree//field[@name='%s']" % field[0])
            if node:
                node[0].set('string', field[1])
    return arch, view
|
||||
|
||||
|
||||
class CurrencyRate(models.Model):
    """Dated exchange rate of a currency, optionally scoped to one company."""
    _name = "res.currency.rate"
    _description = "Currency Rate"
    _rec_names_search = ['name', 'rate']
    _order = "name desc"  # most recent rate first

    # Date the rate takes effect; one rate per day per currency/company.
    name = fields.Date(string='Date', required=True, index=True,
                       default=fields.Date.context_today)
    # Raw stored rate: value of the currency relative to the rate-1 currency.
    rate = fields.Float(
        digits=0,
        group_operator="avg",
        help='The rate of the currency to the currency of rate 1',
        string='Technical Rate'
    )
    # User-facing rate, normalized by the company's own last rate.
    company_rate = fields.Float(
        digits=0,
        compute="_compute_company_rate",
        inverse="_inverse_company_rate",
        group_operator="avg",
        help="The currency of rate 1 to the rate of the currency.",
    )
    # Reciprocal of company_rate, for display/editing convenience.
    inverse_company_rate = fields.Float(
        digits=0,
        compute="_compute_inverse_company_rate",
        inverse="_inverse_inverse_company_rate",
        group_operator="avg",
        help="The rate of the currency to the currency of rate 1 ",
    )
    currency_id = fields.Many2one('res.currency', string='Currency', readonly=True, required=True, ondelete="cascade")
    # Empty company means the rate is shared by all companies.
    company_id = fields.Many2one('res.company', string='Company',
                                 default=lambda self: self.env.company)

    _sql_constraints = [
        ('unique_name_per_day', 'unique (name,currency_id,company_id)', 'Only one currency rate per day allowed!'),
        ('currency_rate_check', 'CHECK (rate>0)', 'The currency rate must be strictly positive.'),
    ]
|
||||
|
||||
def _sanitize_vals(self, vals):
    """Keep only the most authoritative of the three redundant rate keys.

    ``rate`` wins over ``company_rate``, which wins over
    ``inverse_company_rate``; the weaker keys are dropped so the inverse
    computations do not fight each other.
    """
    has_rate = 'rate' in vals
    has_company_rate = 'company_rate' in vals
    if 'inverse_company_rate' in vals and (has_company_rate or has_rate):
        vals.pop('inverse_company_rate')
    if has_company_rate and has_rate:
        vals.pop('company_rate')
    return vals
|
||||
|
||||
def write(self, vals):
    """Write after dropping redundant rate keys (see _sanitize_vals)."""
    return super().write(self._sanitize_vals(vals))
|
||||
|
||||
@api.model_create_multi
def create(self, vals_list):
    """Create after dropping redundant rate keys (see _sanitize_vals)."""
    return super().create([self._sanitize_vals(vals) for vals in vals_list])
|
||||
|
||||
def _get_latest_rate(self):
    """Return the most recent rate of this currency strictly before
    ``self.name`` for this record's company (empty recordset when none).

    :raises UserError: if the record has no date set.
    """
    # Make sure 'name' is defined when creating a new rate.
    if not self.name:
        raise UserError(_("The name for the current rate is empty.\nPlease set it."))
    return self.currency_id.rate_ids.sudo().filtered(lambda x: (
        x.rate
        and x.company_id == (self.company_id or self.env.company)
        and x.name < (self.name or fields.Date.today())
    )).sorted('name')[-1:]
|
||||
|
||||
def _get_last_rates_for_companies(self, companies):
    """Return, per company, the last known rate of that company's own
    currency (1 when no rate exists).

    NOTE(review): the lambda parses as
    ``(x.rate and x.company_id == company) or (not x.company_id)`` due to
    and/or precedence, so a company-less record passes even with a falsy
    rate — confirm whether ``x.rate and (x.company_id == company or not
    x.company_id)`` was intended before changing anything.
    """
    return {
        company: company.currency_id.rate_ids.sudo().filtered(lambda x: (
            x.rate
            and x.company_id == company or not x.company_id
        )).sorted('name')[-1:].rate or 1
        for company in companies
    }
|
||||
|
||||
@api.depends('currency_id', 'company_id', 'name')
def _compute_rate(self):
    """Default the technical rate: keep an explicit value, else carry over
    the latest previous rate, else fall back to 1.0."""
    for currency_rate in self:
        currency_rate.rate = currency_rate.rate or currency_rate._get_latest_rate().rate or 1.0
|
||||
|
||||
@api.depends('rate', 'name', 'currency_id', 'company_id', 'currency_id.rate_ids.rate')
@api.depends_context('company')
def _compute_company_rate(self):
    """Compute the user-facing rate: the technical rate normalized by the
    last rate of the record's company (so the company currency reads 1)."""
    last_rate = self.env['res.currency.rate']._get_last_rates_for_companies(self.company_id | self.env.company)
    for currency_rate in self:
        # Company-less (shared) rates are normalized by the current company.
        company = currency_rate.company_id or self.env.company
        currency_rate.company_rate = (currency_rate.rate or currency_rate._get_latest_rate().rate or 1.0) / last_rate[company]
|
||||
|
||||
@api.onchange('company_rate')
def _inverse_company_rate(self):
    """Write back the technical rate from the user-facing ``company_rate``
    (inverse of _compute_company_rate)."""
    last_rate = self.env['res.currency.rate']._get_last_rates_for_companies(self.company_id | self.env.company)
    for currency_rate in self:
        company = currency_rate.company_id or self.env.company
        currency_rate.rate = currency_rate.company_rate * last_rate[company]
|
||||
|
||||
@api.depends('company_rate')
def _compute_inverse_company_rate(self):
    """Compute the reciprocal of ``company_rate``.

    NOTE(review): this compute also writes its *dependency*
    (``company_rate = 1.0`` when falsy) to avoid a division by zero —
    looks deliberate, but confirm it does not retrigger the computation.
    """
    for currency_rate in self:
        if not currency_rate.company_rate:
            currency_rate.company_rate = 1.0
        currency_rate.inverse_company_rate = 1.0 / currency_rate.company_rate
|
||||
|
||||
@api.onchange('inverse_company_rate')
def _inverse_inverse_company_rate(self):
    """Write back ``company_rate`` from its reciprocal, defaulting a falsy
    value to 1.0 to avoid dividing by zero."""
    for currency_rate in self:
        if not currency_rate.inverse_company_rate:
            currency_rate.inverse_company_rate = 1.0
        currency_rate.company_rate = 1.0 / currency_rate.inverse_company_rate
|
||||
|
||||
@api.onchange('company_rate')
def _onchange_rate_warning(self):
    """Warn the user when the new rate deviates more than 20% from the
    previous one — a likely sign of a typo."""
    latest_rate = self._get_latest_rate()
    if latest_rate:
        # latest_rate.rate is non-zero: _get_latest_rate filters on x.rate.
        diff = (latest_rate.rate - self.rate) / latest_rate.rate
        if abs(diff) > 0.2:
            return {
                'warning': {
                    'title': _("Warning for %s", self.currency_id.name),
                    'message': _(
                        "The new rate is quite far from the previous rate.\n"
                        "Incorrect currency rates may cause critical problems, make sure the rate is correct !"
                    )
                }
            }
|
||||
|
||||
@api.model
def _name_search(self, name, args=None, operator='ilike', limit=100, name_get_uid=None):
    """Search rates by name after normalizing the user input through
    ``parse_date`` (presumably converts a localized date string into an
    ISO date — helper not visible here, TODO confirm)."""
    return super()._name_search(parse_date(self.env, name), args, operator, limit, name_get_uid)
|
||||
|
||||
@api.model
def _get_view_cache_key(self, view_id=None, view_type='form', **options):
    """The override of _get_view changing the rate field labels according to the company currency
    makes the view cache dependent on the company currency"""
    key = super()._get_view_cache_key(view_id, view_type, **options)
    # Append the company currency name so each currency gets its own cache slot.
    return key + ((self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name,)
|
||||
|
||||
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
    """Relabel the rate columns of the list view using the company currency
    and the currency whose rates are displayed (taken from ``active_id``).

    :return: the (possibly patched) arch and the view record.
    """
    arch, view = super()._get_view(view_id, view_type, **options)
    # BUG FIX: the original tested `view_type in ('tree')` — parentheses do
    # not make a tuple, so that was a *substring* test against 'tree'.
    # No real view type other than 'tree' matched, but the intent is clearly
    # an equality test.
    if view_type == 'tree':
        names = {
            'company_currency_name': (self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name,
            'rate_currency_name': self.env['res.currency'].browse(self._context.get('active_id')).name or 'Unit',
        }
        for field in [['company_rate', _('%(rate_currency_name)s per %(company_currency_name)s', **names)],
                      ['inverse_company_rate', _('%(company_currency_name)s per %(rate_currency_name)s', **names)]]:
            # Only patch the label when the field is present in the arch.
            node = arch.xpath("//tree//field[@name='%s']" % field[0])
            if node:
                node[0].set('string', field[1])
    return arch, view
|
||||
431
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_lang.py
Normal file
431
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_lang.py
Normal file
|
|
@ -0,0 +1,431 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import ast
|
||||
import json
|
||||
import locale
|
||||
import logging
|
||||
import re
|
||||
from operator import itemgetter
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_DATE_FORMAT = '%m/%d/%Y'
|
||||
DEFAULT_TIME_FORMAT = '%H:%M:%S'
|
||||
|
||||
|
||||
class Lang(models.Model):
    """Installable language with its locale formatting conventions."""
    _name = "res.lang"
    _description = "Languages"
    _order = "active desc,name"

    # All strftime directives that are not portable; '%y' is re-allowed below.
    _disallowed_datetime_patterns = list(tools.DATETIME_FORMATS_MAP)
    _disallowed_datetime_patterns.remove('%y')  # this one is in fact allowed, just not good practice

    name = fields.Char(required=True)
    code = fields.Char(string='Locale Code', required=True, help='This field is used to set/get locales for user')
    iso_code = fields.Char(string='ISO code', help='This ISO code is the name of po files to use for translations')
    url_code = fields.Char('URL Code', required=True, help='The Lang Code displayed in the URL')
    active = fields.Boolean()
    direction = fields.Selection([('ltr', 'Left-to-Right'), ('rtl', 'Right-to-Left')], required=True, default='ltr')
    date_format = fields.Char(string='Date Format', required=True, default=DEFAULT_DATE_FORMAT)
    time_format = fields.Char(string='Time Format', required=True, default=DEFAULT_TIME_FORMAT)
    week_start = fields.Selection([('1', 'Monday'),
                                   ('2', 'Tuesday'),
                                   ('3', 'Wednesday'),
                                   ('4', 'Thursday'),
                                   ('5', 'Friday'),
                                   ('6', 'Saturday'),
                                   ('7', 'Sunday')], string='First Day of Week', required=True, default='7')
    grouping = fields.Char(string='Separator Format', required=True, default='[]',
        help="The Separator Format should be like [,n] where 0 < n :starting from Unit digit. "
             "-1 will end the separation. e.g. [3,2,-1] will represent 106500 to be 1,06,500; "
             "[1,2,-1] will represent it to be 106,50,0;[3] will represent it as 106,500. "
             "Provided ',' as the thousand separator in each case.")
    decimal_point = fields.Char(string='Decimal Separator', required=True, default='.', trim=False)
    thousands_sep = fields.Char(string='Thousands Separator', default=',', trim=False)

    @api.depends('code', 'flag_image')
    def _compute_field_flag_image_url(self):
        # Prefer the custom uploaded flag; otherwise derive the country flag
        # from the locale code's territory part (e.g. 'fr_BE' -> 'be.png').
        for lang in self:
            if lang.flag_image:
                lang.flag_image_url = f"/web/image/res.lang/{lang.id}/flag_image"
            else:
                lang.flag_image_url = f"/base/static/img/country_flags/{lang.code.lower().rsplit('_')[-1]}.png"

    flag_image = fields.Image("Image")
    flag_image_url = fields.Char(compute=_compute_field_flag_image_url)

    _sql_constraints = [
        ('name_uniq', 'unique(name)', 'The name of the language must be unique !'),
        ('code_uniq', 'unique(code)', 'The code of the language must be unique !'),
        ('url_code_uniq', 'unique(url_code)', 'The URL code of the language must be unique !'),
    ]
|
||||
|
||||
@api.constrains('active')
def _check_active(self):
    """Ensure at least one language stays active once the registry is ready.

    :raises ValidationError: when deactivation would leave no active language.
    """
    # do not check during installation
    if self.env.registry.ready and not self.search_count([]):
        raise ValidationError(_('At least one language must be active.'))
|
||||
|
||||
@api.constrains('time_format', 'date_format')
def _check_format(self):
    """Reject date/time formats containing non-portable strftime directives.

    :raises ValidationError: when a disallowed directive is used.
    """
    for lang in self:
        for pattern in lang._disallowed_datetime_patterns:
            if (lang.time_format and pattern in lang.time_format) or \
                    (lang.date_format and pattern in lang.date_format):
                raise ValidationError(_('Invalid date/time format directive specified. '
                                        'Please refer to the list of allowed directives, '
                                        'displayed when you edit a language.'))
|
||||
|
||||
@api.onchange('time_format', 'date_format')
def _onchange_format(self):
    """Auto-correct formats mixing a 24-hour directive (%H) with an AM/PM
    marker (%p) by switching to the 12-hour directive (%I), and warn."""
    warning = {
        'warning': {
            'title': _("Using 24-hour clock format with AM/PM can cause issues."),
            'message': _("Changing to 12-hour clock format instead."),
            'type': 'notification'
        }
    }
    for lang in self:
        if lang.date_format and "%H" in lang.date_format and "%p" in lang.date_format:
            lang.date_format = lang.date_format.replace("%H", "%I")
            # Returning stops at the first fix; in an onchange only the
            # current virtual record is in self anyway.
            return warning
        if lang.time_format and "%H" in lang.time_format and "%p" in lang.time_format:
            lang.time_format = lang.time_format.replace("%H", "%I")
            return warning
|
||||
|
||||
@api.constrains('grouping')
def _check_grouping(self):
    """Validate that ``grouping`` is a JSON list of ints (e.g. ``[3,2,-1]``).

    :raises ValidationError: when the value cannot be parsed or contains
        non-integer entries.
    """
    warning = _('The Separator Format should be like [,n] where 0 < n :starting from Unit digit. '
                '-1 will end the separation. e.g. [3,2,-1] will represent 106500 to be 1,06,500;'
                '[1,2,-1] will represent it to be 106,50,0;[3] will represent it as 106,500. '
                'Provided as the thousand separator in each case.')
    for lang in self:
        # FIX: the original raised ValidationError *inside* the try block,
        # so its own exception was caught by `except Exception` and
        # re-raised with a confusing chained traceback. Validate first,
        # raise once outside the try.
        try:
            valid = all(isinstance(x, int) for x in json.loads(lang.grouping))
        except Exception:
            # Unparseable or non-iterable value: treat as invalid.
            valid = False
        if not valid:
            raise ValidationError(warning)
|
||||
|
||||
def _register_hook(self):
    """Registry startup hook: log an error (without failing) when no
    language is active."""
    # check that there is at least one active language
    if not self.search_count([]):
        _logger.error("No language is active.")
|
||||
|
||||
# TODO remove me after v14
def load_lang(self, lang, lang_name=None):
    """Deprecated: activate (or create) the language and return its id.

    Use :meth:`_activate_lang` / :meth:`_create_lang` instead.
    """
    _logger.warning("Call to deprecated method load_lang, use _create_lang or _activate_lang instead")
    language = self._activate_lang(lang) or self._create_lang(lang, lang_name)
    return language.id
|
||||
|
||||
def _activate_lang(self, code):
    """ Activate languages
    :param code: code of the language to activate
    :return: the language matching 'code' activated (empty recordset if the
        language does not exist at all)
    """
    # active_test=False: find the language even when it is archived.
    lang = self.with_context(active_test=False).search([('code', '=', code)])
    if lang and not lang.active:
        lang.active = True
    return lang
|
||||
|
||||
def _create_lang(self, lang, lang_name=None):
    """ Create the given language and make it active.

    Formatting conventions (date/time formats, separators, grouping) are
    read from the system locale matching ``lang``; when no matching locale
    is installed, the process default locale is used and a warning logged.

    :param lang: locale code, e.g. 'fr_BE'
    :param lang_name: display name; defaults to the code itself
    :return: the newly created res.lang record
    """
    # create the language with locale information
    fail = True
    iso_lang = tools.get_iso_codes(lang)
    # Try every candidate locale spelling until one is installed.
    for ln in tools.get_locales(lang):
        try:
            locale.setlocale(locale.LC_ALL, str(ln))
            fail = False
            break
        except locale.Error:
            continue
    if fail:
        lc = locale.getlocale()[0]
        msg = 'Unable to get information for locale %s. Information from the default locale (%s) have been used.'
        _logger.warning(msg, lang, lc)

    if not lang_name:
        lang_name = lang

    def fix_xa0(s):
        """Fix badly-encoded non-breaking space Unicode character from locale.localeconv(),
           coercing to utf-8, as some platform seem to output localeconv() in their system
           encoding, e.g. Windows-1252"""
        if s == '\xa0':
            return '\xc2\xa0'
        return s

    def fix_datetime_format(format):
        """Python's strftime supports only the format directives
           that are available on the platform's libc, so in order to
           be 100% cross-platform we map to the directives required by
           the C standard (1989 version), always available on platforms
           with a C standard implementation."""
        # For some locales, nl_langinfo returns a D_FMT/T_FMT that contains
        # unsupported '%-' patterns, e.g. for cs_CZ
        format = format.replace('%-', '%')
        for pattern, replacement in tools.DATETIME_FORMATS_MAP.items():
            format = format.replace(pattern, replacement)
        return str(format)

    conv = locale.localeconv()
    lang_info = {
        'code': lang,
        'iso_code': iso_lang,
        'name': lang_name,
        'active': True,
        'date_format' : fix_datetime_format(locale.nl_langinfo(locale.D_FMT)),
        'time_format' : fix_datetime_format(locale.nl_langinfo(locale.T_FMT)),
        'decimal_point' : fix_xa0(str(conv['decimal_point'])),
        'thousands_sep' : fix_xa0(str(conv['thousands_sep'])),
        'grouping' : str(conv.get('grouping', [])),
    }
    try:
        return self.create(lang_info)
    finally:
        # Always restore the process locale, even if create() raises.
        tools.resetlocale()
|
||||
|
||||
@api.model
def install_lang(self):
    """

    This method is called from odoo/addons/base/data/res_lang_data.xml to load
    some language and set it as the default for every partners. The
    language is set via tools.config by the '_initialize_db' method on the
    'db' object. This is a fragile solution and something else should be
    found.

    :return: True (data-file hook convention)
    """
    # config['load_language'] is a comma-separated list or None
    lang_code = (tools.config.get('load_language') or 'en_US').split(',')[0]
    lang = self._activate_lang(lang_code) or self._create_lang(lang_code)
    IrDefault = self.env['ir.default']
    default_value = IrDefault.get('res.partner', 'lang')
    if default_value is None:
        IrDefault.set('res.partner', 'lang', lang_code)
        # set language of main company, created directly by db bootstrap SQL
        partner = self.env.company.partner_id
        if not partner.lang:
            partner.write({'lang': lang_code})
    return True
|
||||
|
||||
@tools.ormcache('code')
def _lang_get_id(self, code):
    """Cached lookup: id of the *active* language with this code (False if none)."""
    return self.with_context(active_test=True).search([('code', '=', code)]).id
|
||||
|
||||
@tools.ormcache('code')
def _lang_get_direction(self, code):
    """Cached lookup: writing direction ('ltr'/'rtl') of the active language
    with this code (False if none)."""
    return self.with_context(active_test=True).search([('code', '=', code)]).direction
|
||||
|
||||
@tools.ormcache('url_code')
def _lang_get_code(self, url_code):
    """Cached lookup: locale code for a URL code; falls back to the URL code
    itself when no active language matches."""
    return self.with_context(active_test=True).search([('url_code', '=', url_code)]).code or url_code
|
||||
|
||||
def _lang_get(self, code):
    """ Return the language using this code if it is active """
    return self.browse(self._lang_get_id(code))
|
||||
|
||||
@tools.ormcache('self.code', 'monetary')
def _data_get(self, monetary=False):
    """Cached formatting data of this language.

    :param monetary: kept for API compatibility / cache keying; the value
        is currently unused in the body.
    :return: tuple ``(grouping, thousands_sep, decimal_point)``
    """
    thousands_sep = self.thousands_sep or ''
    decimal_point = self.decimal_point
    grouping = self.grouping
    return grouping, thousands_sep, decimal_point
|
||||
|
||||
@api.model
@tools.ormcache()
def get_available(self):
    """ Return the available languages as a list of (code, url_code, name,
    active) sorted by name.
    """
    # active_test=False: include archived languages as well.
    langs = self.with_context(active_test=False).search([])
    return langs.get_sorted()
|
||||
|
||||
def get_sorted(self):
    """Return (code, url_code, name, active, flag_image_url) tuples for the
    languages in ``self``, sorted alphabetically by name."""
    entries = [
        (lang.code, lang.url_code, lang.name, lang.active, lang.flag_image_url)
        for lang in self
    ]
    entries.sort(key=itemgetter(2))
    return entries
|
||||
|
||||
@tools.ormcache('self.id')
def _get_cached_values(self):
    """Cached dict of this language's identifying fields
    (id, code, url_code, name)."""
    self.ensure_one()
    return {
        'id': self.id,
        'code': self.code,
        'url_code': self.url_code,
        'name': self.name,
    }
|
||||
|
||||
def _get_cached(self, field):
    """Return one value from the cached identifying fields (see
    :meth:`_get_cached_values`)."""
    return self._get_cached_values()[field]
|
||||
|
||||
@api.model
@tools.ormcache('code')
def _lang_code_to_urlcode(self, code):
    """Cached mapping from locale code to URL code, using the available
    languages list first and a direct lookup as fallback."""
    for c, urlc, name, *_ in self.get_available():
        if c == code:
            return urlc
    return self._lang_get(code).url_code
|
||||
|
||||
@api.model
@tools.ormcache()
def get_installed(self):
    """ Return the installed languages as a list of (code, name) sorted by name. """
    langs = self.with_context(active_test=True).search([])
    return sorted([(lang.code, lang.name) for lang in langs], key=itemgetter(1))
|
||||
|
||||
def toggle_active(self):
    """Toggle archived state and load translations for any language that
    just became active."""
    super().toggle_active()
    # Automatically load translation
    active_lang = [lang.code for lang in self.filtered(lambda l: l.active)]
    if active_lang:
        mods = self.env['ir.module.module'].search([('state', '=', 'installed')])
        mods._update_translations(active_lang)
|
||||
|
||||
@api.model_create_multi
def create(self, vals_list):
    """Create languages, defaulting ``url_code`` to the ISO code (or the
    locale code) and invalidating the ormcache-backed lookups."""
    self.clear_caches()
    for vals in vals_list:
        if not vals.get('url_code'):
            vals['url_code'] = vals.get('iso_code') or vals['code']
    return super(Lang, self).create(vals_list)
|
||||
|
||||
def write(self, vals):
    """Write with guards: the code is immutable, and a language cannot be
    archived while users/partners still use it.

    :raises UserError: on code change or forbidden deactivation.
    """
    lang_codes = self.mapped('code')
    if 'code' in vals and any(code != vals['code'] for code in lang_codes):
        raise UserError(_("Language code cannot be modified."))
    if vals.get('active') == False:
        if self.env['res.users'].with_context(active_test=True).search_count([('lang', 'in', lang_codes)], limit=1):
            raise UserError(_("Cannot deactivate a language that is currently used by users."))
        if self.env['res.partner'].with_context(active_test=True).search_count([('lang', 'in', lang_codes)], limit=1):
            raise UserError(_("Cannot deactivate a language that is currently used by contacts."))
        # active_test=False: archived users (e.g. system users) also block.
        if self.env['res.users'].with_context(active_test=False).search_count([('lang', 'in', lang_codes)], limit=1):
            raise UserError(_("You cannot archive the language in which Odoo was setup as it is used by automated processes."))
        # delete linked ir.default specifying default partner's language
        self.env['ir.default'].discard_values('res.partner', 'lang', lang_codes)

    res = super(Lang, self).write(vals)
    # Flush before clearing caches so stale values cannot be re-cached.
    self.env.flush_all()
    self.clear_caches()
    return res
|
||||
|
||||
@api.ondelete(at_uninstall=True)
def _unlink_except_default_lang(self):
    """Block deletion of en_US, the user's current language, or any still
    active language.

    :raises UserError: when one of the protections applies.
    """
    for language in self:
        if language.code == 'en_US':
            raise UserError(_("Base Language 'en_US' can not be deleted."))
        ctx_lang = self._context.get('lang')
        if ctx_lang and (language.code == ctx_lang):
            raise UserError(_("You cannot delete the language which is the user's preferred language."))
        if language.active:
            raise UserError(_("You cannot delete the language which is Active!\nPlease de-activate the language first."))
|
||||
|
||||
def unlink(self):
    """Delete languages and invalidate the ormcache-backed lookups."""
    self.clear_caches()
    return super(Lang, self).unlink()
|
||||
|
||||
def copy_data(self, default=None):
    """Suffix name/code/url_code with "(copy)" so the unique constraints on
    all three fields are not violated when duplicating.

    NOTE(review): this makes the copied ``code``/``url_code`` invalid as
    locale identifiers until the user edits them — presumably intentional
    as a placeholder; confirm before relying on the copy.
    """
    default = dict(default or {})

    if "name" not in default:
        default["name"] = _("%s (copy)", self.name)
    if "code" not in default:
        default["code"] = _("%s (copy)", self.code)
    if "url_code" not in default:
        default["url_code"] = _("%s (copy)", self.url_code)
    return super().copy_data(default=default)
|
||||
|
||||
def format(self, percent, value, grouping=False, monetary=False):
    """ Format() will return the language-specific output for float values

    :param percent: a single printf-style specifier, e.g. '%.2f'
    :param value: the number to format
    :param grouping: when True, apply the language's digit grouping and
        decimal separator
    :param monetary: forwarded to _data_get (cache key only)
    :raises ValueError: when ``percent`` does not start with '%'
    """
    self.ensure_one()
    if percent[0] != '%':
        raise ValueError(_("format() must be given exactly one %char format specifier"))

    formatted = percent % value

    # floats and decimal ints need special action!
    if grouping:
        lang_grouping, thousands_sep, decimal_point = self._data_get(monetary)
        # grouping is stored as a string like "[3, 0]"; parse it safely.
        eval_lang_grouping = ast.literal_eval(lang_grouping)

        if percent[-1] in 'eEfFgG':
            parts = formatted.split('.')
            parts[0] = intersperse(parts[0], eval_lang_grouping, thousands_sep)[0]

            formatted = decimal_point.join(parts)

        elif percent[-1] in 'diu':
            formatted = intersperse(formatted, eval_lang_grouping, thousands_sep)[0]

    return formatted
|
||||
|
||||
def action_activate_langs(self):
    """ Activate the selected languages

    :return: a client notification action confirming the installation
    """
    for lang in self.filtered(lambda l: not l.active):
        lang.toggle_active()
    message = _("The languages that you selected have been successfully installed. Users can choose their favorite language in their preferences.")
    return {
        'type': 'ir.actions.client',
        'tag': 'display_notification',
        'target': 'new',
        'params': {
            'message': message,
            'type': 'success',
            'sticky': False,
            'next': {'type': 'ir.actions.act_window_close'},
        }
    }
|
||||
|
||||
|
||||
def split(l, counts):
    """Cut the sequence *l* into consecutive chunks whose sizes are taken
    from *counts*, returning the list of chunks.

    Special count values:
      * ``-1`` stops the cutting; the remainder of *l* becomes one chunk.
      * ``0`` repeats the previous chunk size until *l* is exhausted.

    >>> split("hello world", [])
    ['hello world']
    >>> split("hello world", [1])
    ['h', 'ello world']
    >>> split("hello world", [2])
    ['he', 'llo world']
    >>> split("hello world", [2,3])
    ['he', 'llo', ' world']
    >>> split("hello world", [2,3,0])
    ['he', 'llo', ' wo', 'rld']
    >>> split("hello world", [2,-1,3])
    ['he', 'llo world']
    """
    chunks = []
    # Size reused when a zero count is encountered; starts as the whole
    # length so a leading 0 keeps the input in one piece.
    repeat_size = len(l)
    for size in counts:
        if not l or size == -1:
            break
        if size == 0:
            # Repeat the last chunk size until nothing is left.
            while l:
                chunks.append(l[:repeat_size])
                l = l[repeat_size:]
            break
        chunks.append(l[:size])
        l = l[size:]
        repeat_size = size
    if l:
        chunks.append(l)
    return chunks
|
||||
|
||||
# Splits a numeric string into (non-digit prefix, main part, trailing part).
intersperse_pat = re.compile('([^0-9]*)([^ ]*)(.*)')


def intersperse(string, counts, separator=''):
    """Insert *separator* into *string* at intervals given by *counts*,
    applied right-to-left on the digit portion of *string* (thousands
    grouping, e.g. ``intersperse('1234567', [3, 0], ',')``).

    :return: a ``(formatted_string, separators_inserted)`` pair
    """
    left, body, right = intersperse_pat.match(string).groups()

    def reverse(s):
        return s[::-1]

    # Group from the right by splitting the reversed body, then restore
    # the original orientation of each chunk and of the chunk order.
    chunks = split(reverse(body), counts)
    grouped = separator.join(reverse(chunk) for chunk in reverse(chunks))
    inserted = len(chunks) - 1 if chunks else 0
    return left + grouped + right, inserted
|
||||
1132
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_partner.py
Normal file
1132
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_partner.py
Normal file
File diff suppressed because it is too large
Load diff
2220
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_users.py
Normal file
2220
odoo-bringout-oca-ocb-base/odoo/addons/base/models/res_users.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,66 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import logging
|
||||
|
||||
|
||||
from odoo import api, fields, models
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ResUsersDeletion(models.Model):
    """User deletion requests.

    Those requests are logged in a different model to keep a trace of this action and the
    deletion is done in a CRON. Indeed, removing a user can be a heavy operation on
    large database (because of create_uid, write_uid on each model, which are not always
    indexed). This model just remove the users added in the deletion queue, remaining code
    must deal with other consideration (archiving, blacklist email...).
    """

    _name = 'res.users.deletion'
    _description = 'Users Deletion Request'
    _rec_name = 'user_id'

    # Integer field because the related user might be deleted from the database
    user_id = fields.Many2one('res.users', string='User', ondelete='set null')
    # Snapshot of the user's database id, kept after the user record is gone.
    user_id_int = fields.Integer('User Id', compute='_compute_user_id_int', store=True)
    # Request lifecycle: 'todo' until the autovacuum processes it.
    state = fields.Selection([('todo', 'To Do'), ('done', 'Done'), ('fail', 'Failed')],
                             string='State', required=True, default='todo')

    @api.depends('user_id')
    def _compute_user_id_int(self):
        # Store the user's id so it survives the user's deletion; records
        # whose user_id is already empty are deliberately left untouched
        # (the stored value is not reset to 0).
        for user_deletion in self:
            if user_deletion.user_id:
                user_deletion.user_id_int = user_deletion.user_id.id

    @api.autovacuum
    def _gc_portal_users(self):
        """Remove the portal users that asked to deactivate their account.

        (see <res.users>::_deactivate_portal_user)

        Removing a user can be an heavy operation on large database (because of
        create_uid, write_uid on each models, which are not always indexed). Because of
        that, this operation is done in a CRON.
        """
        delete_requests = self.search([('state', '=', 'todo')])

        # filter the requests related to a deleted user
        done_requests = delete_requests.filtered(lambda request: not request.user_id)
        done_requests.state = 'done'

        for delete_request in (delete_requests - done_requests):
            user = delete_request.user_id
            # Capture the name before unlink: the record won't be readable after.
            user_name = user.name
            try:
                # Savepoint so a failed unlink rolls back only this request,
                # not the whole autovacuum transaction.
                with self.env.cr.savepoint():
                    partner = user.partner_id
                    # Delete the user first, then its partner (the user
                    # references the partner, not the other way around).
                    user.unlink()
                    partner.unlink()
                _logger.info('User #%i %r, deleted. Original request from %r.',
                             user.id, user_name, delete_request.create_uid.name)
                delete_request.state = 'done'
            except Exception:
                # Best-effort cleanup: mark the request failed and keep going
                # with the remaining requests instead of aborting the CRON.
                delete_request.state = 'fail'
||||
Loading…
Add table
Add a link
Reference in a new issue