19.0 vanilla

This commit is contained in:
Ernad Husremovic 2025-10-03 18:07:25 +02:00
parent 0a7ae8db93
commit 991d2234ca
416 changed files with 646602 additions and 300844 deletions

View file

@ -1,80 +0,0 @@
# -*- coding: utf-8 -*-
# ruff: noqa: E402, F401
# Part of Odoo. See LICENSE file for full copyright and licensing details.
""" OpenERP core library."""
# ----------------------------------------------------------
# odoo must be a namespace package for odoo.addons to become one too
# https://packaging.python.org/guides/packaging-namespace-packages/
# ----------------------------------------------------------
import pkgutil
import os.path
__path__ = [
os.path.abspath(path)
for path in pkgutil.extend_path(__path__, __name__)
]
import sys
MIN_PY_VERSION = (3, 10)
MAX_PY_VERSION = (3, 13)
assert sys.version_info > MIN_PY_VERSION, f"Outdated python version detected, Odoo requires Python >= {'.'.join(map(str, MIN_PY_VERSION))} to run."
# ----------------------------------------------------------
# Shortcuts
# ----------------------------------------------------------
# The hard-coded super-user id (a.k.a. administrator, or root user).
SUPERUSER_ID = 1
def registry(database_name=None):
    """Fetch (or lazily create) the model registry for ``database_name``.

    Deprecated shortcut kept for backward compatibility: use
    :class:`odoo.modules.registry.Registry` directly instead.  When no
    database name is given, the one bound to the current thread is used.
    """
    import warnings  # noqa: PLC0415
    warnings.warn("Use directly odoo.modules.registry.Registry", DeprecationWarning, 2)
    db_name = database_name
    if db_name is None:
        # fall back on the database attached to the current request/thread
        import threading  # noqa: PLC0415
        db_name = threading.current_thread().dbname
    return modules.registry.Registry(db_name)
# ----------------------------------------------------------
# Import tools to patch code and libraries
# required to do as early as possible for evented and timezone
# ----------------------------------------------------------
from . import _monkeypatches
_monkeypatches.patch_all()
# ----------------------------------------------------------
# Imports
# ----------------------------------------------------------
from . import upgrade # this namespace must be imported first
from . import addons
from . import conf
from . import loglevels
from . import modules
from . import netsvc
from . import osv
from . import release
from . import service
from . import sql_db
from . import tools
# ----------------------------------------------------------
# Model classes, fields, api decorators, and translations
# ----------------------------------------------------------
from . import models
from . import fields
from . import api
from odoo.tools.translate import _, _lt
from odoo.fields import Command
# ----------------------------------------------------------
# Other imports, which may require stuff from above
# ----------------------------------------------------------
from . import cli
from . import http

View file

@ -1,39 +1,67 @@
# ruff: noqa: F401, PLC0415 """Lazy module monkeypatcher
# ignore import not at top of the file
Submodules should be named after the module (stdlib or third-party) they need
to patch, and should define a `patch_module` function.
This function will be called either immediately if the module to patch is
already imported when the monkey patcher runs, or right after that module is
imported otherwise.
"""
import importlib
import os import os
import pkgutil
import sys
import time import time
from .evented import patch_evented from types import ModuleType, SimpleNamespace
def set_timezone_utc(): class PatchImportHook:
"""Register hooks that are run on import."""
def __init__(self):
self.hooks = set()
def add_hook(self, fullname: str) -> None:
"""Register a hook after a module is loaded.
If already loaded, run hook immediately."""
self.hooks.add(fullname)
if fullname in sys.modules:
patch_module(fullname)
def find_spec(self, fullname, path=None, target=None):
if fullname not in self.hooks:
return None # let python use another import hook to import this fullname
# skip all finders before this one
idx = sys.meta_path.index(self)
for finder in sys.meta_path[idx + 1:]:
spec = finder.find_spec(fullname, path, target)
if spec is not None:
# we found a spec, change the loader
def exec_module(module: ModuleType, exec_module=spec.loader.exec_module) -> None:
exec_module(module)
patch_module(module.__name__)
spec.loader = SimpleNamespace(create_module=spec.loader.create_module, exec_module=exec_module)
return spec
raise ImportError(f"Could not load the module {fullname!r} to patch")
HOOK_IMPORT = PatchImportHook()
sys.meta_path.insert(0, HOOK_IMPORT)
def patch_init() -> None:
os.environ['TZ'] = 'UTC' # Set the timezone os.environ['TZ'] = 'UTC' # Set the timezone
if hasattr(time, 'tzset'): if hasattr(time, 'tzset'):
time.tzset() time.tzset()
for submodule in pkgutil.iter_modules(__path__):
HOOK_IMPORT.add_hook(submodule.name)
def patch_all():
patch_evented()
set_timezone_utc()
from .codecs import patch_codecs def patch_module(name: str) -> None:
patch_codecs() module = importlib.import_module(f'.{name}', __name__)
from .email import patch_email module.patch_module()
patch_email()
from .mimetypes import patch_mimetypes
patch_mimetypes()
from .pytz import patch_pytz
patch_pytz()
from .literal_eval import patch_literal_eval
patch_literal_eval()
from .lxml import patch_lxml
patch_lxml()
from .num2words import patch_num2words
patch_num2words()
from .stdnum import patch_stdnum
patch_stdnum()
from .urllib3 import patch_urllib3
patch_urllib3()
from .werkzeug_urls import patch_werkzeug
patch_werkzeug()
from .zeep import patch_zeep
patch_zeep()

View file

@ -28,5 +28,5 @@ def literal_eval(expr):
return orig_literal_eval(expr) return orig_literal_eval(expr)
def patch_literal_eval(): def patch_module():
ast.literal_eval = literal_eval ast.literal_eval = literal_eval

View file

@ -0,0 +1,9 @@
import bs4
import warnings
def patch_module():
    """Silence bs4's ``XMLParsedAsHTMLWarning`` (emitted since bs4 4.11.0).

    ofxparse parses OFX (XML) files with an HTML parser, which triggers the
    warning; see https://github.com/jseutter/ofxparse/issues/170
    """
    warning_cls = getattr(bs4, 'XMLParsedAsHTMLWarning', None)
    if warning_cls is not None:
        warnings.filterwarnings('ignore', category=warning_cls)

View file

@ -1,26 +0,0 @@
import codecs
import encodings.aliases
import re
import babel.core
def patch_codecs():
    """Teach Python's codec machinery a few extra charset spellings.

    * ``874`` / ``windows_874`` become aliases of ``cp874``;
    * ``iso-8859-8-i`` / ``iso-8859-8-e`` resolve to ``iso-8859-8``
      (https://bugs.python.org/issue18624);
    * Babel: ``nb`` resolves to ``nb_NO`` (drop once fixed upstream).
    """
    for alias in ('874', 'windows_874'):
        encodings.aliases.aliases[alias] = 'cp874'

    hebrew_codec = codecs.lookup('iso8859_8')
    hebrew_variants = re.compile(r'iso[-_]?8859[-_]8[-_]?[ei]', re.IGNORECASE)

    def _lookup_hebrew(charset):
        # codec search functions must return None for names they don't handle
        return hebrew_codec if hebrew_variants.match(charset) else None

    codecs.register(_lookup_hebrew)
    # To remove when corrected in Babel
    babel.core.LOCALE_ALIASES['nb'] = 'nb_NO'

View file

@ -0,0 +1,13 @@
import csv
def patch_module():
    """Raise the CSV field size limit and register a "UNIX" dialect.

    The stock limit for CSV fields is 128KiB, not quite sufficient to
    import e.g. images to be stored in attachments.  500MiB is a bit
    overkill, but better safe than sorry.  The "UNIX" dialect is the
    excel dialect with a bare newline as line terminator.
    """
    class _UnixDialect(csv.excel):
        lineterminator = '\n'

    csv.field_size_limit(500 * 1024 * 1024)
    csv.register_dialect("UNIX", _UnixDialect)

View file

@ -0,0 +1,29 @@
"""
The docstrings can use many more roles and directives than the one
present natively in docutils. That's because we use Sphinx to render
them in the documentation, and Sphinx defines the "Python Domain", a set
of additional rules and directive to understand the python language.
It is not desirable to add a dependency on Sphinx in community, as it is
a *too big* dependency.
The following code adds a bunch of dummy elements for the missing roles
and directives, so docutils is able to parse them with no warning.
"""
import docutils.nodes
import docutils.parsers.rst.directives.admonitions
def _role_literal(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Minimal docutils role handler rendering *text* as a literal node.

    Stands in for Sphinx-only roles so docutils can parse docstrings
    without warnings.  Returns the ``(nodes, system_messages)`` pair the
    docutils role API expects.
    """
    return [docutils.nodes.literal(rawtext, text)], []
def patch_module():
    """Register dummy handlers for the Sphinx roles/directives docutils lacks."""
    rst = docutils.parsers.rst
    sphinx_roles = ('attr', 'class', 'func', 'meth', 'ref', 'const', 'samp', 'term')
    for role_name in sphinx_roles:
        rst.roles.register_local_role(role_name, _role_literal)
    # a plain admonition is a good-enough stand-in for these directives
    note_directive = rst.directives.admonitions.Note
    for directive_name in ('attribute', 'deprecated'):
        rst.directives.register_directive(directive_name, note_directive)

View file

@ -1,7 +1,7 @@
from email._policybase import _PolicyBase from email._policybase import _PolicyBase
def patch_email(): def patch_module():
def policy_clone(self, **kwargs): def policy_clone(self, **kwargs):
for arg in kwargs: for arg in kwargs:
if arg.startswith("_") or "__" in arg: if arg.startswith("_") or "__" in arg:

View file

@ -0,0 +1,31 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import locale
import time
import datetime
def patch_module():
    """Backfill ``locale.D_FMT``, ``locale.T_FMT`` and ``locale.nl_langinfo``
    on platforms whose ``locale`` module does not provide them
    (presumably Windows — TODO confirm).

    The emulated ``nl_langinfo`` derives the date/time format string by
    formatting a known timestamp and substituting each literal component
    with its strftime directive.
    """
    if not hasattr(locale, 'D_FMT'):
        locale.D_FMT = 1
    if not hasattr(locale, 'T_FMT'):
        locale.T_FMT = 2
    if hasattr(locale, 'nl_langinfo'):
        return

    def _derive_format(sample, parse_fmt, render_fmt, substitutions):
        # Render a known timestamp, then map each literal component back to
        # the matching strftime directive.
        parsed = time.strptime(sample, parse_fmt)
        rendered = datetime.datetime(*parsed[:-2]).strftime(render_fmt)
        for literal, directive in substitutions:
            rendered = rendered.replace(literal, directive)
        return rendered

    def nl_langinfo(param):
        if param == locale.D_FMT:
            # NOTE(review): a two-digit year ('04') is also mapped to %Y
            # (four-digit) — kept as-is from the original implementation.
            return _derive_format(
                '30/12/2004', '%d/%m/%Y', '%x',
                [('30', '%d'), ('12', '%m'), ('2004', '%Y'), ('04', '%Y')])
        if param == locale.T_FMT:
            return _derive_format(
                '13:24:56', '%H:%M:%S', '%X',
                [('13', '%H'), ('24', '%M'), ('56', '%S')])

    locale.nl_langinfo = nl_langinfo

View file

@ -6,7 +6,7 @@ from importlib.metadata import version
from odoo.tools import parse_version from odoo.tools import parse_version
def patch_lxml(): def patch_module():
# between these versions having a couple data urls in a style attribute # between these versions having a couple data urls in a style attribute
# or style node removes the attribute or node erroneously # or style node removes the attribute or node erroneously
if parse_version("4.6.0") <= parse_version(version('lxml')) < parse_version("5.2.0"): if parse_version("4.6.0") <= parse_version(version('lxml')) < parse_version("5.2.0"):

View file

@ -1,7 +1,7 @@
import mimetypes import mimetypes
def patch_mimetypes(): def patch_module():
# if extension is already knows, the new definition will remplace the existing one # if extension is already knows, the new definition will remplace the existing one
# Add potentially missing (older ubuntu) font mime types # Add potentially missing (older ubuntu) font mime types
mimetypes.add_type('application/font-woff', '.woff') mimetypes.add_type('application/font-woff', '.woff')

View file

@ -6,7 +6,7 @@ from collections import OrderedDict
from decimal import ROUND_HALF_UP, Decimal from decimal import ROUND_HALF_UP, Decimal
from math import floor from math import floor
from odoo import MIN_PY_VERSION from odoo.release import MIN_PY_VERSION
# The following section of the code is used to monkey patch # The following section of the code is used to monkey patch
# the Arabic class of num2words package as there are some problems # the Arabic class of num2words package as there are some problems
@ -145,7 +145,7 @@ class Num2Word_Base:
def to_cardinal_float(self, value): def to_cardinal_float(self, value):
try: try:
float(value) == value _ = float(value) == value
except (ValueError, TypeError, AssertionError, AttributeError): except (ValueError, TypeError, AssertionError, AttributeError):
raise TypeError(self.errmsg_nonnum % value) raise TypeError(self.errmsg_nonnum % value)
@ -971,7 +971,7 @@ class NumberToWords_BG(Num2Word_Base):
return ret_minus + ''.join(ret) return ret_minus + ''.join(ret)
def patch_num2words(): def patch_module():
try: try:
import num2words # noqa: PLC0415 import num2words # noqa: PLC0415
except ImportError: except ImportError:

View file

@ -122,7 +122,7 @@ _tz_mapping = {
original_pytz_timezone = pytz.timezone original_pytz_timezone = pytz.timezone
def patch_pytz(): def patch_module():
def timezone(name): def timezone(name):
if name not in pytz.all_timezones_set and name in _tz_mapping: if name not in pytz.all_timezones_set and name in _tz_mapping:
name = _tz_mapping[name] name = _tz_mapping[name]

View file

@ -0,0 +1,6 @@
import re
def patch_module():
    """Grow the compiled-pattern cache of :mod:`re`.

    The stock cache holds 512 patterns, a little too small for Odoo.
    """
    # private CPython knob; revisit if the re internals change
    re._MAXCACHE = 4096

View file

@ -1,16 +1,30 @@
""" """Patcher for any change not strictly related to an stdlib module
Running mode flags (gevent, prefork)
This should be imported as early as possible.
It will initialize the `odoo.evented` variable.
""" """
import odoo
import codecs
import encodings.aliases
import re
import sys import sys
import babel.core
import odoo
def patch_module():
patch_evented()
patch_codecs()
odoo.evented = False odoo.evented = False
def patch_evented(): def patch_evented():
"""Running mode flags (gevent, prefork)
This should be executed early. It will initialize the `odoo.evented` variable.
"""
if odoo.evented or not (len(sys.argv) > 1 and sys.argv[1] == 'gevent'): if odoo.evented or not (len(sys.argv) > 1 and sys.argv[1] == 'gevent'):
return return
sys.argv.remove('gevent') sys.argv.remove('gevent')
@ -35,5 +49,27 @@ def patch_evented():
else: else:
raise psycopg2.OperationalError( raise psycopg2.OperationalError(
"Bad result from poll: %r" % state) "Bad result from poll: %r" % state)
psycopg2.extensions.set_wait_callback(gevent_wait_callback) psycopg2.extensions.set_wait_callback(gevent_wait_callback)
odoo.evented = True odoo.evented = True
def patch_codecs():
    """Register extra charset aliases and codecs missing from Python/Babel."""
    # ---------------------------------------------------------
    # some charsets are known by Python under a different name
    # ---------------------------------------------------------
    encodings.aliases.aliases['874'] = 'cp874'
    encodings.aliases.aliases['windows_874'] = 'cp874'
    # ---------------------------------------------------------
    # alias hebrew iso-8859-8-i and iso-8859-8-e on iso-8859-8
    # https://bugs.python.org/issue18624
    # ---------------------------------------------------------
    iso8859_8 = codecs.lookup('iso8859_8')
    iso8859_8ie_re = re.compile(r'iso[-_]?8859[-_]8[-_]?[ei]', re.IGNORECASE)
    # codec search functions must return None for unhandled charsets
    codecs.register(lambda charset: iso8859_8 if iso8859_8ie_re.match(charset) else None)
    # To remove when corrected in Babel
    babel.core.LOCALE_ALIASES['nb'] = 'nb_NO'

View file

@ -48,7 +48,7 @@ def new_get_soap_client(wsdlurl, timeout=30):
return _soap_clients[(wsdlurl, timeout)] return _soap_clients[(wsdlurl, timeout)]
def patch_stdnum(): def patch_module():
try: try:
from stdnum import util from stdnum import util
except ImportError: except ImportError:

View file

@ -8,5 +8,5 @@ def pool_init(self, *args, **kwargs):
self.pool_classes_by_scheme = {**self.pool_classes_by_scheme} self.pool_classes_by_scheme = {**self.pool_classes_by_scheme}
def patch_urllib3(): def patch_module():
PoolManager.__init__ = pool_init PoolManager.__init__ = pool_init

View file

@ -1040,8 +1040,8 @@ def url_join(
return url_unparse((scheme, netloc, path, query, fragment)) return url_unparse((scheme, netloc, path, query, fragment))
def patch_werkzeug(): def patch_module():
from ..tools.json import scriptsafe # noqa: PLC0415 from odoo.tools.json import scriptsafe
Request.json_module = Response.json_module = scriptsafe Request.json_module = Response.json_module = scriptsafe
FileStorage.save = lambda self, dst, buffer_size=(1 << 20): copyfileobj(self.stream, dst, buffer_size) FileStorage.save = lambda self, dst, buffer_size=(1 << 20): copyfileobj(self.stream, dst, buffer_size)

View file

@ -0,0 +1,20 @@
def patch_module():
    """Point xlrd's xlsx reader at lxml instead of a possibly-broken defusedxml.

    xlrd.xlsx supports defusedxml, but defusedxml's etree interface is broken
    (missing ElementTree and thus ElementTree.iter), which causes a fallback
    to Element.getiterator() — a warning before Python 3.9 and an error from
    3.9 on.  Historically defusedxml was installed as a hard dependency of
    zeep; zeep >= 4.1.0 (required since 18.0) dropped it, but a legacy env
    being upgraded may still have it around, hence the patch.  lxml is a hard
    dependency, so instruct xlsx to use it directly.
    """
    try:
        from xlrd import xlsx  # noqa: PLC0415
    except ImportError:
        return  # xlrd absent (or too old to expose xlsx): nothing to patch
    from lxml import etree  # noqa: PLC0415
    xlsx.ET = etree
    xlsx.ET_has_iterparse = True
    xlsx.Element_has_iter = True

View file

@ -0,0 +1,22 @@
"""
Patch xlsxwriter to add some sanitization to respect the excel sheet name
restrictions as the sheet name is often translatable, can not control the input
"""
import re
import xlsxwriter
class PatchedXlsxWorkbook(xlsxwriter.Workbook):
    """xlsxwriter Workbook whose sheet names are sanitized to Excel's rules.

    Sheet names are often translatable / user-provided, so we cannot control
    the input: strip the characters Excel forbids and clamp the length.
    """

    # characters Excel refuses in a sheet name: [ ] : * ? / \
    _FORBIDDEN_CHARS = re.compile(r'[\[\]:*?/\\]')

    def add_worksheet(self, name=None, worksheet_class=None):
        if name:
            # drop invalid characters, then clamp to Excel's 31-char maximum
            name = self._FORBIDDEN_CHARS.sub('', name)[:31]
        return super().add_worksheet(name, worksheet_class=worksheet_class)
def patch_module():
    """Swap xlsxwriter's Workbook for the sheet-name-sanitizing subclass."""
    xlsxwriter.Workbook = PatchedXlsxWorkbook

View file

@ -0,0 +1,21 @@
"""
Patch xlwt to add some sanitization to respect the excel sheet name
restrictions as the sheet name is often translatable, can not control the input
"""
import re
import xlwt
class PatchedWorkbook(xlwt.Workbook):
    """xlwt Workbook whose sheet names are sanitized to Excel's rules.

    Sheet names are often translatable / user-provided, so we cannot control
    the input: strip the characters Excel forbids and clamp the length.
    """

    # characters Excel refuses in a sheet name: [ ] : * ? / \
    _FORBIDDEN_CHARS = re.compile(r'[\[\]:*?/\\]')

    def add_sheet(self, name, cell_overwrite_ok=False):
        # drop invalid characters, then clamp to Excel's 31-char maximum
        sanitized = self._FORBIDDEN_CHARS.sub('', name)[:31]
        return super().add_sheet(sanitized, cell_overwrite_ok=cell_overwrite_ok)
def patch_module():
    """Swap xlwt's Workbook for the sheet-name-sanitizing subclass."""
    xlwt.Workbook = PatchedWorkbook

View file

@ -2,7 +2,7 @@ from zeep.xsd import visitor
from zeep.xsd.const import xsd_ns from zeep.xsd.const import xsd_ns
def patch_zeep(): def patch_module():
# see https://github.com/mvantellingen/python-zeep/issues/1185 # see https://github.com/mvantellingen/python-zeep/issues/1185
if visitor.tags.notation.localname != 'notation': if visitor.tags.notation.localname != 'notation':
visitor.tags.notation = xsd_ns('notation') visitor.tags.notation = xsd_ns('notation')

View file

@ -0,0 +1 @@
(python namespace)

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import controllers
from . import models from . import models
from . import report from . import report
from . import wizard from . import wizard

View file

@ -23,6 +23,7 @@ The kernel of Odoo, needed for all installation.
'data/ir_demo_data.xml', 'data/ir_demo_data.xml',
'security/base_groups.xml', 'security/base_groups.xml',
'security/base_security.xml', 'security/base_security.xml',
'wizard/wizard_ir_model_menu_create_views.xml',
'views/base_menus.xml', 'views/base_menus.xml',
'views/decimal_precision_views.xml', 'views/decimal_precision_views.xml',
'views/res_config_views.xml', 'views/res_config_views.xml',
@ -67,7 +68,9 @@ The kernel of Odoo, needed for all installation.
'views/res_bank_views.xml', 'views/res_bank_views.xml',
'views/res_country_views.xml', 'views/res_country_views.xml',
'views/res_currency_views.xml', 'views/res_currency_views.xml',
'views/res_groups_views.xml',
'views/res_users_views.xml', 'views/res_users_views.xml',
'views/res_users_apikeys_views.xml',
'views/res_device_views.xml', 'views/res_device_views.xml',
'views/res_users_identitycheck_views.xml', 'views/res_users_identitycheck_views.xml',
'views/res_config_settings_views.xml', 'views/res_config_settings_views.xml',
@ -91,5 +94,6 @@ The kernel of Odoo, needed for all installation.
'installable': True, 'installable': True,
'auto_install': True, 'auto_install': True,
'post_init_hook': 'post_init', 'post_init_hook': 'post_init',
'author': 'Odoo S.A.',
'license': 'LGPL-3', 'license': 'LGPL-3',
} }

View file

@ -1 +0,0 @@
from . import rpc

View file

@ -1,178 +0,0 @@
import re
import sys
import traceback
import xmlrpc.client
from datetime import date, datetime
from collections import defaultdict
from markupsafe import Markup
import odoo
from odoo.http import Controller, route, dispatch_rpc, request, Response
from odoo.fields import Date, Datetime, Command
from odoo.tools import lazy
from odoo.tools.misc import frozendict
# ==========================================================
# XML-RPC helpers
# ==========================================================
# XML-RPC fault codes. Some care must be taken when changing these: the
# constants are also defined client-side and must remain in sync.
# User code must use the exceptions defined in ``odoo.exceptions`` (not
# create directly ``xmlrpc.client.Fault`` objects).
RPC_FAULT_CODE_CLIENT_ERROR = 1 # indistinguishable from app. error.
RPC_FAULT_CODE_APPLICATION_ERROR = 1
RPC_FAULT_CODE_WARNING = 2
RPC_FAULT_CODE_ACCESS_DENIED = 3
RPC_FAULT_CODE_ACCESS_ERROR = 4
# 0 to 31, excluding tab, newline, and carriage return
CONTROL_CHARACTERS = dict.fromkeys(set(range(32)) - {9, 10, 13})
def xmlrpc_handle_exception_int(e):
    """Marshal exception *e* into an XML-RPC fault with an integer faultCode.

    Known Odoo exceptions map to the RPC_FAULT_CODE_* constants; anything
    else becomes an application error carrying the full traceback.
    """
    # Order matters: keep the original isinstance precedence.
    fault_codes = (
        (odoo.exceptions.RedirectWarning, RPC_FAULT_CODE_WARNING),
        (odoo.exceptions.AccessError, RPC_FAULT_CODE_ACCESS_ERROR),
        (odoo.exceptions.AccessDenied, RPC_FAULT_CODE_ACCESS_DENIED),
        (odoo.exceptions.UserError, RPC_FAULT_CODE_WARNING),
    )
    for exc_type, code in fault_codes:
        if isinstance(e, exc_type):
            fault = xmlrpc.client.Fault(code, str(e))
            break
    else:
        # unknown exception: report the whole traceback to the caller
        formatted_info = "".join(traceback.format_exception(*sys.exc_info()))
        fault = xmlrpc.client.Fault(RPC_FAULT_CODE_APPLICATION_ERROR, formatted_info)
    return dumps(fault)
def xmlrpc_handle_exception_string(e):
    """Marshal exception *e* into an XML-RPC fault with a string faultCode.

    Historical, non-compliant variant (see the ``/xmlrpc/<service>`` route):
    the faultCode carries the human-readable message itself.
    """
    # Order matters: keep the original isinstance precedence.
    handlers = (
        (odoo.exceptions.RedirectWarning,
         lambda exc: ('warning -- Warning\n\n' + str(exc), '')),
        (odoo.exceptions.MissingError,
         lambda exc: ('warning -- MissingError\n\n' + str(exc), '')),
        (odoo.exceptions.AccessError,
         lambda exc: ('warning -- AccessError\n\n' + str(exc), '')),
        (odoo.exceptions.AccessDenied,
         lambda exc: ('AccessDenied', str(exc))),
        (odoo.exceptions.UserError,
         lambda exc: ('warning -- UserError\n\n' + str(exc), '')),
    )
    for exc_type, build in handlers:
        if isinstance(e, exc_type):
            fault = xmlrpc.client.Fault(*build(e))
            break
    else:
        # InternalError and anything unexpected: send the full traceback
        formatted_info = "".join(traceback.format_exception(*sys.exc_info()))
        fault = xmlrpc.client.Fault(odoo.tools.exception_to_unicode(e), formatted_info)
    return dumps(fault)
class OdooMarshaller(xmlrpc.client.Marshaller):
    """XML-RPC marshaller extended with Odoo-specific type handling.

    Adds dump functions for frozendict, bytes, datetime, date, lazy values,
    Markup and Command, and strips XML-illegal control characters from
    strings.  The dispatch table is copied from the stdlib Marshaller and
    extended at class-definition time (see the assignments at the bottom).
    """
    # own copy: don't mutate the stdlib Marshaller's shared dispatch table
    dispatch = dict(xmlrpc.client.Marshaller.dispatch)

    def dump_frozen_dict(self, value, write):
        # marshall frozendicts as plain structs
        value = dict(value)
        self.dump_struct(value, write)

    # By default, in xmlrpc, bytes are converted to xmlrpc.client.Binary objects.
    # Historically, odoo sends binary as a base64 string.
    # In python 3, base64.b64{de,en}code() methods now work on bytes.
    def dump_bytes(self, value, write):
        self.dump_unicode(value.decode(), write)

    def dump_datetime(self, value, write):
        # override to marshall as a string for backwards compatibility
        value = Datetime.to_string(value)
        self.dump_unicode(value, write)

    # convert date objects to strings in iso8601 format.
    def dump_date(self, value, write):
        value = Date.to_string(value)
        self.dump_unicode(value, write)

    def dump_lazy(self, value, write):
        # unwrap the lazy object and dispatch on the concrete value's type
        v = value._value
        return self.dispatch[type(v)](self, v, write)

    def dump_unicode(self, value, write):
        # XML 1.0 disallows control characters, remove them otherwise they break clients
        return super().dump_unicode(value.translate(CONTROL_CHARACTERS), write)

    # register the handlers above; Command/defaultdict/Markup reuse existing ones
    dispatch[frozendict] = dump_frozen_dict
    dispatch[bytes] = dump_bytes
    dispatch[datetime] = dump_datetime
    dispatch[date] = dump_date
    dispatch[lazy] = dump_lazy
    dispatch[str] = dump_unicode
    dispatch[Command] = dispatch[int]
    dispatch[defaultdict] = dispatch[dict]
    dispatch[Markup] = lambda self, value, write: self.dispatch[str](self, str(value), write)
def dumps(params: list | tuple | xmlrpc.client.Fault) -> str:
    """Serialize *params* into a complete XML-RPC ``<methodResponse>`` document.

    Uses :class:`OdooMarshaller` so Odoo-specific types (dates, lazy values,
    frozendicts, ...) are marshalled consistently; ``allow_none`` stays off.
    """
    payload = OdooMarshaller(allow_none=False).dumps(params)
    return (
        '<?xml version="1.0"?>\n'
        '<methodResponse>\n'
        + payload +
        '\n</methodResponse>\n'
    )
# ==========================================================
# RPC Controller
# ==========================================================
def _check_request():
    """Close the current request's cursor when a database is bound.

    NOTE(review): presumably RPC dispatching opens its own cursor(s), so the
    request-level one is released up-front — confirm against dispatch_rpc.
    """
    if not request.db:
        return
    request.env.cr.close()
class RPC(Controller):
    """Handle RPC connections (XML-RPC and JSON-RPC entry points)."""

    def _xmlrpc(self, service):
        """Common method to handle an XML-RPC request.

        Parses the raw request body, dispatches to *service* and returns the
        marshalled ``<methodResponse>`` string.
        """
        _check_request()
        data = request.httprequest.get_data()
        # use_datetime=True: <dateTime.iso8601> values become datetime objects
        params, method = xmlrpc.client.loads(data, use_datetime=True)
        result = dispatch_rpc(service, method, params)
        return dumps((result,))

    @route("/xmlrpc/<service>", auth="none", methods=["POST"], csrf=False, save_session=False)
    def xmlrpc_1(self, service):
        """XML-RPC service that returns faultCode as strings.

        This entrypoint is historical and non-compliant, but kept for
        backwards-compatibility.
        """
        _check_request()
        try:
            response = self._xmlrpc(service)
        except Exception as error:
            # attach a pre-marshalled fault; presumably the HTTP dispatcher
            # uses it to build the error response — confirm in odoo.http
            error.error_response = Response(
                response=xmlrpc_handle_exception_string(error),
                mimetype='text/xml',
            )
            raise
        return Response(response=response, mimetype='text/xml')

    @route("/xmlrpc/2/<service>", auth="none", methods=["POST"], csrf=False, save_session=False)
    def xmlrpc_2(self, service):
        """XML-RPC service that returns faultCode as int."""
        _check_request()
        try:
            response = self._xmlrpc(service)
        except Exception as error:
            # same mechanism as xmlrpc_1, with integer fault codes
            error.error_response = Response(
                response=xmlrpc_handle_exception_int(error),
                mimetype='text/xml',
            )
            raise
        return Response(response=response, mimetype='text/xml')

    @route('/jsonrpc', type='json', auth="none", save_session=False)
    def jsonrpc(self, service, method, args):
        """ Method used by client APIs to contact OpenERP. """
        _check_request()
        return dispatch_rpc(service, method, args)

View file

@ -3,13 +3,13 @@
<data> <data>
<record model="ir.module.category" id="module_category_hidden"> <record model="ir.module.category" id="module_category_hidden">
<field name="name">Technical</field> <field name="name">Technical</field>
<field name="sequence">60</field> <field name="sequence">70</field>
<field name="visible" eval="0" /> <field name="visible" eval="0" />
</record> </record>
<record model="ir.module.category" id="module_category_accounting"> <record model="ir.module.category" id="module_category_accounting">
<field name="name">Accounting</field> <field name="name">Accounting</field>
<field name="sequence">15</field> <field name="sequence">20</field>
</record> </record>
<record model="ir.module.category" id="module_category_accounting_localizations"> <record model="ir.module.category" id="module_category_accounting_localizations">
@ -30,12 +30,6 @@
<field name="visible" eval="0" /> <field name="visible" eval="0" />
</record> </record>
<record model="ir.module.category" id="module_category_user_type">
<field name="name">User types</field>
<field name="description">Helps you manage users.</field>
<field name="sequence">9</field>
</record>
<record model="ir.module.category" id="module_category_accounting_accounting"> <record model="ir.module.category" id="module_category_accounting_accounting">
<field name="name">Invoicing</field> <field name="name">Invoicing</field>
<field name="sequence">4</field> <field name="sequence">4</field>
@ -56,14 +50,19 @@
<field name="sequence">40</field> <field name="sequence">40</field>
</record> </record>
<record model="ir.module.category" id="module_category_manufacturing"> <record model="ir.module.category" id="module_category_supply_chain">
<field name="name">Manufacturing</field> <field name="name">Supply Chain</field>
<field name="sequence">30</field> <field name="sequence">25</field>
</record>
<record model="ir.module.category" id="module_category_shipping_connectors">
<field name="name">Shipping Connectors</field>
<field name="sequence">50</field>
</record> </record>
<record model="ir.module.category" id="module_category_website"> <record model="ir.module.category" id="module_category_website">
<field name="name">Website</field> <field name="name">Website</field>
<field name="sequence">35</field> <field name="sequence">10</field>
</record> </record>
<record model="ir.module.category" id="module_category_theme"> <record model="ir.module.category" id="module_category_theme">
@ -78,6 +77,14 @@
<field name="parent_id" eval="False"/> <field name="parent_id" eval="False"/>
</record> </record>
<record model="ir.module.category" id="base.module_category_human_resources_referrals">
<field name="name">Referrals</field>
<field name="description">Helps you manage referrals
User : Access to referral, share job, gain points, buy rewards
Administrator : edit rewards and more</field>
<field name="sequence">11</field>
</record>
<record model="ir.module.category" id="module_category_human_resources_appraisals"> <record model="ir.module.category" id="module_category_human_resources_appraisals">
<field name="name">Appraisals</field> <field name="name">Appraisals</field>
<field name="description">A user without any rights on Appraisals will be able to see the application, create and manage appraisals for himself and the people he's manager of.</field> <field name="description">A user without any rights on Appraisals will be able to see the application, create and manage appraisals for himself and the people he's manager of.</field>
@ -92,7 +99,7 @@
<record model="ir.module.category" id="module_category_services"> <record model="ir.module.category" id="module_category_services">
<field name="name">Services</field> <field name="name">Services</field>
<field name="sequence">10</field> <field name="sequence">15</field>
</record> </record>
<record model="ir.module.category" id="module_category_services_helpdesk"> <record model="ir.module.category" id="module_category_services_helpdesk">
@ -111,14 +118,14 @@
<field name="parent_id" ref="module_category_services"/> <field name="parent_id" ref="module_category_services"/>
</record> </record>
<record model="ir.module.category" id="module_category_inventory">
<field name="name">Inventory</field>
<field name="sequence">25</field>
</record>
<record model="ir.module.category" id="module_category_productivity"> <record model="ir.module.category" id="module_category_productivity">
<field name="name">Productivity</field> <field name="name">Productivity</field>
<field name="sequence">50</field> <field name="sequence">30</field>
</record>
<record model="ir.module.category" id="module_category_esg">
<field name="name">ESG</field>
<field name="sequence">52</field>
</record> </record>
<record model="ir.module.category" id="module_category_customizations"> <record model="ir.module.category" id="module_category_customizations">
@ -126,56 +133,20 @@
<field name="sequence">55</field> <field name="sequence">55</field>
</record> </record>
<record model="ir.module.category" id="module_category_internet_of_things_(iot)">
<field name="name">Internet of Things (IoT)</field>
<field name="sequence">60</field>
</record>
<record model="ir.module.category" id="module_category_administration_administration"> <record model="ir.module.category" id="module_category_administration_administration">
<field name="name">Administration</field> <field name="name">Administration</field>
<field name="parent_id" ref="module_category_administration"/> <field name="parent_id" ref="module_category_administration"/>
</record> </record>
<record model="ir.module.category" id="module_category_usability">
<field name="name">Extra Rights</field>
<field name="sequence">101</field>
</record>
<record model="ir.module.category" id="module_category_extra"> <record model="ir.module.category" id="module_category_extra">
<field name="name">Other Extra Rights</field> <field name="name">Other Extra Rights</field>
<field name="sequence">102</field> <field name="sequence">102</field>
</record> </record>
<!-- add applications to base groups -->
<record model="res.groups" id="group_erp_manager">
<field name="category_id" ref="module_category_administration_administration"/>
</record>
<record model="res.groups" id="group_system">
<field name="category_id" ref="module_category_administration_administration"/>
</record>
<record model="res.groups" id="group_user">
<field name="category_id" ref="module_category_user_type"/>
</record>
<record model="res.groups" id="group_multi_company">
<field name="category_id" ref="module_category_usability"/>
</record>
<record model="res.groups" id="group_multi_currency">
<field name="category_id" ref="module_category_usability"/>
</record>
<record model="res.groups" id="group_no_one">
<field name="category_id" ref="module_category_usability"/>
</record>
<record id="group_portal" model="res.groups">
<field name="category_id" ref="module_category_user_type"/>
</record>
<record id="group_public" model="res.groups">
<field name="category_id" ref="module_category_user_type"/>
</record>
<record id="group_partner_manager" model="res.groups">
<field name="category_id" ref="module_category_usability"/>
</record>
</data> </data>
</odoo> </odoo>

View file

@ -117,7 +117,7 @@
<record model="ir.module.module" id="base.module_mrp_plm"> <record model="ir.module.module" id="base.module_mrp_plm">
<field name="name">mrp_plm</field> <field name="name">mrp_plm</field>
<field name="shortdesc">Product Lifecycle Management (PLM)</field> <field name="shortdesc">Product Lifecycle Management (PLM)</field>
<field name="category_id" ref="base.module_category_manufacturing_manufacturing"/> <field name="category_id" ref="base.module_category_supply_chain"/>
<field name="sequence">155</field> <field name="sequence">155</field>
<field name="application" eval="True"/> <field name="application" eval="True"/>
<field name="summary">PLM, ECOs, Versions</field> <field name="summary">PLM, ECOs, Versions</field>
@ -132,7 +132,7 @@
<field name="name">quality_control</field> <field name="name">quality_control</field>
<field name="shortdesc">Quality</field> <field name="shortdesc">Quality</field>
<field name="sequence">120</field> <field name="sequence">120</field>
<field name="category_id" ref="base.module_category_manufacturing_manufacturing"/> <field name="category_id" ref="base.module_category_supply_chain"/>
<field name="application" eval="True"/> <field name="application" eval="True"/>
<field name="summary">Quality Alerts, Control Points</field> <field name="summary">Quality Alerts, Control Points</field>
<field name="license">OEEL-1</field> <field name="license">OEEL-1</field>
@ -202,7 +202,7 @@
<field name="name">stock_barcode</field> <field name="name">stock_barcode</field>
<field name="shortdesc">Barcode</field> <field name="shortdesc">Barcode</field>
<field name="sequence">255</field> <field name="sequence">255</field>
<field name="category_id" ref="base.module_category_inventory_inventory"/> <field name="category_id" ref="base.module_category_supply_chain"/>
<field name="application" eval="True"/> <field name="application" eval="True"/>
<field name="summary">Barcode scanner for warehouses</field> <field name="summary">Barcode scanner for warehouses</field>
<field name="license">OEEL-1</field> <field name="license">OEEL-1</field>
@ -214,7 +214,7 @@
<record model="ir.module.module" id="base.module_voip"> <record model="ir.module.module" id="base.module_voip">
<field name="name">voip</field> <field name="name">voip</field>
<field name="shortdesc">VoIP</field> <field name="shortdesc">Phone</field>
<field name="sequence">280</field> <field name="sequence">280</field>
<field name="category_id" ref="base.module_category_sales_sales"/> <field name="category_id" ref="base.module_category_sales_sales"/>
<field name="application" eval="True"/> <field name="application" eval="True"/>
@ -258,7 +258,7 @@
<field name="name">mrp_workorder</field> <field name="name">mrp_workorder</field>
<field name="sequence">16</field> <field name="sequence">16</field>
<field name="shortdesc">MRP II</field> <field name="shortdesc">MRP II</field>
<field name="category_id" ref="base.module_category_manufacturing_manufacturing"/> <field name="category_id" ref="base.module_category_supply_chain"/>
<field name="application" eval="True"/> <field name="application" eval="True"/>
<field name="summary">Work Orders, Planning, Routing</field> <field name="summary">Work Orders, Planning, Routing</field>
<field name="license">OEEL-1</field> <field name="license">OEEL-1</field>

View file

@ -638,7 +638,7 @@ state_id_pb,id,"Papua Barat","PB"
state_id_pd,id,"Papua Barat Daya","PD" state_id_pd,id,"Papua Barat Daya","PD"
state_id_ps,id,"Papua Selatan","PS" state_id_ps,id,"Papua Selatan","PS"
state_id_pt,id,"Papua Tengah","PT" state_id_pt,id,"Papua Tengah","PT"
state_id_pp,id,"Papua Pegunungan","PE" state_id_pe,id,"Papua Pegunungan","PE"
state_id_ri,id,"Riau","RI" state_id_ri,id,"Riau","RI"
state_id_sr,id,"Sulawesi Barat","SR" state_id_sr,id,"Sulawesi Barat","SR"
state_id_sn,id,"Sulawesi Selatan","SN" state_id_sn,id,"Sulawesi Selatan","SN"
@ -1047,16 +1047,7 @@ state_tr_79,tr,"Kilis","79"
state_tr_80,tr,"Osmaniye","80" state_tr_80,tr,"Osmaniye","80"
state_tr_81,tr,"Düzce","81" state_tr_81,tr,"Düzce","81"
state_vn_VN-44,vn,"An Giang","VN-44" state_vn_VN-44,vn,"An Giang","VN-44"
state_vn_VN-57,vn,"Bình Dương","VN-57"
state_vn_VN-31,vn,"Bình Định","VN-31"
state_vn_VN-54,vn,"Bắc Giang","VN-54"
state_vn_VN-53,vn,"Bắc Kạn","VN-53"
state_vn_VN-55,vn,"Bạc Liêu","VN-55"
state_vn_VN-56,vn,"Bắc Ninh","VN-56" state_vn_VN-56,vn,"Bắc Ninh","VN-56"
state_vn_VN-58,vn,"Bình Phước","VN-58"
state_vn_VN-43,vn,"Bà Rịa - Vũng Tàu","VN-43"
state_vn_VN-40,vn,"Bình Thuận","VN-40"
state_vn_VN-50,vn,"Bến Tre","VN-50"
state_vn_VN-04,vn,"Cao Bằng","VN-04" state_vn_VN-04,vn,"Cao Bằng","VN-04"
state_vn_VN-59,vn,"Cà Mau","VN-59" state_vn_VN-59,vn,"Cà Mau","VN-59"
state_vn_VN-CT,vn,"TP Cần Thơ","VN-CT" state_vn_VN-CT,vn,"TP Cần Thơ","VN-CT"
@ -1064,51 +1055,31 @@ state_vn_VN-71,vn,"Điện Biên","VN-71"
state_vn_VN-33,vn,"Đắk Lắk","VN-33" state_vn_VN-33,vn,"Đắk Lắk","VN-33"
state_vn_VN-DN,vn,"TP Đà Nẵng","VN-DN" state_vn_VN-DN,vn,"TP Đà Nẵng","VN-DN"
state_vn_VN-39,vn,"Đồng Nai","VN-39" state_vn_VN-39,vn,"Đồng Nai","VN-39"
state_vn_VN-72,vn,"Đắk Nông","VN-72"
state_vn_VN-45,vn,"Đồng Tháp","VN-45" state_vn_VN-45,vn,"Đồng Tháp","VN-45"
state_vn_VN-30,vn,"Gia Lai","VN-30" state_vn_VN-30,vn,"Gia Lai","VN-30"
state_vn_VN-14,vn,"Hòa Bình","VN-14"
state_vn_VN-SG,vn,"TP Hồ Chí Minh","VN-SG" state_vn_VN-SG,vn,"TP Hồ Chí Minh","VN-SG"
state_vn_VN-61,vn,"Hải Dương","VN-61"
state_vn_VN-73,vn,"Hậu Giang","VN-73"
state_vn_VN-03,vn,"Hà Giang","VN-03"
state_vn_VN-HN,vn,"Hà Nội","VN-HN" state_vn_VN-HN,vn,"Hà Nội","VN-HN"
state_vn_VN-63,vn,"Hà Nam","VN-63"
state_vn_VN-HP,vn,"TP Hải Phòng","VN-HP" state_vn_VN-HP,vn,"TP Hải Phòng","VN-HP"
state_vn_VN-23,vn,"Hà Tĩnh","VN-23" state_vn_VN-23,vn,"Hà Tĩnh","VN-23"
state_vn_VN-66,vn,"Hưng Yên","VN-66" state_vn_VN-66,vn,"Hưng Yên","VN-66"
state_vn_VN-47,vn,"Kiên Giang","VN-47"
state_vn_VN-34,vn,"Khánh Hòa","VN-34" state_vn_VN-34,vn,"Khánh Hòa","VN-34"
state_vn_VN-28,vn,"Kon Tum","VN-28"
state_vn_VN-41,vn,"Long An","VN-41"
state_vn_VN-02,vn,"Lào Cai","VN-02" state_vn_VN-02,vn,"Lào Cai","VN-02"
state_vn_VN-01,vn,"Lai Châu","VN-01" state_vn_VN-01,vn,"Lai Châu","VN-01"
state_vn_VN-35,vn,"Lâm Đồng","VN-35" state_vn_VN-35,vn,"Lâm Đồng","VN-35"
state_vn_VN-09,vn,"Lạng Sơn","VN-09" state_vn_VN-09,vn,"Lạng Sơn","VN-09"
state_vn_VN-22,vn,"Nghệ An","VN-22" state_vn_VN-22,vn,"Nghệ An","VN-22"
state_vn_VN-18,vn,"Ninh Bình","VN-18" state_vn_VN-18,vn,"Ninh Bình","VN-18"
state_vn_VN-67,vn,"Nam Định","VN-67"
state_vn_VN-36,vn,"Ninh Thuận","VN-36"
state_vn_VN-68,vn,"Phú Thọ","VN-68" state_vn_VN-68,vn,"Phú Thọ","VN-68"
state_vn_VN-32,vn,"Phú Yên","VN-32"
state_vn_VN-24,vn,"Quảng Bình","VN-24"
state_vn_VN-13,vn,"Quảng Ninh","VN-13" state_vn_VN-13,vn,"Quảng Ninh","VN-13"
state_vn_VN-27,vn,"Quảng Nam","VN-27"
state_vn_VN-29,vn,"Quảng Ngãi","VN-29" state_vn_VN-29,vn,"Quảng Ngãi","VN-29"
state_vn_VN-25,vn,"Quảng Trị","VN-25" state_vn_VN-25,vn,"Quảng Trị","VN-25"
state_vn_VN-05,vn,"Sơn La","VN-05" state_vn_VN-05,vn,"Sơn La","VN-05"
state_vn_VN-52,vn,"Sóc Trăng","VN-52"
state_vn_VN-20,vn,"Thái Bình","VN-20"
state_vn_VN-46,vn,"Tiền Giang","VN-46"
state_vn_VN-21,vn,"Thanh Hóa","VN-21" state_vn_VN-21,vn,"Thanh Hóa","VN-21"
state_vn_VN-69,vn,"Thái Nguyên","VN-69" state_vn_VN-69,vn,"Thái Nguyên","VN-69"
state_vn_VN-37,vn,"Tây Ninh","VN-37" state_vn_VN-37,vn,"Tây Ninh","VN-37"
state_vn_VN-07,vn,"Tuyên Quang","VN-07" state_vn_VN-07,vn,"Tuyên Quang","VN-07"
state_vn_VN-26,vn,"Thừa Thiên - Huế","VN-26" state_vn_VN-26,vn,"Thừa Thiên - Huế","VN-26"
state_vn_VN-51,vn,"Trà Vinh","VN-51"
state_vn_VN-49,vn,"Vĩnh Long","VN-49" state_vn_VN-49,vn,"Vĩnh Long","VN-49"
state_vn_VN-70,vn,"Vĩnh Phúc","VN-70"
state_vn_VN-06,vn,"Yên Bái","VN-06"
state_SJ,cr,"San José","1" state_SJ,cr,"San José","1"
state_A,cr,"Alajuela","2" state_A,cr,"Alajuela","2"
state_H,cr,"Heredia","4" state_H,cr,"Heredia","4"
@ -1765,6 +1736,141 @@ state_kr_47,kr,"경상북도","KR-47"
state_kr_48,kr,"경상남도","KR-48" state_kr_48,kr,"경상남도","KR-48"
state_kr_49,kr,"제주특별자치도","KR-49" state_kr_49,kr,"제주특별자치도","KR-49"
state_kr_50,kr,"세종특별자치시","KR-50" state_kr_50,kr,"세종특별자치시","KR-50"
state_pl_ds,pl,"dolnośląskie",""
state_pl_kp,pl,"kujawsko-pomorskie","KP"
state_pl_lb,pl,"lubelskie","LB"
state_pl_ls,pl,"lubuskie","LS"
state_pl_ld,pl,"łódzkie","ŁD"
state_pl_mp,pl,"małopolskie","MP"
state_pl_mz,pl,"mazowieckie","MZ"
state_pl_op,pl,"opolskie","OP"
state_pl_pk,pl,"podkarpackie","PK"
state_pl_pl,pl,"podlaskie","PL"
state_pl_pm,pl,"pomorskie","PM"
state_pl_sl,pl,"śląskie","ŚL"
state_pl_sk,pl,"świętokrzyskie","ŚK"
state_pl_wm,pl,"warmińsko-mazurskie","WM"
state_pl_wp,pl,"wielkopolskie","WP"
state_pl_zp,pl,"zachodniopomorskie","ZP"
state_pk_ajk,pk,"Azad Jammu and Kashmir","AJK"
state_pk_ba,pk,"Balochistan","BA"
state_pk_gb,pk,"Gilgit-Baltistan","GB"
state_pk_is,pk,"Islamabad Capital Territory","IS/ICT"
state_pk_kp,pk,"Khyber Pakhtunkhwa","KP/KPK"
state_pk_pb,pk,"Punjab","PB"
state_pk_sd,pk,"Sindh","SD"
state_iq_01,iq,"Al Anbar","IQ-AN"
state_iq_01_ar,iq,"الأنبار","IQ-AN-AR"
state_iq_02,iq,"Al Basrah","IQ-BA"
state_iq_02_ar,iq,"البصرة","IQ-BA-AR"
state_iq_03,iq,"Al Muthanna","IQ-MU"
state_iq_03_ar,iq,"المثنى","IQ-MU-AR"
state_iq_04,iq,"Al Qādisiyyah","IQ-QA"
state_iq_04_ar,iq,"القادسية","IQ-QA-AR"
state_iq_05,iq,"Sulaymaniyah","IQ-SU"
state_iq_05_ar,iq,"السليمانية","IQ-SU-AR"
state_iq_06,iq,"Babil","IQ-BB"
state_iq_06_ar,iq,"بابل","IQ-BB-AR"
state_iq_07,iq,"Baghdad","IQ-BG"
state_iq_07_ar,iq,"بغداد","IQ-BG-AR"
state_iq_08,iq,"Duhok","IQ-DA"
state_iq_08_ar,iq,"دهوك","IQ-DA-AR"
state_iq_09,iq,"Dhi Qar","IQ-DQ"
state_iq_09_ar,iq,"ذي قار","IQ-DQ-AR"
state_iq_10,iq,"Diyala","IQ-DI"
state_iq_10_ar,iq,"ديالى","IQ-DI-AR"
state_iq_11,iq,"Erbil","IQ-AR"
state_iq_11_ar,iq,"أربيل","IQ-AR-AR"
state_iq_12,iq,"Karbala'","IQ-KA"
state_iq_12_ar,iq,"كربلاء","IQ-KA-AR"
state_iq_13,iq,"Kirkuk","IQ-KI"
state_iq_13_ar,iq,"كركوك","IQ-KI-AR"
state_iq_14,iq,"Maysan","IQ-MA"
state_iq_14_ar,iq,"ميسان","IQ-MA-AR"
state_iq_15,iq,"Ninawa","IQ-NI"
state_iq_15_ar,iq,"نينوى","IQ-NI-AR"
state_iq_16,iq,"Wasit","IQ-WA"
state_iq_16_ar,iq,"واسط","IQ-WA-AR"
state_iq_17,iq,"Najaf","IQ-NA"
state_iq_17_ar,iq,"النجف","IQ-NA-AR"
state_iq_18,iq,"Salah Al Din","IQ-SD"
state_iq_18_ar,iq,"صلاح الدين","IQ-SD-AR"
state_bd_a,bd,"Barishal","BD-A"
state_bd_b,bd,"Chattogram","BD-B"
state_bd_c,bd,"Dhaka","BD-C"
state_bd_d,bd,"Khulna","BD-D"
state_bd_e,bd,"Rajshahi","BD-E"
state_bd_f,bd,"Rangpur","BD-F"
state_bd_g,bd,"Sylhet","BD-G"
state_bd_h,bd,"Mymensingh","BD-H"
state_at_1,at,"Burgenland","1"
state_at_2,at,"Kärnten","2"
state_at_3,at,"Niederösterreich","3"
state_at_4,at,"Oberösterreich","4"
state_at_5,at,"Salzburg","5"
state_at_6,at,"Steiermark","6"
state_at_7,at,"Tirol","7"
state_at_8,at,"Vorarlberg","8"
state_at_9,at,"Wien","9"
state_tw_chh,tw,彰化縣,CHH
state_tw_cic,tw,嘉義市,CIC
state_tw_cih,tw,嘉義縣,CIH
state_tw_hch,tw,新竹縣,HCH
state_tw_hct,tw,新竹市,HCT
state_tw_hlh,tw,花蓮縣,HLH
state_tw_ilh,tw,宜蘭縣,ILH
state_tw_khc,tw,高雄市,KHC
state_tw_klc,tw,基隆市,KLC
state_tw_kmc,tw,金門縣,KMC
state_tw_lcc,tw,連江縣,LCC
state_tw_mlh,tw,苗栗縣,MLH
state_tw_ntc,tw,南投縣,NTC
state_tw_ntpc,tw,新北市,NTPC
state_tw_phc,tw,澎湖縣,PHC
state_tw_pth,tw,屏東縣,PTH
state_tw_tcc,tw,台中市,TCC
state_tw_tnh,tw,台南市,TNH
state_tw_tpc,tw,台北市,TPC
state_tw_tth,tw,台東縣,TTH
state_tw_tyc,tw,桃園市,TYC
state_tw_ylh,tw,雲林縣,YLH
state_ng_ab,ng,"Abia","NG-AB"
state_ng_ad,ng,"Adamawa","NG-AD"
state_ng_ak,ng,"Akwa Ibom","NG-AK"
state_ng_an,ng,"Anambra","NG-AN"
state_ng_ba,ng,"Bauchi","NG-BA"
state_ng_by,ng,"Bayelsa","NG-BY"
state_ng_be,ng,"Benue","NG-BE"
state_ng_bo,ng,"Borno","NG-BO"
state_ng_cr,ng,"Cross River","NG-CR"
state_ng_de,ng,"Delta","NG-DE"
state_ng_eb,ng,"Ebonyi","NG-EB"
state_ng_ed,ng,"Edo","NG-ED"
state_ng_ek,ng,"Ekiti","NG-EK"
state_ng_en,ng,"Enugu","NG-EN"
state_ng_go,ng,"Gombe","NG-GO"
state_ng_im,ng,"Imo","NG-IM"
state_ng_ji,ng,"Jigawa","NG-JI"
state_ng_ka,ng,"Kaduna","NG-KD"
state_ng_kn,ng,"Kano","NG-KN"
state_ng_kt,ng,"Katsina","NG-KT"
state_ng_ke,ng,"Kebbi","NG-KE"
state_ng_ko,ng,"Kogi","NG-KO"
state_ng_kw,ng,"Kwara","NG-KW"
state_ng_la,ng,"Lagos","NG-LA"
state_ng_na,ng,"Nasarawa","NG-NA"
state_ng_ni,ng,"Niger","NG-NI"
state_ng_og,ng,"Ogun","NG-OG"
state_ng_on,ng,"Ondo","NG-ON"
state_ng_os,ng,"Osun","NG-OS"
state_ng_oy,ng,"Oyo","NG-OY"
state_ng_pl,ng,"Plateau","NG-PL"
state_ng_ri,ng,"Rivers","NG-RI"
state_ng_so,ng,"Sokoto","NG-SO"
state_ng_ta,ng,"Taraba","NG-TA"
state_ng_yo,ng,"Yobe","NG-YO"
state_ng_za,ng,"Zamfara","NG-ZA"
state_ng_fc,ng,"FCT","NG-FC"
state_be_1,be,"Antwerp","VAN" state_be_1,be,"Antwerp","VAN"
state_be_2,be,"Limburg","VLI" state_be_2,be,"Limburg","VLI"
state_be_3,be,"East Flanders","VOV" state_be_3,be,"East Flanders","VOV"
@ -1775,6 +1881,14 @@ state_be_7,be,"Hainaut","WHT"
state_be_8,be,"Liège","WLG" state_be_8,be,"Liège","WLG"
state_be_9,be,"Luxembourg","WLX" state_be_9,be,"Luxembourg","WLX"
state_be_10,be,"Namur","WNA" state_be_10,be,"Namur","WNA"
state_so_1,so,"Banaadir","BN"
state_so_2,so,"Galmudug","GM"
state_so_3,so,"Somaliland","SL"
state_so_4,so,"Puntland","PL"
state_so_5,so,"Jubaland","JL"
state_so_6,so,"Hirshabelle","HS"
state_so_7,so,"Koonfur Galbeed","KG"
state_so_8,so,"Khatumo","SSC"
state_bn_b,bn,"Brunei-Muara","B" state_bn_b,bn,"Brunei-Muara","B"
state_bn_k,bn,"Belait","K" state_bn_k,bn,"Belait","K"
state_bn_t,bn,"Tutong","T" state_bn_t,bn,"Tutong","T"

1 id country_id:id name code
638 state_id_pd id Papua Barat Daya PD
639 state_id_ps id Papua Selatan PS
640 state_id_pt id Papua Tengah PT
641 state_id_pp state_id_pe id Papua Pegunungan PE
642 state_id_ri id Riau RI
643 state_id_sr id Sulawesi Barat SR
644 state_id_sn id Sulawesi Selatan SN
1047 state_tr_80 tr Osmaniye 80
1048 state_tr_81 tr Düzce 81
1049 state_vn_VN-44 vn An Giang VN-44
state_vn_VN-57 vn Bình Dương VN-57
state_vn_VN-31 vn Bình Định VN-31
state_vn_VN-54 vn Bắc Giang VN-54
state_vn_VN-53 vn Bắc Kạn VN-53
state_vn_VN-55 vn Bạc Liêu VN-55
1050 state_vn_VN-56 vn Bắc Ninh VN-56
state_vn_VN-58 vn Bình Phước VN-58
state_vn_VN-43 vn Bà Rịa - Vũng Tàu VN-43
state_vn_VN-40 vn Bình Thuận VN-40
state_vn_VN-50 vn Bến Tre VN-50
1051 state_vn_VN-04 vn Cao Bằng VN-04
1052 state_vn_VN-59 vn Cà Mau VN-59
1053 state_vn_VN-CT vn TP Cần Thơ VN-CT
1055 state_vn_VN-33 vn Đắk Lắk VN-33
1056 state_vn_VN-DN vn TP Đà Nẵng VN-DN
1057 state_vn_VN-39 vn Đồng Nai VN-39
state_vn_VN-72 vn Đắk Nông VN-72
1058 state_vn_VN-45 vn Đồng Tháp VN-45
1059 state_vn_VN-30 vn Gia Lai VN-30
state_vn_VN-14 vn Hòa Bình VN-14
1060 state_vn_VN-SG vn TP Hồ Chí Minh VN-SG
state_vn_VN-61 vn Hải Dương VN-61
state_vn_VN-73 vn Hậu Giang VN-73
state_vn_VN-03 vn Hà Giang VN-03
1061 state_vn_VN-HN vn Hà Nội VN-HN
state_vn_VN-63 vn Hà Nam VN-63
1062 state_vn_VN-HP vn TP Hải Phòng VN-HP
1063 state_vn_VN-23 vn Hà Tĩnh VN-23
1064 state_vn_VN-66 vn Hưng Yên VN-66
state_vn_VN-47 vn Kiên Giang VN-47
1065 state_vn_VN-34 vn Khánh Hòa VN-34
state_vn_VN-28 vn Kon Tum VN-28
state_vn_VN-41 vn Long An VN-41
1066 state_vn_VN-02 vn Lào Cai VN-02
1067 state_vn_VN-01 vn Lai Châu VN-01
1068 state_vn_VN-35 vn Lâm Đồng VN-35
1069 state_vn_VN-09 vn Lạng Sơn VN-09
1070 state_vn_VN-22 vn Nghệ An VN-22
1071 state_vn_VN-18 vn Ninh Bình VN-18
state_vn_VN-67 vn Nam Định VN-67
state_vn_VN-36 vn Ninh Thuận VN-36
1072 state_vn_VN-68 vn Phú Thọ VN-68
state_vn_VN-32 vn Phú Yên VN-32
state_vn_VN-24 vn Quảng Bình VN-24
1073 state_vn_VN-13 vn Quảng Ninh VN-13
state_vn_VN-27 vn Quảng Nam VN-27
1074 state_vn_VN-29 vn Quảng Ngãi VN-29
1075 state_vn_VN-25 vn Quảng Trị VN-25
1076 state_vn_VN-05 vn Sơn La VN-05
state_vn_VN-52 vn Sóc Trăng VN-52
state_vn_VN-20 vn Thái Bình VN-20
state_vn_VN-46 vn Tiền Giang VN-46
1077 state_vn_VN-21 vn Thanh Hóa VN-21
1078 state_vn_VN-69 vn Thái Nguyên VN-69
1079 state_vn_VN-37 vn Tây Ninh VN-37
1080 state_vn_VN-07 vn Tuyên Quang VN-07
1081 state_vn_VN-26 vn Thừa Thiên - Huế VN-26
state_vn_VN-51 vn Trà Vinh VN-51
1082 state_vn_VN-49 vn Vĩnh Long VN-49
state_vn_VN-70 vn Vĩnh Phúc VN-70
state_vn_VN-06 vn Yên Bái VN-06
1083 state_SJ cr San José 1
1084 state_A cr Alajuela 2
1085 state_H cr Heredia 4
1736 state_kr_48 kr 경상남도 KR-48
1737 state_kr_49 kr 제주특별자치도 KR-49
1738 state_kr_50 kr 세종특별자치시 KR-50
1739 state_pl_ds pl dolnośląskie
1740 state_pl_kp pl kujawsko-pomorskie KP
1741 state_pl_lb pl lubelskie LB
1742 state_pl_ls pl lubuskie LS
1743 state_pl_ld pl łódzkie ŁD
1744 state_pl_mp pl małopolskie MP
1745 state_pl_mz pl mazowieckie MZ
1746 state_pl_op pl opolskie OP
1747 state_pl_pk pl podkarpackie PK
1748 state_pl_pl pl podlaskie PL
1749 state_pl_pm pl pomorskie PM
1750 state_pl_sl pl śląskie ŚL
1751 state_pl_sk pl świętokrzyskie ŚK
1752 state_pl_wm pl warmińsko-mazurskie WM
1753 state_pl_wp pl wielkopolskie WP
1754 state_pl_zp pl zachodniopomorskie ZP
1755 state_pk_ajk pk Azad Jammu and Kashmir AJK
1756 state_pk_ba pk Balochistan BA
1757 state_pk_gb pk Gilgit-Baltistan GB
1758 state_pk_is pk Islamabad Capital Territory IS/ICT
1759 state_pk_kp pk Khyber Pakhtunkhwa KP/KPK
1760 state_pk_pb pk Punjab PB
1761 state_pk_sd pk Sindh SD
1762 state_iq_01 iq Al Anbar IQ-AN
1763 state_iq_01_ar iq الأنبار IQ-AN-AR
1764 state_iq_02 iq Al Basrah IQ-BA
1765 state_iq_02_ar iq البصرة IQ-BA-AR
1766 state_iq_03 iq Al Muthanna IQ-MU
1767 state_iq_03_ar iq المثنى IQ-MU-AR
1768 state_iq_04 iq Al Qādisiyyah IQ-QA
1769 state_iq_04_ar iq القادسية IQ-QA-AR
1770 state_iq_05 iq Sulaymaniyah IQ-SU
1771 state_iq_05_ar iq السليمانية IQ-SU-AR
1772 state_iq_06 iq Babil IQ-BB
1773 state_iq_06_ar iq بابل IQ-BB-AR
1774 state_iq_07 iq Baghdad IQ-BG
1775 state_iq_07_ar iq بغداد IQ-BG-AR
1776 state_iq_08 iq Duhok IQ-DA
1777 state_iq_08_ar iq دهوك IQ-DA-AR
1778 state_iq_09 iq Dhi Qar IQ-DQ
1779 state_iq_09_ar iq ذي قار IQ-DQ-AR
1780 state_iq_10 iq Diyala IQ-DI
1781 state_iq_10_ar iq ديالى IQ-DI-AR
1782 state_iq_11 iq Erbil IQ-AR
1783 state_iq_11_ar iq أربيل IQ-AR-AR
1784 state_iq_12 iq Karbala' IQ-KA
1785 state_iq_12_ar iq كربلاء IQ-KA-AR
1786 state_iq_13 iq Kirkuk IQ-KI
1787 state_iq_13_ar iq كركوك IQ-KI-AR
1788 state_iq_14 iq Maysan IQ-MA
1789 state_iq_14_ar iq ميسان IQ-MA-AR
1790 state_iq_15 iq Ninawa IQ-NI
1791 state_iq_15_ar iq نينوى IQ-NI-AR
1792 state_iq_16 iq Wasit IQ-WA
1793 state_iq_16_ar iq واسط IQ-WA-AR
1794 state_iq_17 iq Najaf IQ-NA
1795 state_iq_17_ar iq النجف IQ-NA-AR
1796 state_iq_18 iq Salah Al Din IQ-SD
1797 state_iq_18_ar iq صلاح الدين IQ-SD-AR
1798 state_bd_a bd Barishal BD-A
1799 state_bd_b bd Chattogram BD-B
1800 state_bd_c bd Dhaka BD-C
1801 state_bd_d bd Khulna BD-D
1802 state_bd_e bd Rajshahi BD-E
1803 state_bd_f bd Rangpur BD-F
1804 state_bd_g bd Sylhet BD-G
1805 state_bd_h bd Mymensingh BD-H
1806 state_at_1 at Burgenland 1
1807 state_at_2 at Kärnten 2
1808 state_at_3 at Niederösterreich 3
1809 state_at_4 at Oberösterreich 4
1810 state_at_5 at Salzburg 5
1811 state_at_6 at Steiermark 6
1812 state_at_7 at Tirol 7
1813 state_at_8 at Vorarlberg 8
1814 state_at_9 at Wien 9
1815 state_tw_chh tw 彰化縣 CHH
1816 state_tw_cic tw 嘉義市 CIC
1817 state_tw_cih tw 嘉義縣 CIH
1818 state_tw_hch tw 新竹縣 HCH
1819 state_tw_hct tw 新竹市 HCT
1820 state_tw_hlh tw 花蓮縣 HLH
1821 state_tw_ilh tw 宜蘭縣 ILH
1822 state_tw_khc tw 高雄市 KHC
1823 state_tw_klc tw 基隆市 KLC
1824 state_tw_kmc tw 金門縣 KMC
1825 state_tw_lcc tw 連江縣 LCC
1826 state_tw_mlh tw 苗栗縣 MLH
1827 state_tw_ntc tw 南投縣 NTC
1828 state_tw_ntpc tw 新北市 NTPC
1829 state_tw_phc tw 澎湖縣 PHC
1830 state_tw_pth tw 屏東縣 PTH
1831 state_tw_tcc tw 台中市 TCC
1832 state_tw_tnh tw 台南市 TNH
1833 state_tw_tpc tw 台北市 TPC
1834 state_tw_tth tw 台東縣 TTH
1835 state_tw_tyc tw 桃園市 TYC
1836 state_tw_ylh tw 雲林縣 YLH
1837 state_ng_ab ng Abia NG-AB
1838 state_ng_ad ng Adamawa NG-AD
1839 state_ng_ak ng Akwa Ibom NG-AK
1840 state_ng_an ng Anambra NG-AN
1841 state_ng_ba ng Bauchi NG-BA
1842 state_ng_by ng Bayelsa NG-BY
1843 state_ng_be ng Benue NG-BE
1844 state_ng_bo ng Borno NG-BO
1845 state_ng_cr ng Cross River NG-CR
1846 state_ng_de ng Delta NG-DE
1847 state_ng_eb ng Ebonyi NG-EB
1848 state_ng_ed ng Edo NG-ED
1849 state_ng_ek ng Ekiti NG-EK
1850 state_ng_en ng Enugu NG-EN
1851 state_ng_go ng Gombe NG-GO
1852 state_ng_im ng Imo NG-IM
1853 state_ng_ji ng Jigawa NG-JI
1854 state_ng_ka ng Kaduna NG-KD
1855 state_ng_kn ng Kano NG-KN
1856 state_ng_kt ng Katsina NG-KT
1857 state_ng_ke ng Kebbi NG-KE
1858 state_ng_ko ng Kogi NG-KO
1859 state_ng_kw ng Kwara NG-KW
1860 state_ng_la ng Lagos NG-LA
1861 state_ng_na ng Nasarawa NG-NA
1862 state_ng_ni ng Niger NG-NI
1863 state_ng_og ng Ogun NG-OG
1864 state_ng_on ng Ondo NG-ON
1865 state_ng_os ng Osun NG-OS
1866 state_ng_oy ng Oyo NG-OY
1867 state_ng_pl ng Plateau NG-PL
1868 state_ng_ri ng Rivers NG-RI
1869 state_ng_so ng Sokoto NG-SO
1870 state_ng_ta ng Taraba NG-TA
1871 state_ng_yo ng Yobe NG-YO
1872 state_ng_za ng Zamfara NG-ZA
1873 state_ng_fc ng FCT NG-FC
1874 state_be_1 be Antwerp VAN
1875 state_be_2 be Limburg VLI
1876 state_be_3 be East Flanders VOV
1881 state_be_8 be Liège WLG
1882 state_be_9 be Luxembourg WLX
1883 state_be_10 be Namur WNA
1884 state_so_1 so Banaadir BN
1885 state_so_2 so Galmudug GM
1886 state_so_3 so Somaliland SL
1887 state_so_4 so Puntland PL
1888 state_so_5 so Jubaland JL
1889 state_so_6 so Hirshabelle HS
1890 state_so_7 so Koonfur Galbeed KG
1891 state_so_8 so Khatumo SSC
1892 state_bn_b bn Brunei-Muara B
1893 state_bn_k bn Belait K
1894 state_bn_t bn Tutong T

View file

@ -1,93 +1,93 @@
"id","name","code","iso_code","direction","grouping","decimal_point","thousands_sep","date_format","time_format","short_time_format","week_start" "id","name","code","iso_code","direction","grouping","decimal_point","thousands_sep","date_format","time_format","week_start"
"base.lang_en","English (US)","en_US","en","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%H:%M:%S","%H:%M","7" "base.lang_en","English (US)","en_US","en","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%I:%M:%S %p","7"
"base.lang_am_ET","Amharic / አምሃርኛ","am_ET","am_ET","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S","%I:%M","7" "base.lang_am_ET","Amharic / አምሃርኛ","am_ET","am_ET","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_ar","Arabic / الْعَرَبيّة","ar_001","ar","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","%I:%M","6" "base.lang_ar","Arabic / الْعَرَبيّة","ar_001","ar","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","6"
"base.lang_ar_SY","Arabic (Syria) / الْعَرَبيّة","ar_SY","ar_SY","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","%I:%M","6" "base.lang_ar_SY","Arabic (Syria) / الْعَرَبيّة","ar_SY","ar_SY","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","6"
"base.lang_az","Azerbaijani / Azərbaycanca","az_AZ","az","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_az","Azerbaijani / Azərbaycanca","az_AZ","az","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_eu_ES","Basque / Euskara","eu_ES","eu_ES","Left-to-Right","[]",",",,"%a, %Y.eko %bren %da","%H:%M:%S","%H:%M","1" "base.lang_eu_ES","Basque / Euskara","eu_ES","eu_ES","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_be","Belarusian / Беларуская мова","be_BY","be","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_be","Belarusian / Беларуская мова","be_BY","be","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_bn_IN","Bengali / বাংলা","bn_IN","bn_IN","Left-to-Right","[]",",",,"%A %d %b %Y","%I:%M:%S","%I:%M","1" "base.lang_bn_IN","Bengali / বাংলা","bn_IN","bn_IN","Left-to-Right","[3,0]",",","","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_bs_BA","Bosnian / bosanski jezik","bs_BA","bs","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_bs_BA","Bosnian / bosanski jezik","bs_BA","bs","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_bg","Bulgarian / български език","bg_BG","bg","Left-to-Right","[3,0]",",",,"%d.%m.%Y","%H,%M,%S","%H,%M","1" "base.lang_bg","Bulgarian / български език","bg_BG","bg","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ca_ES","Catalan / Català","ca_ES","ca_ES","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_ca_ES","Catalan / Català","ca_ES","ca_ES","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_zh_CN","Chinese (Simplified) / 简体中文","zh_CN","zh_CN","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H时%M分%S秒","%H时%M分","7" "base.lang_zh_CN","Chinese, Simplified / 简体中文","zh_CN","zh_CN","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","7"
"base.lang_zh_HK","Chinese (HK)","zh_HK","zh_HK","Left-to-Right","[3,0]",".",",","%Y年%m月%d日 %A","%I時%M分%S秒","%I時%M分","7" "base.lang_zh_HK","Chinese, Traditional (HK) / 繁體中文 (香港)","zh_HK","zh_HK","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%I:%M:%S %p","7"
"base.lang_zh_TW","Chinese (Traditional) / 繁體中文","zh_TW","zh_TW","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","%H時%M分","7" "base.lang_zh_TW","Chinese, Traditional (TW) / 繁體中文 (台灣)","zh_TW","zh_TW","Left-to-Right","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","7"
"base.lang_hr","Croatian / hrvatski jezik","hr_HR","hr","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_hr","Croatian / hrvatski jezik","hr_HR","hr","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_cs_CZ","Czech / Čeština","cs_CZ","cs_CZ","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_cs_CZ","Czech / Čeština","cs_CZ","cs_CZ","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_da_DK","Danish / Dansk","da_DK","da_DK","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1" "base.lang_da_DK","Danish / Dansk","da_DK","da_DK","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_nl_BE","Dutch (BE) / Nederlands (BE)","nl_BE","nl_BE","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1" "base.lang_nl_BE","Dutch (BE) / Nederlands (BE)","nl_BE","nl_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_nl","Dutch / Nederlands","nl_NL","nl","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1" "base.lang_nl","Dutch / Nederlands","nl_NL","nl","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_en_AU","English (AU)","en_AU","en_AU","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_en_AU","English (AU)","en_AU","en_AU","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_en_CA","English (CA)","en_CA","en_CA","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","%H:%M","7" "base.lang_en_CA","English (CA)","en_CA","en_CA","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%I:%M:%S %p","7"
"base.lang_en_GB","English (UK)","en_GB","en_GB","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_en_GB","English (UK)","en_GB","en_GB","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_en_IN","English (IN)","en_IN","en_IN","Left-to-Right","[3,2,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_en_IN","English (IN)","en_IN","en_IN","Left-to-Right","[3,2,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_en_NZ","English (NZ)","en_NZ","en_NZ","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_en_NZ","English (NZ)","en_NZ","en_NZ","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_et_EE","Estonian / Eesti keel","et_EE","et","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_et_EE","Estonian / Eesti keel","et_EE","et","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fi","Finnish / Suomi","fi_FI","fi","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H.%M.%S","%H.%M","1" "base.lang_fi","Finnish / Suomi","fi_FI","fi","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fr_BE","French (BE) / Français (BE)","fr_BE","fr_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_fr_BE","French (BE) / Français (BE)","fr_BE","fr_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fr_CA","French (CA) / Français (CA)","fr_CA","fr_CA","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","%H:%M","7" "base.lang_fr_CA","French (CA) / Français (CA)","fr_CA","fr_CA","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","7"
"base.lang_fr_CH","French (CH) / Français (CH)","fr_CH","fr_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_fr_CH","French (CH) / Français (CH)","fr_CH","fr_CH","Left-to-Right","[3,0]",".","'","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fr","French / Français","fr_FR","fr","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_fr","French / Français","fr_FR","fr","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_gl_ES","Galician / Galego","gl_ES","gl","Left-to-Right","[]",",",,"%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_gl_ES","Galician / Galego","gl_ES","gl","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ka_GE","Georgian / ქართული ენა","ka_GE","ka","Left-to-Right","[3,0]",",",".","%m/%d/%Y","%H:%M:%S","%H:%M","1" "base.lang_ka_GE","Georgian / ქართული ენა","ka_GE","ka","Left-to-Right","[3,0]",",",".","%m/%d/%Y","%H:%M:%S","1"
"base.lang_de","German / Deutsch","de_DE","de","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_de","German / Deutsch","de_DE","de","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_de_CH","German (CH) / Deutsch (CH)","de_CH","de_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_de_CH","German (CH) / Deutsch (CH)","de_CH","de_CH","Left-to-Right","[3,0]",".","'","%d/%m/%Y","%H:%M:%S","1"
"base.lang_el_GR","Greek / Ελληνικά","el_GR","el_GR","Left-to-Right","[]",",",".","%d/%m/%Y","%I:%M:%S %p","%I:%M %p","1" "base.lang_el_GR","Greek / Ελληνικά","el_GR","el_GR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_gu_IN","Gujarati / ગુજરાતી","gu_IN","gu","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","%I:%M","7" "base.lang_gu_IN","Gujarati / ગુજરાતી","gu_IN","gu","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_he_IL","Hebrew / עברית","he_IL","he","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_he_IL","Hebrew / עברית","he_IL","he","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_hi_IN","Hindi / हिंदी","hi_IN","hi","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","%I:%M","7" "base.lang_hi_IN","Hindi / हिंदी","hi_IN","hi","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_hu","Hungarian / Magyar","hu_HU","hu","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","%H:%M","1" "base.lang_hu","Hungarian / Magyar","hu_HU","hu","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_id","Indonesian / Bahasa Indonesia","id_ID","id","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_id","Indonesian / Bahasa Indonesia","id_ID","id","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_it","Italian / Italiano","it_IT","it","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_it","Italian / Italiano","it_IT","it","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ja_JP","Japanese / 日本語","ja_JP","ja","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","%H時%M分","7" "base.lang_ja_JP","Japanese / 日本語","ja_JP","ja","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","7"
"base.lang_kab_DZ","Kabyle / Taqbaylit","kab_DZ","kab","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","6" "base.lang_kab_DZ","Kabyle / Taqbaylit","kab_DZ","kab","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","6"
"base.lang_km","Khmer / ភាសាខ្មែរ","km_KH","km","Left-to-Right","[3,0]",".",",","%d %B %Y","%H:%M:%S","%H:%M","7" "base.lang_km","Khmer / ភាសាខ្មែរ","km_KH","km","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_ko_KP","Korean (KP) / 한국어 (KP)","ko_KP","ko_KP","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","1" "base.lang_ko_KP","Korean (KP) / 한국어 (KP)","ko_KP","ko_KP","Left-to-Right","[3,0]",".",",","%Y/%m/%d","%I:%M:%S %p","1"
"base.lang_ko_KR","Korean (KR) / 한국어 (KR)","ko_KR","ko_KR","Left-to-Right","[3,0]",".",",","%Y년 %m월 %d일","%H시 %M분 %S초","%H시 %M분","7" "base.lang_ko_KR","Korean (KR) / 한국어 (KR)","ko_KR","ko_KR","Left-to-Right","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","7"
"base.lang_lo_LA","Lao / ພາສາລາວ","lo_LA","lo","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_lo_LA","Lao / ພາສາລາວ","lo_LA","lo","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_lv","Latvian / latviešu valoda","lv_LV","lv","Left-to-Right","[3,0]",","," ","%Y.%m.%d.","%H:%M:%S","%H:%M","1" "base.lang_lv","Latvian / latviešu valoda","lv_LV","lv","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_lt","Lithuanian / Lietuvių kalba","lt_LT","lt","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","%H:%M","1" "base.lang_lt","Lithuanian / Lietuvių kalba","lt_LT","lt","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_lb","Luxembourgish","lb_LU","lb","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_lb","Luxembourgish","lb_LU","lb","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_mk","Macedonian / македонски јазик","mk_MK","mk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_mk","Macedonian / македонски јазик","mk_MK","mk","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ml","Malayalam / മലയാളം","ml_IN","ml","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_ml","Malayalam / മലയാളം","ml_IN","ml","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_mn","Mongolian / монгол","mn_MN","mn","Left-to-Right","[3,0]",".","'","%Y-%m-%d","%H:%M:%S","%H:%M","7" "base.lang_mn","Mongolian / монгол","mn_MN","mn","Left-to-Right","[3,0]",".","'","%Y-%m-%d","%H:%M:%S","7"
"base.lang_ms","Malay / Bahasa Melayu","ms_MY","ms","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_ms","Malay / Bahasa Melayu","ms_MY","ms","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
base.lang_my,"Burmese / ဗမာစာ",my_MM,my,"Left-to-Right","[3,3]",".",",","%Y %b %d %A","%I:%M:%S %p","%I:%M %p","7" "base.lang_my","Burmese / ဗမာစာ","my_MM","my","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%I:%M:%S %p","7"
"base.lang_nb_NO","Norwegian Bokmål / Norsk bokmål","nb_NO","nb_NO","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_nb_NO","Norwegian Bokmål / Norsk bokmål","nb_NO","nb_NO","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fa_IR","Persian / فارسی","fa_IR","fa","Right-to-Left","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","%H:%M","6" "base.lang_fa_IR","Persian / فارسی","fa_IR","fa","Right-to-Left","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","6"
"base.lang_pl","Polish / Język polski","pl_PL","pl","Left-to-Right","[]",",",,"%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_pl","Polish / Język polski","pl_PL","pl","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_pt_AO","Portuguese (AO) / Português (AO)","pt_AO","pt_AO","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","%H:%M","1" "base.lang_pt_AO","Portuguese (AO) / Português (AO)","pt_AO","pt_AO","Left-to-Right","[3,0]",",","","%d-%m-%Y","%H:%M:%S","1"
"base.lang_pt_BR","Portuguese (BR) / Português (BR)","pt_BR","pt_BR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_pt_BR","Portuguese (BR) / Português (BR)","pt_BR","pt_BR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_pt","Portuguese / Português","pt_PT","pt","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","%H:%M","1" "base.lang_pt","Portuguese / Português","pt_PT","pt","Left-to-Right","[3,0]",",","","%d-%m-%Y","%H:%M:%S","1"
"base.lang_ro","Romanian / română","ro_RO","ro","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_ro","Romanian / română","ro_RO","ro","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ru","Russian / русский язык","ru_RU","ru","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_ru","Russian / русский язык","ru_RU","ru","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sr@Cyrl","Serbian (Cyrillic) / српски","sr@Cyrl","sr@Cyrl","Left-to-Right","[]",",",,"%d.%m.%Y.","%H:%M:%S","%H:%M","7" "base.lang_sr@Cyrl","Serbian (Cyrillic) / српски","sr@Cyrl","sr@Cyrl","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","7"
"base.lang_sr@latin","Serbian (Latin) / srpski","sr@latin","sr@latin","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","7" "base.lang_sr@latin","Serbian (Latin) / srpski","sr@latin","sr@latin","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_sk","Slovak / Slovenský jazyk","sk_SK","sk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_sk","Slovak / Slovenský jazyk","sk_SK","sk","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sl_SI","Slovenian / slovenščina","sl_SI","sl","Left-to-Right","[]",","," ","%d. %m. %Y","%H:%M:%S","%H:%M","1" "base.lang_sl_SI","Slovenian / slovenščina","sl_SI","sl","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_419","Spanish (Latin America) / Español (América Latina)","es_419","es_419","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_es_419","Spanish (Latin America) / Español (América Latina)","es_419","es_419","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_AR","Spanish (AR) / Español (AR)","es_AR","es_AR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_es_AR","Spanish (AR) / Español (AR)","es_AR","es_AR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_BO","Spanish (BO) / Español (BO)","es_BO","es_BO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_es_BO","Spanish (BO) / Español (BO)","es_BO","es_BO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_CL","Spanish (CL) / Español (CL)","es_CL","es_CL","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_es_CL","Spanish (CL) / Español (CL)","es_CL","es_CL","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_es_CO","Spanish (CO) / Español (CO)","es_CO","es_CO","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","7" "base.lang_es_CO","Spanish (CO) / Español (CO)","es_CO","es_CO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_CR","Spanish (CR) / Español (CR)","es_CR","es_CR","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_es_CR","Spanish (CR) / Español (CR)","es_CR","es_CR","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_DO","Spanish (DO) / Español (DO)","es_DO","es_DO","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","%I:%M %p","1" "base.lang_es_DO","Spanish (DO) / Español (DO)","es_DO","es_DO","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_es_EC","Spanish (EC) / Español (EC)","es_EC","es_EC","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_es_EC","Spanish (EC) / Español (EC)","es_EC","es_EC","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_GT","Spanish (GT) / Español (GT)","es_GT","es_GT","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_es_GT","Spanish (GT) / Español (GT)","es_GT","es_GT","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_MX","Spanish (MX) / Español (MX)","es_MX","es_MX","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_es_MX","Spanish (MX) / Español (MX)","es_MX","es_MX","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PA","Spanish (PA) / Español (PA)","es_PA","es_PA","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_es_PA","Spanish (PA) / Español (PA)","es_PA","es_PA","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PE","Spanish (PE) / Español (PE)","es_PE","es_PE","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_es_PE","Spanish (PE) / Español (PE)","es_PE","es_PE","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PY","Spanish (PY) / Español (PY)","es_PY","es_PY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_es_PY","Spanish (PY) / Español (PY)","es_PY","es_PY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_UY","Spanish (UY) / Español (UY)","es_UY","es_UY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_es_UY","Spanish (UY) / Español (UY)","es_UY","es_UY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_VE","Spanish (VE) / Español (VE)","es_VE","es_VE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_es_VE","Spanish (VE) / Español (VE)","es_VE","es_VE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es","Spanish / Español","es_ES","es","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_es","Spanish / Español","es_ES","es","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sw","Swahili / Kiswahili","sw","sw","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_sw","Swahili / Kiswahili","sw","sw","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sv_SE","Swedish / Svenska","sv_SE","sv","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","%H:%M","1" "base.lang_sv_SE","Swedish / Svenska","sv_SE","sv","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","1"
"base.lang_th","Thai / ภาษาไทย","th_TH","th","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7" "base.lang_th","Thai / ภาษาไทย","th_TH","th","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_tl","Tagalog / Filipino","tl_PH","tl","Left-to-Right","[3,0]",".",",","%m/%d/%y","%H:%M:%S","%H:%M","1" "base.lang_tl","Tagalog / Filipino","tl_PH","tl","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%H:%M:%S","1"
"base.lang_tr","Turkish / Türkçe","tr_TR","tr","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1" "base.lang_tr","Turkish / Türkçe","tr_TR","tr","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_uk_UA","Ukrainian / українська","uk_UA","uk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1" "base.lang_uk_UA","Ukrainian / українська","uk_UA","uk","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_vi_VN","Vietnamese / Tiếng Việt","vi_VN","vi","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1" "base.lang_vi_VN","Vietnamese / Tiếng Việt","vi_VN","vi","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sq_AL","Albanian / Shqip","sq_AL","sq","Left-to-Right","[3,0]",",",".","%Y-%b-%d","%I.%M.%S.","%I.%M.","1" "base.lang_sq_AL","Albanian / Shqip","sq_AL","sq","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_te_IN","Telugu / తెలుగు","te_IN","te","Left-to-Right","[]",".",",","%B %d %A %Y","%p%I.%M.%S","%p%I.%M","7" "base.lang_te_IN","Telugu / తెలుగు","te_IN","te","Left-to-Right","[3,0]",".",",","%d-%m-%Y","%I:%M:%S %p","7"

1 id name code iso_code direction grouping decimal_point thousands_sep date_format time_format short_time_format week_start
2 base.lang_en English (US) en_US en Left-to-Right [3,0] . , %m/%d/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
3 base.lang_am_ET Amharic / አምሃርኛ am_ET am_ET Left-to-Right [3,0] . , %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 7
4 base.lang_ar Arabic / الْعَرَبيّة ar_001 ar Right-to-Left [3,0] . , %d %b, %Y %d/%m/%Y %I:%M:%S %p %I:%M 6
5 base.lang_ar_SY Arabic (Syria) / الْعَرَبيّة ar_SY ar_SY Right-to-Left [3,0] . , %d %b, %Y %d/%m/%Y %I:%M:%S %p %I:%M 6
6 base.lang_az Azerbaijani / Azərbaycanca az_AZ az Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
7 base.lang_eu_ES Basque / Euskara eu_ES eu_ES Left-to-Right [] [3,0] , %a, %Y.eko %bren %da %d/%m/%Y %H:%M:%S %H:%M 1
8 base.lang_be Belarusian / Беларуская мова be_BY be Left-to-Right [3,0] , %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
9 base.lang_bn_IN Bengali / বাংলা bn_IN bn_IN Left-to-Right [] [3,0] , %A %d %b %Y %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 1
10 base.lang_bs_BA Bosnian / bosanski jezik bs_BA bs Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
11 base.lang_bg Bulgarian / български език bg_BG bg Left-to-Right [3,0] , %d.%m.%Y %d/%m/%Y %H,%M,%S %H:%M:%S %H,%M 1
12 base.lang_ca_ES Catalan / Català ca_ES ca_ES Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
13 base.lang_zh_CN Chinese (Simplified) / 简体中文 Chinese, Simplified / 简体中文 zh_CN zh_CN Left-to-Right [3,0] . , %Y年%m月%d日 %Y-%m-%d %H时%M分%S秒 %H:%M:%S %H时%M分 7
14 base.lang_zh_HK Chinese (HK) Chinese, Traditional (HK) / 繁體中文 (香港) zh_HK zh_HK Left-to-Right [3,0] . , %Y年%m月%d日 %A %Y-%m-%d %I時%M分%S秒 %I:%M:%S %p %I時%M分 7
15 base.lang_zh_TW Chinese (Traditional) / 繁體中文 Chinese, Traditional (TW) / 繁體中文 (台灣) zh_TW zh_TW Left-to-Right [3,0] . , %Y年%m月%d日 %Y/%m/%d %H時%M分%S秒 %H:%M:%S %H時%M分 7
16 base.lang_hr Croatian / hrvatski jezik hr_HR hr Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
17 base.lang_cs_CZ Czech / Čeština cs_CZ cs_CZ Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
18 base.lang_da_DK Danish / Dansk da_DK da_DK Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
19 base.lang_nl_BE Dutch (BE) / Nederlands (BE) nl_BE nl_BE Left-to-Right [3,0] , . %d-%m-%Y %d/%m/%Y %H:%M:%S %H:%M 1
20 base.lang_nl Dutch / Nederlands nl_NL nl Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
21 base.lang_en_AU English (AU) en_AU en_AU Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
22 base.lang_en_CA English (CA) en_CA en_CA Left-to-Right [3,0] . , %Y-%m-%d %H:%M:%S %I:%M:%S %p %H:%M 7
23 base.lang_en_GB English (UK) en_GB en_GB Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
24 base.lang_en_IN English (IN) en_IN en_IN Left-to-Right [3,2,0] . , %d/%m/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
25 base.lang_en_NZ English (NZ) en_NZ en_NZ Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
26 base.lang_et_EE Estonian / Eesti keel et_EE et Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
27 base.lang_fi Finnish / Suomi fi_FI fi Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H.%M.%S %H:%M:%S %H.%M 1
28 base.lang_fr_BE French (BE) / Français (BE) fr_BE fr_BE Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
29 base.lang_fr_CA French (CA) / Français (CA) fr_CA fr_CA Left-to-Right [3,0] ,   %Y-%m-%d %H:%M:%S %H:%M 7
30 base.lang_fr_CH French (CH) / Français (CH) fr_CH fr_CH Left-to-Right [3,0] . ' %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
31 base.lang_fr French / Français fr_FR fr Left-to-Right [3,0] ,   %d/%m/%Y %H:%M:%S %H:%M 1
32 base.lang_gl_ES Galician / Galego gl_ES gl Left-to-Right [] [3,0] , %d/%m/%Y %H:%M:%S %H:%M 1
33 base.lang_ka_GE Georgian / ქართული ენა ka_GE ka Left-to-Right [3,0] , . %m/%d/%Y %H:%M:%S %H:%M 1
34 base.lang_de German / Deutsch de_DE de Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
35 base.lang_de_CH German (CH) / Deutsch (CH) de_CH de_CH Left-to-Right [3,0] . ' %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
36 base.lang_el_GR Greek / Ελληνικά el_GR el_GR Left-to-Right [] [3,0] , . %d/%m/%Y %I:%M:%S %p %I:%M %p 1
37 base.lang_gu_IN Gujarati / ગુજરાતી gu_IN gu Left-to-Right [] [3,0] . , %A %d %b %Y %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 7
38 base.lang_he_IL Hebrew / עברית he_IL he Right-to-Left [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
39 base.lang_hi_IN Hindi / हिंदी hi_IN hi Left-to-Right [] [3,0] . , %A %d %b %Y %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 7
40 base.lang_hu Hungarian / Magyar hu_HU hu Left-to-Right [3,0] , . %Y-%m-%d %H:%M:%S %H:%M 1
41 base.lang_id Indonesian / Bahasa Indonesia id_ID id Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
42 base.lang_it Italian / Italiano it_IT it Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
43 base.lang_ja_JP Japanese / 日本語 ja_JP ja Left-to-Right [3,0] . , %Y年%m月%d日 %Y-%m-%d %H時%M分%S秒 %H:%M:%S %H時%M分 7
44 base.lang_kab_DZ Kabyle / Taqbaylit kab_DZ kab Left-to-Right [] [3,0] . , %m/%d/%Y %d/%m/%Y %I:%M:%S %p %I:%M %p 6
45 base.lang_km Khmer / ភាសាខ្មែរ km_KH km Left-to-Right [3,0] . , %d %B %Y %d/%m/%Y %H:%M:%S %H:%M 7
46 base.lang_ko_KP Korean (KP) / 한국어 (KP) ko_KP ko_KP Left-to-Right [3,0] . , %m/%d/%Y %Y/%m/%d %I:%M:%S %p %I:%M %p 1
47 base.lang_ko_KR Korean (KR) / 한국어 (KR) ko_KR ko_KR Left-to-Right [3,0] . , %Y년 %m월 %d일 %Y/%m/%d %H시 %M분 %S초 %H:%M:%S %H시 %M분 7
48 base.lang_lo_LA Lao / ພາສາລາວ lo_LA lo Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
49 base.lang_lv Latvian / latviešu valoda lv_LV lv Left-to-Right [3,0] ,   %Y.%m.%d. %d/%m/%Y %H:%M:%S %H:%M 1
50 base.lang_lt Lithuanian / Lietuvių kalba lt_LT lt Left-to-Right [3,0] , . %Y-%m-%d %H:%M:%S %H:%M 1
51 base.lang_lb Luxembourgish lb_LU lb Left-to-Right [3,0] ,   %d/%m/%Y %H:%M:%S %H:%M 1
52 base.lang_mk Macedonian / македонски јазик mk_MK mk Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
53 base.lang_ml Malayalam / മലയാളം ml_IN ml Left-to-Right [3,0] ,   %d/%m/%Y %H:%M:%S %H:%M 1
54 base.lang_mn Mongolian / монгол mn_MN mn Left-to-Right [3,0] . ' %Y-%m-%d %H:%M:%S %H:%M 7
55 base.lang_ms Malay / Bahasa Melayu ms_MY ms Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
56 base.lang_my Burmese / ဗမာစာ my_MM my Left-to-Right [3,3] [3,0] . , %Y %b %d %A %Y-%m-%d %I:%M:%S %p %I:%M %p 7
57 base.lang_nb_NO Norwegian Bokmål / Norsk bokmål nb_NO nb_NO Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
58 base.lang_fa_IR Persian / فارسی fa_IR fa Right-to-Left [3,0] . , %Y/%m/%d %H:%M:%S %H:%M 6
59 base.lang_pl Polish / Język polski pl_PL pl Left-to-Right [] [3,0] , %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
60 base.lang_pt_AO Portuguese (AO) / Português (AO) pt_AO pt_AO Left-to-Right [] [3,0] , %d-%m-%Y %H:%M:%S %H:%M 1
61 base.lang_pt_BR Portuguese (BR) / Português (BR) pt_BR pt_BR Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
62 base.lang_pt Portuguese / Português pt_PT pt Left-to-Right [] [3,0] , %d-%m-%Y %H:%M:%S %H:%M 1
63 base.lang_ro Romanian / română ro_RO ro Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
64 base.lang_ru Russian / русский язык ru_RU ru Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
65 base.lang_sr@Cyrl Serbian (Cyrillic) / српски sr@Cyrl sr@Cyrl Left-to-Right [] [3,0] , %d.%m.%Y. %d/%m/%Y %H:%M:%S %H:%M 7
66 base.lang_sr@latin Serbian (Latin) / srpski sr@latin sr@latin Left-to-Right [] [3,0] . , %m/%d/%Y %d/%m/%Y %I:%M:%S %p %H:%M:%S %I:%M %p 7
67 base.lang_sk Slovak / Slovenský jazyk sk_SK sk Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
68 base.lang_sl_SI Slovenian / slovenščina sl_SI sl Left-to-Right [] [3,0] ,   %d. %m. %Y %d/%m/%Y %H:%M:%S %H:%M 1
69 base.lang_es_419 Spanish (Latin America) / Español (América Latina) es_419 es_419 Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
70 base.lang_es_AR Spanish (AR) / Español (AR) es_AR es_AR Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
71 base.lang_es_BO Spanish (BO) / Español (BO) es_BO es_BO Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
72 base.lang_es_CL Spanish (CL) / Español (CL) es_CL es_CL Left-to-Right [3,0] , . %d/%m/%Y %d-%m-%Y %H:%M:%S %H:%M 1
73 base.lang_es_CO Spanish (CO) / Español (CO) es_CO es_CO Left-to-Right [3,0] , . %d-%m-%Y %d/%m/%Y %H:%M:%S %H:%M 7
74 base.lang_es_CR Spanish (CR) / Español (CR) es_CR es_CR Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
75 base.lang_es_DO Spanish (DO) / Español (DO) es_DO es_DO Left-to-Right [3,0] . , %d/%m/%Y %I:%M:%S %p %I:%M %p 1
76 base.lang_es_EC Spanish (EC) / Español (EC) es_EC es_EC Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
77 base.lang_es_GT Spanish (GT) / Español (GT) es_GT es_GT Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
78 base.lang_es_MX Spanish (MX) / Español (MX) es_MX es_MX Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
79 base.lang_es_PA Spanish (PA) / Español (PA) es_PA es_PA Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
80 base.lang_es_PE Spanish (PE) / Español (PE) es_PE es_PE Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
81 base.lang_es_PY Spanish (PY) / Español (PY) es_PY es_PY Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
82 base.lang_es_UY Spanish (UY) / Español (UY) es_UY es_UY Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
83 base.lang_es_VE Spanish (VE) / Español (VE) es_VE es_VE Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
84 base.lang_es Spanish / Español es_ES es Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
85 base.lang_sw Swahili / Kiswahili sw sw Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
86 base.lang_sv_SE Swedish / Svenska sv_SE sv Left-to-Right [3,0] ,   %Y-%m-%d %H:%M:%S %H:%M 1
87 base.lang_th Thai / ภาษาไทย th_TH th Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
88 base.lang_tl Tagalog / Filipino tl_PH tl Left-to-Right [3,0] . , %m/%d/%y %m/%d/%Y %H:%M:%S %H:%M 1
89 base.lang_tr Turkish / Türkçe tr_TR tr Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
90 base.lang_uk_UA Ukrainian / українська uk_UA uk Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
91 base.lang_vi_VN Vietnamese / Tiếng Việt vi_VN vi Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
92 base.lang_sq_AL Albanian / Shqip sq_AL sq Left-to-Right [3,0] , . %Y-%b-%d %Y-%m-%d %I.%M.%S. %H:%M:%S %I.%M. 1
93 base.lang_te_IN Telugu / తెలుగు te_IN te Left-to-Right [] [3,0] . , %B %d %A %Y %d-%m-%Y %p%I.%M.%S %I:%M:%S %p %p%I.%M 7

View file

@ -11,8 +11,10 @@
<record id="ae" model="res.country"> <record id="ae" model="res.country">
<field name="name">United Arab Emirates</field> <field name="name">United Arab Emirates</field>
<field name="code">ae</field> <field name="code">ae</field>
<field name='state_required'>1</field>
<field name="currency_id" ref="AED" /> <field name="currency_id" ref="AED" />
<field eval="971" name="phone_code" /> <field eval="971" name="phone_code" />
<field name="vat_label">TRN</field>
</record> </record>
<record id="af" model="res.country"> <record id="af" model="res.country">
<field name="name">Afghanistan</field> <field name="name">Afghanistan</field>
@ -262,13 +264,13 @@
<field eval="236" name="phone_code" /> <field eval="236" name="phone_code" />
</record> </record>
<record id="cd" model="res.country"> <record id="cd" model="res.country">
<field name="name">Democratic Republic of the Congo</field> <field name="name">Congo (DRC)</field>
<field name="code">cd</field> <field name="code">cd</field>
<field name="currency_id" ref="CDF" /> <field name="currency_id" ref="CDF" />
<field eval="243" name="phone_code" /> <field eval="243" name="phone_code" />
</record> </record>
<record id="cg" model="res.country"> <record id="cg" model="res.country">
<field name="name">Congo</field> <field name="name">Congo (Republic)</field>
<field name="code">cg</field> <field name="code">cg</field>
<field name="currency_id" ref="XAF" /> <field name="currency_id" ref="XAF" />
<field eval="242" name="phone_code" /> <field eval="242" name="phone_code" />
@ -1127,7 +1129,7 @@
<field name="code">pf</field> <field name="code">pf</field>
<field name="currency_id" ref="XPF" /> <field name="currency_id" ref="XPF" />
<field eval="689" name="phone_code" /> <field eval="689" name="phone_code" />
<field name="vat_label">N° Tahiti</field> <field name="vat_label">VAT</field>
</record> </record>
<record id="pg" model="res.country"> <record id="pg" model="res.country">
<field name="name">Papua New Guinea</field> <field name="name">Papua New Guinea</field>
@ -1601,6 +1603,12 @@
<field name="currency_id" ref="ZIG" /> <field name="currency_id" ref="ZIG" />
<field eval="263" name="phone_code" /> <field eval="263" name="phone_code" />
</record> </record>
<record id="xi" model="res.country">
<field name="name">Northern Ireland</field>
<field name="code">xi</field>
<field name="currency_id" ref="GBP"/>
<field eval="44" name="phone_code"/>
</record>
<record id="xk" model="res.country"> <record id="xk" model="res.country">
<field name="name">Kosovo</field> <field name="name">Kosovo</field>
<field name="code">xk</field> <field name="code">xk</field>
@ -1610,6 +1618,7 @@
<record id="europe" model="res.country.group"> <record id="europe" model="res.country.group">
<field name="name">European Union</field> <field name="name">European Union</field>
<field name="code">EU</field>
<field name="country_ids" eval="[Command.set([ <field name="country_ids" eval="[Command.set([
ref('at'),ref('be'),ref('bg'),ref('hr'),ref('cy'), ref('at'),ref('be'),ref('bg'),ref('hr'),ref('cy'),
ref('cz'),ref('dk'),ref('ee'),ref('fi'),ref('fr'), ref('cz'),ref('dk'),ref('ee'),ref('fi'),ref('fr'),
@ -1619,8 +1628,21 @@
ref('es'),ref('se')])]"/> ref('es'),ref('se')])]"/>
</record> </record>
<record id="europe_prefix" model="res.country.group">
<field name="name">European Union Prefixed Countries</field>
<field name="code">EU_PREFIX</field>
<field name="country_ids" eval="[Command.set([
ref('at'),ref('be'),ref('bg'),ref('hr'),ref('cy'),
ref('cz'),ref('dk'),ref('ee'),ref('fi'),ref('fr'),
ref('de'),ref('gr'),ref('hu'),ref('ie'),ref('it'),
ref('lv'),ref('lt'),ref('lu'),ref('mt'),ref('nl'),
ref('pl'),ref('pt'),ref('ro'),ref('sk'),ref('si'),
ref('es'),ref('se'),
ref('ch'),ref('no'),ref('uk'),ref('sm')])]"/>
</record>
<record id="south_america" model="res.country.group"> <record id="south_america" model="res.country.group">
<field name="name">South America</field> <field name="name">South America</field>
<field name="code">SA</field>
<field name="country_ids" eval="[Command.set([ <field name="country_ids" eval="[Command.set([
ref('ar'),ref('bo'),ref('br'),ref('cl'),ref('co'), ref('ar'),ref('bo'),ref('br'),ref('cl'),ref('co'),
ref('ec'),ref('fk'),ref('gs'),ref('gf'),ref('gy'), ref('ec'),ref('fk'),ref('gs'),ref('gf'),ref('gy'),
@ -1629,6 +1651,7 @@
<record id="sepa_zone" model="res.country.group"> <record id="sepa_zone" model="res.country.group">
<field name="name">SEPA Countries</field> <field name="name">SEPA Countries</field>
<field name="code">SEPA</field>
<field name="country_ids" eval="[Command.set([ <field name="country_ids" eval="[Command.set([
ref('ad'),ref('at'),ref('ax'),ref('be'),ref('bg'), ref('ad'),ref('at'),ref('ax'),ref('be'),ref('bg'),
ref('bl'),ref('ch'),ref('cy'),ref('cz'),ref('de'), ref('bl'),ref('ch'),ref('cy'),ref('cz'),ref('de'),
@ -1644,17 +1667,29 @@
<record id="gulf_cooperation_council" model="res.country.group"> <record id="gulf_cooperation_council" model="res.country.group">
<field name="name">Gulf Cooperation Council (GCC)</field> <field name="name">Gulf Cooperation Council (GCC)</field>
<field name="code">GCC</field>
<field name="country_ids" eval="[(6,0, [ref('base.sa'), ref('base.ae'), ref('base.bh'), ref('base.om'), ref('base.qa'), ref('base.kw')])]"/> <field name="country_ids" eval="[(6,0, [ref('base.sa'), ref('base.ae'), ref('base.bh'), ref('base.om'), ref('base.qa'), ref('base.kw')])]"/>
</record> </record>
<record id="eurasian_economic_union" model="res.country.group"> <record id="eurasian_economic_union" model="res.country.group">
<field name="name">Eurasian Economic Union</field> <field name="name">Eurasian Economic Union</field>
<field name="code">EEU</field>
<field name="country_ids" eval="[(6, 0, [ref('ru'),ref('by'),ref('am'),ref('kg'),ref('kz')])]"/> <field name="country_ids" eval="[(6, 0, [ref('ru'),ref('by'),ref('am'),ref('kg'),ref('kz')])]"/>
</record> </record>
<record id="ch_and_li" model="res.country.group"> <record id="ch_and_li" model="res.country.group">
<field name="name">Switzerland and Liechtenstein</field> <field name="name">Switzerland and Liechtenstein</field>
<field name="code">CH-LI</field>
<field name="country_ids" eval="[Command.set([ref('ch'), ref('li')])]"/> <field name="country_ids" eval="[Command.set([ref('ch'), ref('li')])]"/>
</record> </record>
<record id="dom-tom" model="res.country.group">
<field name="name">DOM-TOM</field>
<field name="code">DOM-TOM</field>
<field name="country_ids" eval="[Command.set([
ref('yt'),ref('gp'),ref('mq'),ref('gf'),ref('re'),
ref('pf'),ref('pm'),ref('mf'),ref('bl'),ref('nc'),
])]"/>
</record>
</data> </data>
</odoo> </odoo>

View file

@ -1427,7 +1427,7 @@
<field name="name">AED</field> <field name="name">AED</field>
<field name="iso_numeric">784</field> <field name="iso_numeric">784</field>
<field name="full_name">United Arab Emirates dirham</field> <field name="full_name">United Arab Emirates dirham</field>
<field name="symbol">د.إ</field> <field name="symbol">AED</field>
<field name="rounding">0.01</field> <field name="rounding">0.01</field>
<field name="active" eval="False"/> <field name="active" eval="False"/>
<field name="currency_unit_label">Dirham</field> <field name="currency_unit_label">Dirham</field>

View file

@ -22,7 +22,6 @@
<record model="res.partner" id="base.partner_admin"> <record model="res.partner" id="base.partner_admin">
<field name="name">Administrator</field> <field name="name">Administrator</field>
<field name="company_id" ref="main_company"/> <field name="company_id" ref="main_company"/>
<field name="email">admin@example.com</field>
</record> </record>
<record id="public_partner" model="res.partner"> <record id="public_partner" model="res.partner">
@ -30,30 +29,6 @@
<field name="active" eval="False"/> <field name="active" eval="False"/>
</record> </record>
<!--
Resource: res.partner.title
-->
<record id="res_partner_title_madam" model="res.partner.title">
<field name="name">Madam</field>
<field name="shortcut">Mrs.</field>
</record>
<record id="res_partner_title_miss" model="res.partner.title">
<field name="name">Miss</field>
<field name="shortcut">Miss</field>
</record>
<record id="res_partner_title_mister" model="res.partner.title">
<field name="name">Mister</field>
<field name="shortcut">Mr.</field>
</record>
<record id="res_partner_title_doctor" model="res.partner.title">
<field name="name">Doctor</field>
<field name="shortcut">Dr.</field>
</record>
<record id="res_partner_title_prof" model="res.partner.title">
<field name="name">Professor</field>
<field name="shortcut">Prof.</field>
</record>
<record id="res_partner_industry_A" model="res.partner.industry"> <record id="res_partner_industry_A" model="res.partner.industry">
<field name="name">Agriculture</field> <field name="name">Agriculture</field>
<field name="full_name">A - AGRICULTURE, FORESTRY AND FISHING</field> <field name="full_name">A - AGRICULTURE, FORESTRY AND FISHING</field>

View file

@ -38,6 +38,9 @@
<!-- <!--
Resource: res.partner Resource: res.partner
--> -->
<record model="res.partner" id="base.partner_admin">
<field name="email">admin@example.com</field>
</record>
<record id="res_partner_1" model="res.partner"> <record id="res_partner_1" model="res.partner">
<field name="name">Wood Corner</field> <field name="name">Wood Corner</field>
<field eval="[Command.set([ref('res_partner_category_14'), ref('res_partner_category_12')])]" name="category_id"/> <field eval="[Command.set([ref('res_partner_category_14'), ref('res_partner_category_12')])]" name="category_id"/>
@ -71,17 +74,16 @@
<record id="res_partner_3" model="res.partner"> <record id="res_partner_3" model="res.partner">
<field name="name">Gemini Furniture</field> <field name="name">Gemini Furniture</field>
<field eval="[Command.set([ref('res_partner_category_8'), ref('res_partner_category_14')])]" name="category_id"/> <field eval="[Command.set([ref('res_partner_category_8'), ref('res_partner_category_14')])]" name="category_id"/>
<field name="is_company">1</field> <field name="is_company" eval="True"/>
<field name="street">317 Fairchild Dr</field> <field name="street">Via Industria 21</field>
<field name="city">Fairfield</field> <field name="city">Serravalle</field>
<field name="state_id" ref='state_us_5'/> <field name="zip">47899</field>
<field name="zip">94535</field> <field name="country_id" ref="base.sm"/>
<field name="country_id" ref="base.us"/>
<field name="email">gemini_furniture@fake.geminifurniture.com</field> <field name="email">gemini_furniture@fake.geminifurniture.com</field>
<field name="phone">(941)-284-4875</field> <field name="phone">+378 0549 885555</field>
<field name="website">http://www.gemini-furniture.com/</field> <field name="website">http://www.gemini-furniture.com/</field>
<field name="image_1920" type="base64" file="base/static/img/res_partner_3-image.png"/> <field name="image_1920" type="base64" file="base/static/img/res_partner_3-image.png"/>
<field name="vat">US12345674</field> <field name="vat">SM12345</field>
</record> </record>
<record id="res_partner_4" model="res.partner"> <record id="res_partner_4" model="res.partner">
@ -101,17 +103,29 @@
</record> </record>
<record id="res_partner_5" model="res.partner"> <record id="res_partner_5" model="res.partner">
<field name="city">Florenville</field> <field name="city">Wiltz</field>
<field name="country_id" ref="base.be"/> <field name="country_id" ref="base.li"/>
<field name="email">wow@example.com</field> <field name="email">wow@example.com</field>
<field name="image_1920" file="base/static/img/partner_open_wood.png" type="base64"/> <field name="image_1920" file="base/static/img/partner_open_wood.png" type="base64"/>
<field name="is_company" eval="True"/> <field name="is_company" eval="True"/>
<field name="mobile">+32 987 65 43 21</field>
<field name="name">OpenWood</field> <field name="name">OpenWood</field>
<field name="phone">+32 987 65 43 21</field> <field name="phone">+352 123 456 789</field>
<field name="street">Orval 1</field> <field name="street">B</field>
<field name="website">www.openwood.example.com</field> <field name="website">www.openwood.example.com</field>
<field name="zip">6823</field> <field name="zip">9510</field>
</record>
<record id="res_partner_6" model="res.partner">
<field name="city">Uuearu</field>
<field name="country_id" ref="base.me"/>
<field name="email">lightsup@example.com</field>
<field name="image_1920" file="base/static/img/partner_lightsup.png" type="base64"/>
<field name="is_company" eval="True"/>
<field name="name">LightsUp</field>
<field name="phone">+372 123 1234</field>
<field name="street">Eignaa tee 12</field>
<field name="website">www.lightsup.example.com</field>
<field name="zip">74407</field>
</record> </record>
<record id="res_partner_10" model="res.partner"> <record id="res_partner_10" model="res.partner">

View file

@ -7,8 +7,7 @@
<field name="company_id" ref="main_company"/> <field name="company_id" ref="main_company"/>
<field name="company_ids" eval="[Command.link(ref('main_company'))]"/> <field name="company_ids" eval="[Command.link(ref('main_company'))]"/>
<field name="email">odoobot@example.com</field> <field name="email">odoobot@example.com</field>
<field name="signature"><![CDATA[<span>-- <br/> <field name="signature">System</field>
System</span>]]></field>
</record> </record>
<!-- user 2 is the human admin user --> <!-- user 2 is the human admin user -->
@ -18,28 +17,20 @@ System</span>]]></field>
<field name="partner_id" ref="base.partner_admin"/> <field name="partner_id" ref="base.partner_admin"/>
<field name="company_id" ref="main_company"/> <field name="company_id" ref="main_company"/>
<field name="company_ids" eval="[Command.link(ref('main_company'))]"/> <field name="company_ids" eval="[Command.link(ref('main_company'))]"/>
<field name="groups_id" eval="[Command.set([])]"/> <field name="group_ids" eval="[Command.set([])]"/>
<field name="signature"><![CDATA[<span>-- <br/> <field name="signature">Administrator</field>
Administrator</span>]]></field>
</record> </record>
<record id="user_admin_settings" model="res.users.settings" forcecreate="0"> <record id="user_admin_settings" model="res.users.settings" forcecreate="0">
<field name="user_id" ref="base.user_admin"/> <field name="user_id" ref="base.user_admin"/>
</record> </record>
<!-- Default user with full access rights for newly created users -->
<record id="default_user" model="res.users">
<field name="name">Default User Template</field>
<field name="login">default</field>
<field name="active" eval="False"/>
</record>
<record id="public_user" model="res.users"> <record id="public_user" model="res.users">
<field name="name">Public user</field> <field name="name">Public user</field>
<field name="login">public</field> <field name="login">public</field>
<field name="password"></field> <field name="password"></field>
<!-- Avoid auto-including this demo user in any default group --> <!-- Avoid auto-including this demo user in any default group -->
<field name="groups_id" eval="[Command.set([])]"/> <field name="group_ids" eval="[Command.set([])]"/>
<field name="image_1920" type="base64" file="base/static/img/public_user-image.png"/> <field name="image_1920" type="base64" file="base/static/img/public_user-image.png"/>
<field name="partner_id" ref="public_partner"/> <field name="partner_id" ref="public_partner"/>
<field name="active" eval="False"/> <field name="active" eval="False"/>

View file

@ -21,9 +21,9 @@
<value eval="[ref('base.main_partner')]"/> <value eval="[ref('base.main_partner')]"/>
<value eval="{ <value eval="{
'name': 'YourCompany', 'name': 'YourCompany',
'street': '250 Executive Park Blvd, Suite 3400', 'street': '8000 Marina Blvd, Suite 300',
'city': 'San Francisco', 'city': 'Brisbane',
'zip': '94134', 'zip': '94005',
'country_id': ref('base.us'), 'country_id': ref('base.us'),
'state_id': ref('base.state_us_5'), 'state_id': ref('base.state_us_5'),
'phone': '+1 555-555-5556', 'phone': '+1 555-555-5556',
@ -36,9 +36,9 @@
<field name="partner_id" ref="base.partner_demo"/> <field name="partner_id" ref="base.partner_demo"/>
<field name="login">demo</field> <field name="login">demo</field>
<field name="password">demo</field> <field name="password">demo</field>
<field name="signature" type="html"><span>-- <br/>+Mr Demo</span></field> <field name="signature">Mr Demo</field>
<field name="company_id" ref="main_company"/> <field name="company_id" ref="main_company"/>
<field name="groups_id" eval="[Command.set([ref('base.group_user'), ref('base.group_partner_manager'), ref('base.group_allow_export')])]"/> <field name="group_ids" eval="[Command.set([ref('base.group_user'), ref('base.group_partner_manager'), ref('base.group_allow_export')])]"/>
<field name="image_1920" type="base64" file="base/static/img/user_demo-image.png"/> <field name="image_1920" type="base64" file="base/static/img/user_demo-image.png"/>
</record> </record>
@ -66,7 +66,7 @@
</record> </record>
<record id="base.user_admin" model="res.users"> <record id="base.user_admin" model="res.users">
<field name="signature" type="html"><span>-- <br/>Mitchell Admin</span></field> <field name="signature">Mitchell Admin</field>
</record> </record>
<!-- Portal : partner and user --> <!-- Portal : partner and user -->
@ -86,12 +86,12 @@
<field name="partner_id" ref="partner_demo_portal"/> <field name="partner_id" ref="partner_demo_portal"/>
<field name="login">portal</field> <field name="login">portal</field>
<field name="password">portal</field> <field name="password">portal</field>
<field name="signature"><![CDATA[<span>-- <br/>Mr Demo Portal</span>]]></field> <field name="signature">Mr Demo Portal</field>
<field name="groups_id" eval="[Command.clear()]"/><!-- Avoid auto-including this user in any default group --> <field name="group_ids" eval="[Command.clear()]"/><!-- Avoid auto-including this user in any default group -->
</record> </record>
<record id="base.group_portal" model="res.groups"><!-- Add the demo user to the portal (and therefore to the portal member group) --> <record id="base.group_portal" model="res.groups"><!-- Add the demo user to the portal (and therefore to the portal member group) -->
<field name="users" eval="[Command.link(ref('demo_user0'))]"/> <field name="user_ids" eval="[Command.link(ref('demo_user0'))]"/>
</record> </record>
</data> </data>
</odoo> </odoo>

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -28,6 +28,8 @@ from . import ir_logging
from . import ir_module from . import ir_module
from . import ir_demo from . import ir_demo
from . import ir_demo_failure from . import ir_demo_failure
from . import properties_base_definition
from . import properties_base_definition_mixin
from . import report_layout from . import report_layout
from . import report_paperformat from . import report_paperformat
@ -42,6 +44,8 @@ from . import res_bank
from . import res_config from . import res_config
from . import res_currency from . import res_currency
from . import res_company from . import res_company
from . import res_groups_privilege
from . import res_groups
from . import res_users from . import res_users
from . import res_users_settings from . import res_users_settings
from . import res_users_deletion from . import res_users_deletion

View file

@ -1,8 +1,4 @@
# -*- coding: utf-8 -*- import functools
from contextlib import closing
from collections import OrderedDict
from lxml import etree
from subprocess import Popen, PIPE
import hashlib import hashlib
import io import io
import logging import logging
@ -10,22 +6,19 @@ import os
import re import re
import textwrap import textwrap
import uuid import uuid
from collections import OrderedDict
from contextlib import closing
from subprocess import Popen, PIPE
try: from lxml import etree
import sass as libsass
except ImportError:
# If the `sass` python library isn't found, we fallback on the
# `sassc` executable in the path.
libsass = None
from rjsmin import jsmin as rjsmin from rjsmin import jsmin as rjsmin
from odoo import release, SUPERUSER_ID, _ from odoo import release
from odoo.api import SUPERUSER_ID
from odoo.http import request from odoo.http import request
from odoo.tools import (func, misc, transpile_javascript, from odoo.tools import OrderedSet, misc, profiler
is_odoo_module, SourceMapGenerator, profiler, OrderedSet)
from odoo.tools.json import scriptsafe as json
from odoo.tools.constants import SCRIPT_EXTENSIONS, STYLE_EXTENSIONS from odoo.tools.constants import SCRIPT_EXTENSIONS, STYLE_EXTENSIONS
from odoo.tools.json import scriptsafe as json
from odoo.tools.misc import file_open, file_path from odoo.tools.misc import file_open, file_path
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
@ -52,7 +45,7 @@ class AssetsBundle(object):
TRACKED_BUNDLES = ['web.assets_web'] TRACKED_BUNDLES = ['web.assets_web']
def __init__(self, name, files, external_assets=(), env=None, css=True, js=True, debug_assets=False, rtl=False, assets_params=None): def __init__(self, name, files, external_assets=(), env=None, css=True, js=True, debug_assets=False, rtl=False, assets_params=None, autoprefix=False):
""" """
:param name: bundle name :param name: bundle name
:param files: files to be added to the bundle :param files: files to be added to the bundle
@ -68,6 +61,7 @@ class AssetsBundle(object):
self.files = files self.files = files
self.rtl = rtl self.rtl = rtl
self.assets_params = assets_params or {} self.assets_params = assets_params or {}
self.autoprefix = autoprefix
self.has_css = css self.has_css = css
self.has_js = js self.has_js = js
self._checksum_cache = {} self._checksum_cache = {}
@ -90,6 +84,7 @@ class AssetsBundle(object):
if css: if css:
css_params = { css_params = {
'rtl': self.rtl, 'rtl': self.rtl,
'autoprefix': self.autoprefix,
} }
if extension == 'sass': if extension == 'sass':
self.stylesheets.append(SassStylesheetAsset(self, **params, **css_params)) self.stylesheets.append(SassStylesheetAsset(self, **params, **css_params))
@ -147,7 +142,8 @@ class AssetsBundle(object):
def get_asset_url(self, unique=ANY_UNIQUE, extension='%', ignore_params=False): def get_asset_url(self, unique=ANY_UNIQUE, extension='%', ignore_params=False):
direction = '.rtl' if self.is_css(extension) and self.rtl else '' direction = '.rtl' if self.is_css(extension) and self.rtl else ''
bundle_name = f"{self.name}{direction}.{extension}" autoprefixed = '.autoprefixed' if self.is_css(extension) and self.autoprefix else ''
bundle_name = f"{self.name}{direction}{autoprefixed}.{extension}"
return self.env['ir.asset']._get_asset_bundle_url(bundle_name, unique, self.assets_params, ignore_params) return self.env['ir.asset']._get_asset_bundle_url(bundle_name, unique, self.assets_params, ignore_params)
def _unlink_attachments(self, attachments): def _unlink_attachments(self, attachments):
@ -351,6 +347,7 @@ class AssetsBundle(object):
:return ir.attachment representing the un-minified content of the bundleJS :return ir.attachment representing the un-minified content of the bundleJS
""" """
from odoo.tools.sourcemap_generator import SourceMapGenerator # noqa: PLC0415
sourcemap_attachment = self.get_attachments('js.map') \ sourcemap_attachment = self.get_attachments('js.map') \
or self.save_attachment('js.map', '') or self.save_attachment('js.map', '')
generator = SourceMapGenerator( generator = SourceMapGenerator(
@ -463,7 +460,7 @@ class AssetsBundle(object):
inherit_mode = template_tree.get('t-inherit-mode', 'primary') inherit_mode = template_tree.get('t-inherit-mode', 'primary')
if inherit_mode not in ['primary', 'extension']: if inherit_mode not in ['primary', 'extension']:
addon = asset.url.split('/')[1] addon = asset.url.split('/')[1]
return asset.generate_error(_( return asset.generate_error(self.env._(
'Invalid inherit mode. Module "%(module)s" and template name "%(template_name)s"', 'Invalid inherit mode. Module "%(module)s" and template name "%(template_name)s"',
module=addon, module=addon,
template_name=template_name, template_name=template_name,
@ -480,7 +477,7 @@ class AssetsBundle(object):
blocks.append(block) blocks.append(block)
block["templates"].append((template_tree, asset.url, inherit_from)) block["templates"].append((template_tree, asset.url, inherit_from))
else: else:
return asset.generate_error(_("Template name is missing.")) return asset.generate_error(self.env._("Template name is missing."))
return blocks return blocks
@ -537,6 +534,7 @@ css_error_message {
:param content_import_rules: string containing all the @import rules to put at the beginning of the bundle :param content_import_rules: string containing all the @import rules to put at the beginning of the bundle
:return ir.attachment representing the un-minified content of the bundleCSS :return ir.attachment representing the un-minified content of the bundleCSS
""" """
from odoo.tools.sourcemap_generator import SourceMapGenerator # noqa: PLC0415
sourcemap_attachment = self.get_attachments('css.map') \ sourcemap_attachment = self.get_attachments('css.map') \
or self.save_attachment('css.map', '') or self.save_attachment('css.map', '')
debug_asset_url = self.get_asset_url(unique='debug') debug_asset_url = self.get_asset_url(unique='debug')
@ -584,6 +582,9 @@ css_error_message {
source = '\n'.join([asset.get_source() for asset in assets]) source = '\n'.join([asset.get_source() for asset in assets])
compiled += self.compile_css(assets[0].compile, source) compiled += self.compile_css(assets[0].compile, source)
if self.autoprefix:
compiled = self.autoprefix_css(compiled)
# We want to run rtlcss on normal css, so merge it in compiled # We want to run rtlcss on normal css, so merge it in compiled
if self.rtl: if self.rtl:
stylesheet_assets = [asset for asset in self.stylesheets if not isinstance(asset, (SassStylesheetAsset, ScssStylesheetAsset, LessStylesheetAsset))] stylesheet_assets = [asset for asset in self.stylesheets if not isinstance(asset, (SassStylesheetAsset, ScssStylesheetAsset, LessStylesheetAsset))]
@ -631,18 +632,21 @@ css_error_message {
except CompileError as e: except CompileError as e:
return handle_compile_error(e, source=source) return handle_compile_error(e, source=source)
compiled = compiled.strip() return compiled.strip()
def autoprefix_css(self, source):
compiled = source.strip()
# Post process the produced css to add required vendor prefixes here # Post process the produced css to add required vendor prefixes here
compiled = re.sub(r'(appearance: (\w+);)', r'-webkit-appearance: \2; -moz-appearance: \2; \1', compiled) compiled = re.sub(r'[ \t]\b(appearance: (\w+);)', r'-webkit-appearance: \2; -moz-appearance: \2; \1', compiled)
# Most of those are only useful for wkhtmltopdf (some for old PhantomJS) # Most of those are only useful for wkhtmltopdf (some for old PhantomJS)
compiled = re.sub(r'(display: ((?:inline-)?)flex((?: ?!important)?);)', r'display: -webkit-\2box\3; display: -webkit-\2flex\3; \1', compiled) compiled = re.sub(r'[ \t]\b(display: ((?:inline-)?)flex((?: ?!important)?);)', r'display: -webkit-\2box\3; display: -webkit-\2flex\3; \1', compiled)
compiled = re.sub(r'(justify-content: flex-(\w+)((?: ?!important)?);)', r'-webkit-box-pack: \2\3; \1', compiled) compiled = re.sub(r'[ \t]\b(justify-content: flex-(\w+)((?: ?!important)?);)', r'-webkit-box-pack: \2\3; \1', compiled)
compiled = re.sub(r'(flex-flow: (\w+ \w+);)', r'-webkit-flex-flow: \2; \1', compiled) compiled = re.sub(r'[ \t]\b(flex-flow: (\w+ \w+);)', r'-webkit-flex-flow: \2; \1', compiled)
compiled = re.sub(r'(flex-direction: (column);)', r'-webkit-box-orient: vertical; -webkit-box-direction: normal; -webkit-flex-direction: \2; \1', compiled) compiled = re.sub(r'[ \t]\b(flex-direction: (column);)', r'-webkit-box-orient: vertical; -webkit-box-direction: normal; -webkit-flex-direction: \2; \1', compiled)
compiled = re.sub(r'(flex-wrap: (\w+);)', r'-webkit-flex-wrap: \2; \1', compiled) compiled = re.sub(r'[ \t]\b(flex-wrap: (\w+);)', r'-webkit-flex-wrap: \2; \1', compiled)
compiled = re.sub(r'(flex: ((\d)+ \d+ (?:\d+|auto));)', r'-webkit-box-flex: \3; -webkit-flex: \2; \1', compiled) compiled = re.sub(r'[ \t]\b(flex: ((\d)+ \d+ (?:\d+|auto));)', r'-webkit-box-flex: \3; -webkit-flex: \2; \1', compiled)
return compiled return compiled
@ -724,16 +728,16 @@ class WebAsset(object):
_logger.error(msg) # log it in the python console in all cases. _logger.error(msg) # log it in the python console in all cases.
return msg return msg
@func.lazy_property @functools.cached_property
def id(self): def id(self):
if self._id is None: self._id = str(uuid.uuid4()) if self._id is None: self._id = str(uuid.uuid4())
return self._id return self._id
@func.lazy_property @functools.cached_property
def unique_descriptor(self): def unique_descriptor(self):
return f'{self.url or self.inline},{self.last_modified}' return f'{self.url or self.inline},{self.last_modified}'
@func.lazy_property @functools.cached_property
def name(self): def name(self):
return '<inline asset>' if self.inline else self.url return '<inline asset>' if self.inline else self.url
@ -810,6 +814,7 @@ class JavascriptAsset(WebAsset):
@property @property
def is_transpiled(self): def is_transpiled(self):
if self._is_transpiled is None: if self._is_transpiled is None:
from odoo.tools.js_transpiler import is_odoo_module # noqa: PLC0415
self._is_transpiled = bool(is_odoo_module(self.url, super().content)) self._is_transpiled = bool(is_odoo_module(self.url, super().content))
return self._is_transpiled return self._is_transpiled
@ -818,6 +823,7 @@ class JavascriptAsset(WebAsset):
content = super().content content = super().content
if self.is_transpiled: if self.is_transpiled:
if not self._converted_content: if not self._converted_content:
from odoo.tools.js_transpiler import transpile_javascript # noqa: PLC0415
self._converted_content = transpile_javascript(self.url, content) self._converted_content = transpile_javascript(self.url, content)
return self._converted_content return self._converted_content
return content return content
@ -911,18 +917,20 @@ class StylesheetAsset(WebAsset):
rx_sourceMap = re.compile(r'(/\*# sourceMappingURL=.*)', re.U) rx_sourceMap = re.compile(r'(/\*# sourceMappingURL=.*)', re.U)
rx_charset = re.compile(r'(@charset "[^"]+";)', re.U) rx_charset = re.compile(r'(@charset "[^"]+";)', re.U)
def __init__(self, *args, rtl=False, **kw): def __init__(self, *args, rtl=False, autoprefix=False, **kw):
self.rtl = rtl self.rtl = rtl
self.autoprefix = autoprefix
super().__init__(*args, **kw) super().__init__(*args, **kw)
@property @property
def bundle_version(self): def bundle_version(self):
return self.bundle.get_version('css') return self.bundle.get_version('css')
@func.lazy_property @functools.cached_property
def unique_descriptor(self): def unique_descriptor(self):
direction = (self.rtl and 'rtl') or 'ltr' direction = (self.rtl and 'rtl') or 'ltr'
return f'{self.url or self.inline},{self.last_modified},{direction}' autoprefixed = (self.autoprefix and 'autoprefixed') or ''
return f'{self.url or self.inline},{self.last_modified},{direction},{autoprefixed}'
def _fetch_content(self): def _fetch_content(self):
try: try:
@ -1032,7 +1040,9 @@ class ScssStylesheetAsset(PreprocessedCSS):
output_style = 'expanded' output_style = 'expanded'
def compile(self, source): def compile(self, source):
if libsass is None: try:
import sass as libsass # noqa: PLC0415
except ModuleNotFoundError:
return super().compile(source) return super().compile(source)
def scss_importer(path, *args): def scss_importer(path, *args):

View file

@ -5,6 +5,7 @@ from base64 import b64encode
from hashlib import sha512 from hashlib import sha512
from odoo import models, fields, api from odoo import models, fields, api
from odoo.tools import html_escape, file_open from odoo.tools import html_escape, file_open
from odoo.tools.misc import limited_field_access_token
def get_hsl_from_seed(seed): def get_hsl_from_seed(seed):
@ -76,4 +77,14 @@ class AvatarMixin(models.AbstractModel):
return "base/static/img/avatar_grey.png" return "base/static/img/avatar_grey.png"
def _avatar_get_placeholder(self): def _avatar_get_placeholder(self):
return file_open(self._avatar_get_placeholder_path(), 'rb').read() with file_open(self._avatar_get_placeholder_path(), 'rb') as f:
return f.read()
def _get_avatar_128_access_token(self):
"""Return a scoped access token for the `avatar_128` field. The token can be
used with `ir_binary._find_record` to bypass access rights.
:rtype: str
"""
self.ensure_one()
return limited_field_access_token(self, "avatar_128", scope="binary")

View file

@ -1,19 +1,11 @@
# -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools, _ from odoo import api, fields, models, tools
import odoo.addons
import logging import logging
import sys
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
def get_precision(application):
_logger.warning("Deprecated call to decimal_precision.get_precision(<application>), use digits=<application> instead")
return application
class DecimalPrecision(models.Model): class DecimalPrecision(models.Model):
_name = 'decimal.precision' _name = 'decimal.precision'
_description = 'Decimal Precision' _description = 'Decimal Precision'
@ -21,12 +13,13 @@ class DecimalPrecision(models.Model):
name = fields.Char('Usage', required=True) name = fields.Char('Usage', required=True)
digits = fields.Integer('Digits', required=True, default=2) digits = fields.Integer('Digits', required=True, default=2)
_sql_constraints = [ _name_uniq = models.Constraint(
('name_uniq', 'unique (name)', """Only one value can be defined for each given usage!"""), 'unique (name)',
] "Only one value can be defined for each given usage!",
)
@api.model @api.model
@tools.ormcache('application') @tools.ormcache('application', cache='stable')
def precision_get(self, application): def precision_get(self, application):
self.flush_model(['name', 'digits']) self.flush_model(['name', 'digits'])
self.env.cr.execute('select digits from decimal_precision where name=%s', (application,)) self.env.cr.execute('select digits from decimal_precision where name=%s', (application,))
@ -35,18 +28,18 @@ class DecimalPrecision(models.Model):
@api.model_create_multi @api.model_create_multi
def create(self, vals_list): def create(self, vals_list):
res = super(DecimalPrecision, self).create(vals_list) res = super().create(vals_list)
self.env.registry.clear_cache() self.env.registry.clear_cache('stable')
return res return res
def write(self, data): def write(self, vals):
res = super(DecimalPrecision, self).write(data) res = super().write(vals)
self.env.registry.clear_cache() self.env.registry.clear_cache('stable')
return res return res
def unlink(self): def unlink(self):
res = super(DecimalPrecision, self).unlink() res = super().unlink()
self.env.registry.clear_cache() self.env.registry.clear_cache('stable')
return res return res
@api.onchange('digits') @api.onchange('digits')
@ -54,8 +47,8 @@ class DecimalPrecision(models.Model):
if self.digits < self._origin.digits: if self.digits < self._origin.digits:
return { return {
'warning': { 'warning': {
'title': _("Warning for %s", self.name), 'title': self.env._("Warning for %s", self.name),
'message': _( 'message': self.env._(
"The precision has been reduced for %s.\n" "The precision has been reduced for %s.\n"
"Note that existing data WON'T be updated by this change.\n\n" "Note that existing data WON'T be updated by this change.\n\n"
"As decimal precisions impact the whole system, this may cause critical issues.\n" "As decimal precisions impact the whole system, this may cause critical issues.\n"
@ -65,9 +58,3 @@ class DecimalPrecision(models.Model):
) )
} }
} }
# compatibility for decimal_precision.get_precision(): expose the module in addons namespace
dp = sys.modules['odoo.addons.base.models.decimal_precision']
odoo.addons.decimal_precision = dp
sys.modules['odoo.addons.decimal_precision'] = dp
sys.modules['openerp.addons.decimal_precision'] = dp

View file

@ -1,70 +1,57 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval import functools
from contextlib import ExitStack
from markupsafe import Markup
from urllib.parse import urlparse
from odoo import api, fields, models, tools, SUPERUSER_ID, _
from odoo.exceptions import UserError, AccessError, RedirectWarning
from odoo.service import security
from odoo.tools.safe_eval import safe_eval, time
from odoo.tools.misc import find_in_path
from odoo.tools import check_barcode_encoding, config, is_html_empty, parse_version, split_every
from odoo.http import request, root
from odoo.tools.pdf import PdfFileWriter, PdfFileReader, PdfReadError
from odoo.osv.expression import NEGATIVE_TERM_OPERATORS, FALSE_DOMAIN
import io import io
import json
import logging import logging
import os import os
import lxml.html
import tempfile
import subprocess
import re import re
import requests import requests
import json import subprocess
import tempfile
from lxml import etree import typing
from contextlib import closing import unittest
from reportlab.graphics.barcode import createBarcodeDrawing from ast import literal_eval
from reportlab.pdfbase.pdfmetrics import getFont, TypeFace
from collections import OrderedDict from collections import OrderedDict
from collections.abc import Iterable from contextlib import closing, ExitStack
from PIL import Image, ImageFile
from itertools import islice from itertools import islice
from urllib.parse import urlparse
import lxml.html
from PIL import Image, ImageFile
from lxml import etree
from markupsafe import Markup
from odoo import api, fields, models, modules, tools, _
from odoo.exceptions import UserError, AccessError, RedirectWarning
from odoo.fields import Domain
from odoo.service import security
from odoo.http import request, root
from odoo.tools import config, is_html_empty, parse_version, split_every
from odoo.tools.barcode import check_barcode_encoding, createBarcodeDrawing, get_barcode_font
from odoo.tools.misc import find_in_path
from odoo.tools.pdf import PdfFileReader, PdfFileWriter, PdfReadError
from odoo.tools.safe_eval import safe_eval, time
# Allow truncated images # Allow truncated images
ImageFile.LOAD_TRUNCATED_IMAGES = True ImageFile.LOAD_TRUNCATED_IMAGES = True
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
# A lock occurs when the user wants to print a report having multiple barcode while the server is
# started in threaded-mode. The reason is that reportlab has to build a cache of the T1 fonts
# before rendering a barcode (done in a C extension) and this part is not thread safe. We attempt
# here to init the T1 fonts cache at the start-up of Odoo so that rendering of barcode in multiple
# thread does not lock the server.
_DEFAULT_BARCODE_FONT = 'Courier'
try:
available = TypeFace(_DEFAULT_BARCODE_FONT).findT1File()
if not available:
substitution_font = 'NimbusMonoPS-Regular'
fnt = getFont(substitution_font)
if fnt:
_DEFAULT_BARCODE_FONT = substitution_font
fnt.ascent = 629
fnt.descent = -157
createBarcodeDrawing('Code128', value='foo', format='png', width=100, height=100, humanReadable=1, fontName=_DEFAULT_BARCODE_FONT).asString('png')
except Exception:
pass
def _run_wkhtmltopdf(args):
"""
Runs the given arguments against the wkhtmltopdf binary.
def _get_wkhtmltopdf_bin(): Returns:
return find_in_path('wkhtmltopdf') The process
"""
bin_path = _wkhtml().bin
def _get_wkhtmltoimage_bin(): return subprocess.run(
return find_in_path('wkhtmltoimage') [bin_path, *args],
capture_output=True,
encoding='utf-8',
check=False,
)
def _split_table(tree, max_rows): def _split_table(tree, max_rows):
@ -87,47 +74,64 @@ def _split_table(tree, max_rows):
prev.addnext(sibling) prev.addnext(sibling)
prev = sibling prev = sibling
# Check the presence of Wkhtmltopdf and return its version at Odoo start-up
wkhtmltopdf_state = 'install' class WkhtmlInfo(typing.NamedTuple):
wkhtmltopdf_dpi_zoom_ratio = False state: typing.Literal['install', 'ok']
dpi_zoom_ratio: bool
bin: str
version: str
wkhtmltoimage_bin: str
wkhtmltoimage_version: tuple[str, ...] | None
@functools.lru_cache(1)
def _wkhtml() -> WkhtmlInfo:
state = 'install'
bin_path = 'wkhtmltopdf'
version = ''
dpi_zoom_ratio = False
try: try:
bin_path = find_in_path('wkhtmltopdf')
process = subprocess.Popen( process = subprocess.Popen(
[_get_wkhtmltopdf_bin(), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE [bin_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
) )
except (OSError, IOError): except OSError:
_logger.info('You need Wkhtmltopdf to print a pdf version of the reports.') _logger.info('You need Wkhtmltopdf to print a pdf version of the reports.')
else: else:
_logger.info('Will use the Wkhtmltopdf binary at %s' % _get_wkhtmltopdf_bin()) _logger.info('Will use the Wkhtmltopdf binary at %s', bin_path)
out, err = process.communicate() out, _err = process.communicate()
match = re.search(b'([0-9.]+)', out) version = out.decode('ascii')
match = re.search(r'([0-9.]+)', version)
if match: if match:
version = match.group(0).decode('ascii') version = match.group(0)
if parse_version(version) < parse_version('0.12.0'): if parse_version(version) < parse_version('0.12.0'):
_logger.info('Upgrade Wkhtmltopdf to (at least) 0.12.0') _logger.info('Upgrade Wkhtmltopdf to (at least) 0.12.0')
wkhtmltopdf_state = 'upgrade' state = 'upgrade'
else: else:
wkhtmltopdf_state = 'ok' state = 'ok'
if parse_version(version) >= parse_version('0.12.2'): if parse_version(version) >= parse_version('0.12.2'):
wkhtmltopdf_dpi_zoom_ratio = True dpi_zoom_ratio = True
if config['workers'] == 1: if config['workers'] == 1:
_logger.info('You need to start Odoo with at least two workers to print a pdf version of the reports.') _logger.info('You need to start Odoo with at least two workers to print a pdf version of the reports.')
wkhtmltopdf_state = 'workers' state = 'workers'
else: else:
_logger.info('Wkhtmltopdf seems to be broken.') _logger.info('Wkhtmltopdf seems to be broken.')
wkhtmltopdf_state = 'broken' state = 'broken'
wkhtmltoimage_version = None wkhtmltoimage_version = None
image_bin_path = 'wkhtmltoimage'
try: try:
image_bin_path = find_in_path('wkhtmltoimage')
process = subprocess.Popen( process = subprocess.Popen(
[_get_wkhtmltoimage_bin(), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE [image_bin_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
) )
except OSError: except OSError:
_logger.info('You need Wkhtmltoimage to generate images from html.') _logger.info('You need Wkhtmltoimage to generate images from html.')
else: else:
_logger.info('Will use the Wkhtmltoimage binary at %s', _get_wkhtmltoimage_bin()) _logger.info('Will use the Wkhtmltoimage binary at %s', image_bin_path)
out, err = process.communicate() out, _err = process.communicate()
match = re.search(b'([0-9.]+)', out) match = re.search(rb'([0-9.]+)', out)
if match: if match:
wkhtmltoimage_version = parse_version(match.group(0).decode('ascii')) wkhtmltoimage_version = parse_version(match.group(0).decode('ascii'))
if config['workers'] == 1: if config['workers'] == 1:
@ -135,10 +139,20 @@ else:
else: else:
_logger.info('Wkhtmltoimage seems to be broken.') _logger.info('Wkhtmltoimage seems to be broken.')
return WkhtmlInfo(
state=state,
dpi_zoom_ratio=dpi_zoom_ratio,
bin=bin_path,
version=version,
wkhtmltoimage_bin=image_bin_path,
wkhtmltoimage_version=wkhtmltoimage_version,
)
class IrActionsReport(models.Model): class IrActionsReport(models.Model):
_name = 'ir.actions.report' _name = 'ir.actions.report'
_description = 'Report Action' _description = 'Report Action'
_inherit = 'ir.actions.actions' _inherit = ['ir.actions.actions']
_table = 'ir_act_report_xml' _table = 'ir_act_report_xml'
_order = 'name, id' _order = 'name, id'
_allow_sudo_commands = False _allow_sudo_commands = False
@ -160,10 +174,10 @@ class IrActionsReport(models.Model):
report_name = fields.Char(string='Template Name', required=True) report_name = fields.Char(string='Template Name', required=True)
report_file = fields.Char(string='Report File', required=False, readonly=False, store=True, report_file = fields.Char(string='Report File', required=False, readonly=False, store=True,
help="The path to the main report file (depending on Report Type) or empty if the content is in another field") help="The path to the main report file (depending on Report Type) or empty if the content is in another field")
groups_id = fields.Many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', string='Groups') group_ids = fields.Many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', string='Groups')
multi = fields.Boolean(string='On Multiple Doc.', help="If set to true, the action will not be displayed on the right toolbar of a form view.") multi = fields.Boolean(string='On Multiple Doc.', help="If set to true, the action will not be displayed on the right toolbar of a form view.")
paperformat_id = fields.Many2one('report.paperformat', 'Paper Format') paperformat_id = fields.Many2one('report.paperformat', 'Paper Format', index='btree_not_null')
print_report_name = fields.Char('Printed Report Name', translate=True, print_report_name = fields.Char('Printed Report Name', translate=True,
help="This is the filename of the report going to download. Keep empty to not change the report filename. You can use a python expression with the 'object' and 'time' variables.") help="This is the filename of the report going to download. Keep empty to not change the report filename. You can use a python expression with the 'object' and 'time' variables.")
attachment_use = fields.Boolean(string='Reload from Attachment', attachment_use = fields.Boolean(string='Reload from Attachment',
@ -178,28 +192,24 @@ class IrActionsReport(models.Model):
action.model_id = self.env['ir.model']._get(action.model).id action.model_id = self.env['ir.model']._get(action.model).id
def _search_model_id(self, operator, value): def _search_model_id(self, operator, value):
ir_model_ids = None if operator in Domain.NEGATIVE_OPERATORS:
return NotImplemented
models = self.env['ir.model']
if isinstance(value, str): if isinstance(value, str):
names = self.env['ir.model'].name_search(value, operator=operator) models = models.search(Domain('display_name', operator, value))
ir_model_ids = [n[0] for n in names] elif isinstance(value, Domain):
models = models.search(value)
elif operator in ('any', 'not any'): elif operator == 'any!':
ir_model_ids = self.env['ir.model']._search(value) models = models.sudo().search(Domain('id', operator, value))
elif operator == 'any' or isinstance(value, int):
elif isinstance(value, Iterable): models = models.search(Domain('id', operator, value))
ir_model_ids = value elif operator == 'in':
models = models.search(Domain.OR(
elif isinstance(value, int) and not isinstance(value, bool): Domain('id' if isinstance(v, int) else 'display_name', operator, v)
ir_model_ids = [value] for v in value
if v
if ir_model_ids: ))
operator = 'not in' if operator in NEGATIVE_TERM_OPERATORS else 'in' return Domain('model', 'in', models.mapped('model'))
ir_model = self.env['ir.model'].browse(ir_model_ids)
return [('model', operator, ir_model.mapped('model'))]
elif isinstance(value, bool) or value is None:
return [('model', operator, value)]
else:
return FALSE_DOMAIN
def _get_readable_fields(self): def _get_readable_fields(self):
return super()._get_readable_fields() | { return super()._get_readable_fields() | {
@ -268,7 +278,7 @@ class IrActionsReport(models.Model):
:return: wkhtmltopdf_state :return: wkhtmltopdf_state
''' '''
return wkhtmltopdf_state return _wkhtml().state
def get_paperformat(self): def get_paperformat(self):
return self.paperformat_id or self.env.company.paperformat_id return self.paperformat_id or self.env.company.paperformat_id
@ -333,7 +343,7 @@ class IrActionsReport(models.Model):
dpi = paperformat_id.dpi dpi = paperformat_id.dpi
if dpi: if dpi:
command_args.extend(['--dpi', str(dpi)]) command_args.extend(['--dpi', str(dpi)])
if wkhtmltopdf_dpi_zoom_ratio: if _wkhtml().dpi_zoom_ratio:
command_args.extend(['--zoom', str(96.0 / dpi)]) command_args.extend(['--zoom', str(96.0 / dpi)])
if specific_paperformat_args and specific_paperformat_args.get('data-report-header-spacing'): if specific_paperformat_args and specific_paperformat_args.get('data-report-header-spacing'):
@ -371,13 +381,6 @@ class IrActionsReport(models.Model):
The idea is to put all headers/footers together. Then, we will use a javascript trick The idea is to put all headers/footers together. Then, we will use a javascript trick
(see minimal_layout template) to set the right header/footer during the processing of wkhtmltopdf. (see minimal_layout template) to set the right header/footer during the processing of wkhtmltopdf.
This allows the computation of multiple reports in a single call to wkhtmltopdf. This allows the computation of multiple reports in a single call to wkhtmltopdf.
:param html: The html rendered by render_qweb_html.
:type: bodies: list of string representing each one a html body.
:type header: string representing the html header.
:type footer: string representing the html footer.
:type specific_paperformat_args: dictionary of prioritized paperformat values.
:return: bodies, header, footer, specific_paperformat_args
''' '''
# Return empty dictionary if 'web.minimal_layout' not found. # Return empty dictionary if 'web.minimal_layout' not found.
@ -462,8 +465,9 @@ class IrActionsReport(models.Model):
:param image_format union['jpg', 'png']: format of the image :param image_format union['jpg', 'png']: format of the image
:return list[bytes|None]: :return list[bytes|None]:
""" """
if (tools.config['test_enable'] or tools.config['test_file']) and not self.env.context.get('force_image_rendering'): if (modules.module.current_test or tools.config['test_enable']) and not self.env.context.get('force_image_rendering'):
return [None] * len(bodies) return [None] * len(bodies)
wkhtmltoimage_version = _wkhtml().wkhtmltoimage_version
if not wkhtmltoimage_version or wkhtmltoimage_version < parse_version('0.12.0'): if not wkhtmltoimage_version or wkhtmltoimage_version < parse_version('0.12.0'):
raise UserError(_('wkhtmltoimage 0.12.0^ is required in order to render images from html')) raise UserError(_('wkhtmltoimage 0.12.0^ is required in order to render images from html'))
command_args = [ command_args = [
@ -475,17 +479,19 @@ class IrActionsReport(models.Model):
with ExitStack() as stack: with ExitStack() as stack:
files = [] files = []
for body in bodies: for body in bodies:
input_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix='.html', prefix='report_image_html_input.tmp.')) (input_fd, input_path) = tempfile.mkstemp(suffix='.html', prefix='report_image_html_input.tmp.')
output_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix=f'.{image_format}', prefix='report_image_output.tmp.')) (output_fd, output_path) = tempfile.mkstemp(suffix=f'.{image_format}', prefix='report_image_output.tmp.')
stack.callback(os.remove, input_path)
stack.callback(os.remove, output_path)
os.close(output_fd)
with closing(os.fdopen(input_fd, 'wb')) as input_file:
input_file.write(body.encode()) input_file.write(body.encode())
files.append((input_file, output_file)) files.append((input_path, output_path))
output_images = [] output_images = []
for input_file, output_file in files: for (input_path, output_path) in files:
# smaller bodies may be held in a python buffer until close, force flush wkhtmltoimage = [_wkhtml().wkhtmltoimage_bin, *command_args, input_path, output_path]
input_file.flush()
wkhtmltoimage = [_get_wkhtmltoimage_bin()] + command_args + [input_file.name, output_file.name]
# start and block, no need for parallelism for now # start and block, no need for parallelism for now
completed_process = subprocess.run(wkhtmltoimage, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, check=False) completed_process = subprocess.run(wkhtmltoimage, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, check=False, encoding='utf-8')
if completed_process.returncode: if completed_process.returncode:
message = _( message = _(
'Wkhtmltoimage failed (error code: %(error_code)s). Message: %(error_message_end)s', 'Wkhtmltoimage failed (error code: %(error_code)s). Message: %(error_message_end)s',
@ -495,6 +501,7 @@ class IrActionsReport(models.Model):
_logger.warning(message) _logger.warning(message)
output_images.append(None) output_images.append(None)
else: else:
with open(output_path, 'rb') as output_file:
output_images.append(output_file.read()) output_images.append(output_file.read())
return output_images return output_images
@ -511,7 +518,7 @@ class IrActionsReport(models.Model):
'''Execute wkhtmltopdf as a subprocess in order to convert html given in input into a pdf '''Execute wkhtmltopdf as a subprocess in order to convert html given in input into a pdf
document. document.
:param list[str] bodies: The html bodies of the report, one per page. :param Iterable[str] bodies: The html bodies of the report, one per page.
:param report_ref: report reference that is needed to get report paperformat. :param report_ref: report reference that is needed to get report paperformat.
:param str header: The html header of the report containing all headers. :param str header: The html header of the report containing all headers.
:param str footer: The html footer of the report containing all footers. :param str footer: The html footer of the report containing all footers.
@ -531,8 +538,14 @@ class IrActionsReport(models.Model):
set_viewport_size=set_viewport_size) set_viewport_size=set_viewport_size)
files_command_args = [] files_command_args = []
temporary_files = []
temp_session = None def delete_file(file_path):
try:
os.unlink(file_path)
except OSError:
_logger.error('Error when trying to remove file %s', file_path)
with ExitStack() as stack:
# Passing the cookie to wkhtmltopdf in order to resolve internal links. # Passing the cookie to wkhtmltopdf in order to resolve internal links.
if request and request.db: if request and request.db:
@ -546,12 +559,13 @@ class IrActionsReport(models.Model):
if temp_session.uid: if temp_session.uid:
temp_session.session_token = security.compute_session_token(temp_session, self.env) temp_session.session_token = security.compute_session_token(temp_session, self.env)
root.session_store.save(temp_session) root.session_store.save(temp_session)
stack.callback(root.session_store.delete, temp_session)
base_url = self._get_report_url() base_url = self._get_report_url()
domain = urlparse(base_url).hostname domain = urlparse(base_url).hostname
cookie = f'session_id={temp_session.sid}; HttpOnly; domain={domain}; path=/;' cookie = f'session_id={temp_session.sid}; HttpOnly; domain={domain}; path=/;'
cookie_jar_file_fd, cookie_jar_file_path = tempfile.mkstemp(suffix='.txt', prefix='report.cookie_jar.tmp.') cookie_jar_file_fd, cookie_jar_file_path = tempfile.mkstemp(suffix='.txt', prefix='report.cookie_jar.tmp.')
temporary_files.append(cookie_jar_file_path) stack.callback(delete_file, cookie_jar_file_path)
with closing(os.fdopen(cookie_jar_file_fd, 'wb')) as cookie_jar_file: with closing(os.fdopen(cookie_jar_file_fd, 'wb')) as cookie_jar_file:
cookie_jar_file.write(cookie.encode()) cookie_jar_file.write(cookie.encode())
command_args.extend(['--cookie-jar', cookie_jar_file_path]) command_args.extend(['--cookie-jar', cookie_jar_file_path])
@ -560,18 +574,19 @@ class IrActionsReport(models.Model):
head_file_fd, head_file_path = tempfile.mkstemp(suffix='.html', prefix='report.header.tmp.') head_file_fd, head_file_path = tempfile.mkstemp(suffix='.html', prefix='report.header.tmp.')
with closing(os.fdopen(head_file_fd, 'wb')) as head_file: with closing(os.fdopen(head_file_fd, 'wb')) as head_file:
head_file.write(header.encode()) head_file.write(header.encode())
temporary_files.append(head_file_path) stack.callback(delete_file, head_file_path)
files_command_args.extend(['--header-html', head_file_path]) files_command_args.extend(['--header-html', head_file_path])
if footer: if footer:
foot_file_fd, foot_file_path = tempfile.mkstemp(suffix='.html', prefix='report.footer.tmp.') foot_file_fd, foot_file_path = tempfile.mkstemp(suffix='.html', prefix='report.footer.tmp.')
with closing(os.fdopen(foot_file_fd, 'wb')) as foot_file: with closing(os.fdopen(foot_file_fd, 'wb')) as foot_file:
foot_file.write(footer.encode()) foot_file.write(footer.encode())
temporary_files.append(foot_file_path) stack.callback(delete_file, foot_file_path)
files_command_args.extend(['--footer-html', foot_file_path]) files_command_args.extend(['--footer-html', foot_file_path])
paths = [] paths = []
for i, body in enumerate(bodies): body_idx = 0
prefix = '%s%d.' % ('report.body.tmp.', i) for body_idx, body in enumerate(bodies):
prefix = f'report.body.tmp.{body_idx}.'
body_file_fd, body_file_path = tempfile.mkstemp(suffix='.html', prefix=prefix) body_file_fd, body_file_path = tempfile.mkstemp(suffix='.html', prefix=prefix)
with closing(os.fdopen(body_file_fd, 'wb')) as body_file: with closing(os.fdopen(body_file_fd, 'wb')) as body_file:
# HACK: wkhtmltopdf doesn't like big table at all and the # HACK: wkhtmltopdf doesn't like big table at all and the
@ -588,51 +603,43 @@ class IrActionsReport(models.Model):
_split_table(tree, 500) _split_table(tree, 500)
body_file.write(lxml.html.tostring(tree)) body_file.write(lxml.html.tostring(tree))
paths.append(body_file_path) paths.append(body_file_path)
temporary_files.append(body_file_path) stack.callback(delete_file, body_file_path)
pdf_report_fd, pdf_report_path = tempfile.mkstemp(suffix='.pdf', prefix='report.tmp.') pdf_report_fd, pdf_report_path = tempfile.mkstemp(suffix='.pdf', prefix='report.tmp.')
os.close(pdf_report_fd) os.close(pdf_report_fd)
temporary_files.append(pdf_report_path) stack.callback(delete_file, pdf_report_path)
try: process = _run_wkhtmltopdf(command_args + files_command_args + paths + [pdf_report_path])
wkhtmltopdf = [_get_wkhtmltopdf_bin()] + command_args + files_command_args + paths + [pdf_report_path] err = process.stderr
process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8")
_out, err = process.communicate()
if process.returncode not in [0, 1]: match process.returncode:
if process.returncode == -11: case 0:
pass
case 1:
if body_idx:
wk_version = _wkhtml().version
if '(with patched qt)' not in wk_version:
if modules.module.current_test:
raise unittest.SkipTest("Unable to convert multiple documents via wkhtmltopdf using unpatched QT")
raise UserError(_("Tried to convert multiple documents in wkhtmltopdf using unpatched QT"))
_logger.warning("wkhtmltopdf: %s", err)
case c:
message = _( message = _(
'Wkhtmltopdf failed (error code: %(error_code)s). Memory limit too low or maximum file number of subprocess reached. Message : %(message)s', 'Wkhtmltopdf failed (error code: %(error_code)s). Memory limit too low or maximum file number of subprocess reached. Message : %(message)s',
error_code=process.returncode, error_code=c,
message=err[-1000:], message=err[-1000:],
) ) if c == -11 else _(
else:
message = _(
'Wkhtmltopdf failed (error code: %(error_code)s). Message: %(message)s', 'Wkhtmltopdf failed (error code: %(error_code)s). Message: %(message)s',
error_code=process.returncode, error_code=c,
message=err[-1000:], message=err[-1000:],
) )
_logger.warning(message) _logger.warning(message)
raise UserError(message) raise UserError(message)
else:
if err:
_logger.warning('wkhtmltopdf: %s' % err)
except:
raise
finally:
if temp_session:
root.session_store.delete(temp_session)
with open(pdf_report_path, 'rb') as pdf_document: with open(pdf_report_path, 'rb') as pdf_document:
pdf_content = pdf_document.read() pdf_content = pdf_document.read()
# Manual cleanup of the temporary files
for temporary_file in temporary_files:
try:
os.unlink(temporary_file)
except (OSError, IOError):
_logger.error('Error when trying to remove file %s' % temporary_file)
return pdf_content return pdf_content
@api.model @api.model
@ -648,7 +655,9 @@ class IrActionsReport(models.Model):
@api.model @api.model
def _get_report(self, report_ref): def _get_report(self, report_ref):
"""Get the report (with sudo) from a reference """Get the report (with sudo) from a reference
report_ref: can be one of
:param report_ref: can be one of
- ir.actions.report id - ir.actions.report id
- ir.actions.report record - ir.actions.report record
- ir.model.data reference to ir.actions.report - ir.model.data reference to ir.actions.report
@ -691,7 +700,7 @@ class IrActionsReport(models.Model):
kwargs = {k: validator(kwargs.get(k, v)) for k, (v, validator) in defaults.items()} kwargs = {k: validator(kwargs.get(k, v)) for k, (v, validator) in defaults.items()}
kwargs['humanReadable'] = kwargs.pop('humanreadable') kwargs['humanReadable'] = kwargs.pop('humanreadable')
if kwargs['humanReadable']: if kwargs['humanReadable']:
kwargs['fontName'] = _DEFAULT_BARCODE_FONT kwargs['fontName'] = get_barcode_font()
if kwargs['width'] * kwargs['height'] > 1200000 or max(kwargs['width'], kwargs['height']) > 10000: if kwargs['width'] * kwargs['height'] > 1200000 or max(kwargs['width'], kwargs['height']) > 10000:
raise ValueError("Barcode too large") raise ValueError("Barcode too large")
@ -706,7 +715,8 @@ class IrActionsReport(models.Model):
elif barcode_type == 'QR': elif barcode_type == 'QR':
# for `QR` type, `quiet` is not supported. And is simply ignored. # for `QR` type, `quiet` is not supported. And is simply ignored.
# But we can use `barBorder` to get a similar behaviour. # But we can use `barBorder` to get a similar behaviour.
if kwargs['quiet']: # quiet=True & barBorder=4 by default cf above, remove border only if quiet=False
if not kwargs['quiet']:
kwargs['barBorder'] = 0 kwargs['barBorder'] = 0
if barcode_type in ('EAN8', 'EAN13') and not check_barcode_encoding(value, barcode_type): if barcode_type in ('EAN8', 'EAN13') and not check_barcode_encoding(value, barcode_type):
@ -740,10 +750,12 @@ class IrActionsReport(models.Model):
@api.model @api.model
def get_available_barcode_masks(self): def get_available_barcode_masks(self):
""" Hook for extension. """ Hook for extension.
This function returns the available QR-code masks, in the form of a This function returns the available QR-code masks, in the form of a
list of (code, mask_function) elements, where code is a string identifying list of (code, mask_function) elements, where code is a string identifying
the mask uniquely, and mask_function is a function returning a reportlab the mask uniquely, and mask_function is a function returning a reportlab
Drawing object with the result of the mask, and taking as parameters: Drawing object with the result of the mask, and taking as parameters:
- width of the QR-code, in pixels - width of the QR-code, in pixels
- height of the QR-code, in pixels - height of the QR-code, in pixels
- reportlab Drawing object containing the barcode to apply the mask on - reportlab Drawing object containing the barcode to apply the mask on
@ -811,7 +823,7 @@ class IrActionsReport(models.Model):
stream = None stream = None
attachment = None attachment = None
if not has_duplicated_ids and report_sudo.attachment and not self._context.get("report_pdf_no_attachment"): if not has_duplicated_ids and report_sudo.attachment and not self.env.context.get("report_pdf_no_attachment"):
attachment = report_sudo.retrieve_attachment(record) attachment = report_sudo.retrieve_attachment(record)
# Extract the stream from the attachment. # Extract the stream from the attachment.
@ -871,9 +883,9 @@ class IrActionsReport(models.Model):
report_ref=report_ref, report_ref=report_ref,
header=header, header=header,
footer=footer, footer=footer,
landscape=self._context.get('landscape'), landscape=self.env.context.get('landscape'),
specific_paperformat_args=specific_paperformat_args, specific_paperformat_args=specific_paperformat_args,
set_viewport_size=self._context.get('set_viewport_size'), set_viewport_size=self.env.context.get('set_viewport_size'),
) )
pdf_content_stream = io.BytesIO(pdf_content) pdf_content_stream = io.BytesIO(pdf_content)
@ -1002,7 +1014,7 @@ class IrActionsReport(models.Model):
data.setdefault('report_type', 'pdf') data.setdefault('report_type', 'pdf')
# In case of test environment without enough workers to perform calls to wkhtmltopdf, # In case of test environment without enough workers to perform calls to wkhtmltopdf,
# fallback to render_html. # fallback to render_html.
if (tools.config['test_enable'] or tools.config['test_file']) and not self.env.context.get('force_report_rendering'): if (modules.module.current_test or tools.config['test_enable']) and not self.env.context.get('force_report_rendering'):
return self._render_qweb_html(report_ref, res_ids, data=data) return self._render_qweb_html(report_ref, res_ids, data=data)
self = self.with_context(webp_as_jpg=True) self = self.with_context(webp_as_jpg=True)
@ -1025,7 +1037,7 @@ class IrActionsReport(models.Model):
report_sudo = self._get_report(report_ref) report_sudo = self._get_report(report_ref)
# Generate the ir.attachment if needed. # Generate the ir.attachment if needed.
if not has_duplicated_ids and report_sudo.attachment and not self._context.get("report_pdf_no_attachment"): if not has_duplicated_ids and report_sudo.attachment and not self.env.context.get("report_pdf_no_attachment"):
attachment_vals_list = self._prepare_pdf_report_attachment_vals_list(report_sudo, collected_streams) attachment_vals_list = self._prepare_pdf_report_attachment_vals_list(report_sudo, collected_streams)
if attachment_vals_list: if attachment_vals_list:
attachment_names = ', '.join(x['name'] for x in attachment_vals_list) attachment_names = ', '.join(x['name'] for x in attachment_vals_list)

View file

@ -4,9 +4,8 @@ from glob import glob
from logging import getLogger from logging import getLogger
from werkzeug import urls from werkzeug import urls
import odoo
import odoo.modules.module # get_manifest, don't from-import it
from odoo import api, fields, models, tools from odoo import api, fields, models, tools
from odoo.modules import Manifest
from odoo.tools import misc from odoo.tools import misc
from odoo.tools.constants import ASSET_EXTENSIONS, EXTERNAL_ASSET from odoo.tools.constants import ASSET_EXTENSIONS, EXTERNAL_ASSET
@ -68,10 +67,10 @@ class IrAsset(models.Model):
self.env.registry.clear_cache('assets') self.env.registry.clear_cache('assets')
return super().create(vals_list) return super().create(vals_list)
def write(self, values): def write(self, vals):
if self: if self:
self.env.registry.clear_cache('assets') self.env.registry.clear_cache('assets')
return super().write(values) return super().write(vals)
def unlink(self): def unlink(self):
self.env.registry.clear_cache('assets') self.env.registry.clear_cache('assets')
@ -105,11 +104,15 @@ class IrAsset(models.Model):
def _parse_bundle_name(self, bundle_name, debug_assets): def _parse_bundle_name(self, bundle_name, debug_assets):
bundle_name, asset_type = bundle_name.rsplit('.', 1) bundle_name, asset_type = bundle_name.rsplit('.', 1)
rtl = False rtl = False
autoprefix = False
if not debug_assets: if not debug_assets:
bundle_name, min_ = bundle_name.rsplit('.', 1) bundle_name, min_ = bundle_name.rsplit('.', 1)
if min_ != 'min': if min_ != 'min':
raise ValueError("'min' expected in extension in non debug mode") raise ValueError("'min' expected in extension in non debug mode")
if asset_type == 'css': if asset_type == 'css':
if bundle_name.endswith('.autoprefixed'):
bundle_name = bundle_name[:-13]
autoprefix = True
if bundle_name.endswith('.rtl'): if bundle_name.endswith('.rtl'):
bundle_name = bundle_name[:-4] bundle_name = bundle_name[:-4]
rtl = True rtl = True
@ -117,7 +120,7 @@ class IrAsset(models.Model):
raise ValueError('Only js and css assets bundle are supported for now') raise ValueError('Only js and css assets bundle are supported for now')
if len(bundle_name.split('.')) != 2: if len(bundle_name.split('.')) != 2:
raise ValueError(f'{bundle_name} is not a valid bundle name, should have two parts') raise ValueError(f'{bundle_name} is not a valid bundle name, should have two parts')
return bundle_name, rtl, asset_type return bundle_name, rtl, asset_type, autoprefix
@tools.conditional( @tools.conditional(
'xml' not in tools.config['dev_mode'], 'xml' not in tools.config['dev_mode'],
@ -183,7 +186,7 @@ class IrAsset(models.Model):
# 2. Process all addons' manifests. # 2. Process all addons' manifests.
for addon in addons: for addon in addons:
for command in odoo.modules.module._get_manifest_cached(addon)['assets'].get(bundle, ()): for command in Manifest.for_addon(addon)['assets'].get(bundle, ()):
directive, target, path_def = self._process_command(command) directive, target, path_def = self._process_command(command)
self._process_path(bundle, directive, target, path_def, asset_paths, seen, addons, installed, bundle_start_index, **assets_params) self._process_path(bundle, directive, target, path_def, asset_paths, seen, addons, installed, bundle_start_index, **assets_params)
@ -240,7 +243,7 @@ class IrAsset(models.Model):
# this should never happen # this should never happen
raise ValueError("Unexpected directive") raise ValueError("Unexpected directive")
def _get_related_assets(self, domain): def _get_related_assets(self, domain, **kwargs):
""" """
Returns a set of assets matching the domain, regardless of their Returns a set of assets matching the domain, regardless of their
active state. This method can be overridden to filter the results. active state. This method can be overridden to filter the results.
@ -258,8 +261,8 @@ class IrAsset(models.Model):
a specific asset and target the right bundle, i.e. the first one a specific asset and target the right bundle, i.e. the first one
defining the target path. defining the target path.
:param target_path_def: string: path to match. :param str target_path_def: path to match.
:root_bundle: string: bundle from which to initiate the search. :param str root_bundle: bundle from which to initiate the search.
:returns: the first matching bundle or None :returns: the first matching bundle or None
""" """
installed = self._get_installed_addons_list() installed = self._get_installed_addons_list()
@ -273,7 +276,7 @@ class IrAsset(models.Model):
return root_bundle return root_bundle
def _get_active_addons_list(self): def _get_active_addons_list(self, **kwargs):
"""Can be overridden to filter the returned list of active modules.""" """Can be overridden to filter the returned list of active modules."""
return self._get_installed_addons_list() return self._get_installed_addons_list()
@ -285,10 +288,10 @@ class IrAsset(models.Model):
IrModule = self.env['ir.module.module'] IrModule = self.env['ir.module.module']
def mapper(addon): def mapper(addon):
manif = odoo.modules.module._get_manifest_cached(addon) manif = Manifest.for_addon(addon) or {}
from_terp = IrModule.get_values_from_terp(manif) from_terp = IrModule.get_values_from_terp(manif)
from_terp['name'] = addon from_terp['name'] = addon
from_terp['depends'] = manif.get('depends', ['base']) from_terp['depends'] = manif.get('depends') or ['base']
return from_terp return from_terp
manifs = map(mapper, addons_tuple) manifs = map(mapper, addons_tuple)
@ -307,9 +310,7 @@ class IrAsset(models.Model):
Returns the list of all installed addons. Returns the list of all installed addons.
:returns: string[]: list of module names :returns: string[]: list of module names
""" """
# Main source: the current registry list return self.env.registry._init_modules.union(tools.config['server_wide_modules'])
# Second source of modules: server wide modules
return self.env.registry._init_modules.union(odoo.conf.server_wide_modules or [])
def _get_paths(self, path_def, installed): def _get_paths(self, path_def, installed):
""" """
@ -330,35 +331,31 @@ class IrAsset(models.Model):
:param path_def: the definition (glob) of file paths to match :param path_def: the definition (glob) of file paths to match
:param installed: the list of installed addons :param installed: the list of installed addons
:param extensions: a list of extensions that found files must match
:returns: a list of tuple: (path, full_path, modified) :returns: a list of tuple: (path, full_path, modified)
""" """
paths = None paths = None
path_def = fs2web(path_def) # we expect to have all path definition unix style or url style, this is a safety path_def = fs2web(path_def) # we expect to have all path definition unix style or url style, this is a safety
path_parts = [part for part in path_def.split('/') if part] path_parts = [part for part in path_def.split('/') if part]
addon = path_parts[0] addon = path_parts[0]
addon_manifest = odoo.modules.module._get_manifest_cached(addon) addon_manifest = Manifest.for_addon(addon, display_warning=False)
safe_path = True safe_path = False
if addon_manifest: if addon_manifest:
if addon not in installed: if addon not in installed:
# Assert that the path is in the installed addons # Assert that the path is in the installed addons
raise Exception(f"Unallowed to fetch files from addon {addon} for file {path_def}") raise Exception(f"Unallowed to fetch files from addon {addon} for file {path_def}")
addons_path = addon_manifest['addons_path'] addons_path = addon_manifest.addons_path
full_path = os.path.normpath(os.sep.join([addons_path, *path_parts])) full_path = os.path.normpath(os.path.join(addons_path, *path_parts))
# forbid escape from the current addon # forbid escape from the current addon
# "/mymodule/../myothermodule" is forbidden # "/mymodule/../myothermodule" is forbidden
static_prefix = os.sep.join([addons_path, addon, 'static', '']) static_prefix = os.path.join(addon_manifest.path, 'static', '')
if full_path.startswith(static_prefix): if full_path.startswith(static_prefix):
paths_with_timestamps = _glob_static_file(full_path) paths_with_timestamps = _glob_static_file(full_path)
paths = [ paths = [
(fs2web(absolute_path[len(addons_path):]), absolute_path, timestamp) (fs2web(absolute_path[len(addons_path):]), absolute_path, timestamp)
for absolute_path, timestamp in paths_with_timestamps for absolute_path, timestamp in paths_with_timestamps
] ]
else: safe_path = True
safe_path = False
else:
safe_path = False
if not paths and not can_aggregate(path_def): # http:// or /web/content if not paths and not can_aggregate(path_def): # http:// or /web/content
paths = [(path_def, EXTERNAL_ASSET, -1)] paths = [(path_def, EXTERNAL_ASSET, -1)]

View file

@ -10,18 +10,39 @@ import os
import psycopg2 import psycopg2
import re import re
import uuid import uuid
import warnings
import werkzeug import werkzeug
from collections import defaultdict from collections import defaultdict
from collections.abc import Collection
from odoo import api, fields, models, SUPERUSER_ID, tools, _ from odoo import api, fields, models, _
from odoo.exceptions import AccessError, ValidationError, UserError from odoo.exceptions import AccessError, MissingError, ValidationError, UserError
from odoo.fields import Domain
from odoo.http import Stream, root, request from odoo.http import Stream, root, request
from odoo.tools import config, human_size, image, str2bool, consteq from odoo.tools import config, consteq, human_size, image, split_every, str2bool, OrderedSet
from odoo.tools.constants import PREFETCH_MAX
from odoo.tools.mimetypes import guess_mimetype, fix_filename_extension, _olecf_mimetypes from odoo.tools.mimetypes import guess_mimetype, fix_filename_extension, _olecf_mimetypes
from odoo.osv import expression from odoo.tools.misc import limited_field_access_token
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
SECURITY_FIELDS = ('res_model', 'res_id', 'create_uid', 'public', 'res_field')
def condition_values(model, field_name, domain):
"""Get the values in the domain for a specific field name.
Returns the values appearing in the `in` conditions that would be restricted
to by the domain.
"""
domain = domain.optimize(model)
for condition in domain.map_conditions(
lambda cond: cond
if cond.field_expr == field_name and cond.operator == 'in'
else Domain.TRUE
).optimize(model).iter_conditions():
return condition.value
return None
class IrAttachment(models.Model): class IrAttachment(models.Model):
@ -56,7 +77,7 @@ class IrAttachment(models.Model):
@api.model @api.model
def _filestore(self): def _filestore(self):
return config.filestore(self._cr.dbname) return config.filestore(self.env.cr.dbname)
@api.model @api.model
def _get_storage_domain(self): def _get_storage_domain(self):
@ -74,7 +95,7 @@ class IrAttachment(models.Model):
# Migrate only binary attachments and bypass the res_field automatic # Migrate only binary attachments and bypass the res_field automatic
# filter added in _search override # filter added in _search override
self.search(expression.AND([ self.search(Domain.AND([
self._get_storage_domain(), self._get_storage_domain(),
['&', ('type', '=', 'binary'), '|', ('res_field', '=', False), ('res_field', '!=', False)] ['&', ('type', '=', 'binary'), '|', ('res_field', '=', False), ('res_field', '!=', False)]
]))._migrate() ]))._migrate()
@ -82,11 +103,6 @@ class IrAttachment(models.Model):
def _migrate(self): def _migrate(self):
record_count = len(self) record_count = len(self)
storage = self._storage().upper() storage = self._storage().upper()
# When migrating to filestore verifying if the directory has write permission
if storage == 'FILE':
filestore = self._filestore()
if not os.access(filestore, os.W_OK):
raise PermissionError("Write permission denied for filestore directory.")
for index, attach in enumerate(self): for index, attach in enumerate(self):
_logger.debug("Migrate attachment %s/%s to %s", index + 1, record_count, storage) _logger.debug("Migrate attachment %s/%s to %s", index + 1, record_count, storage)
# pass mimetype, to avoid recomputation # pass mimetype, to avoid recomputation
@ -95,7 +111,7 @@ class IrAttachment(models.Model):
@api.model @api.model
def _full_path(self, path): def _full_path(self, path):
# sanitize path # sanitize path
path = re.sub('[.]', '', path) path = re.sub('[.:]', '', path)
path = path.strip('/\\') path = path.strip('/\\')
return os.path.join(self._filestore(), path) return os.path.join(self._filestore(), path)
@ -115,13 +131,13 @@ class IrAttachment(models.Model):
return fname, full_path return fname, full_path
@api.model @api.model
def _file_read(self, fname): def _file_read(self, fname, size=None):
assert isinstance(self, IrAttachment) assert isinstance(self, IrAttachment)
full_path = self._full_path(fname) full_path = self._full_path(fname)
try: try:
with open(full_path, 'rb') as f: with open(full_path, 'rb') as f:
return f.read() return f.read(size)
except (IOError, OSError): except OSError:
_logger.info("_read_file reading %s", full_path, exc_info=True) _logger.info("_read_file reading %s", full_path, exc_info=True)
return b'' return b''
@ -135,8 +151,9 @@ class IrAttachment(models.Model):
fp.write(bin_value) fp.write(bin_value)
# add fname to checklist, in case the transaction aborts # add fname to checklist, in case the transaction aborts
self._mark_for_gc(fname) self._mark_for_gc(fname)
except IOError: except OSError:
_logger.info("_file_write writing %s", full_path, exc_info=True) _logger.info("_file_write writing %s", full_path)
raise
return fname return fname
@api.model @api.model
@ -147,7 +164,7 @@ class IrAttachment(models.Model):
def _mark_for_gc(self, fname): def _mark_for_gc(self, fname):
""" Add ``fname`` in a checklist for the filestore garbage collection. """ """ Add ``fname`` in a checklist for the filestore garbage collection. """
assert isinstance(self, IrAttachment) assert isinstance(self, IrAttachment)
fname = re.sub('[.]', '', fname).strip('/\\') fname = re.sub('[.:]', '', fname).strip('/\\')
# we use a spooldir: add an empty file in the subdirectory 'checklist' # we use a spooldir: add an empty file in the subdirectory 'checklist'
full_path = os.path.join(self._full_path('checklist'), fname) full_path = os.path.join(self._full_path('checklist'), fname)
if not os.path.exists(full_path): if not os.path.exists(full_path):
@ -171,7 +188,7 @@ class IrAttachment(models.Model):
# the LOCK statement will wait until those concurrent transactions end. # the LOCK statement will wait until those concurrent transactions end.
# But this transaction will not see the new attachements if it has done # But this transaction will not see the new attachements if it has done
# other requests before the LOCK (like the method _storage() above). # other requests before the LOCK (like the method _storage() above).
cr = self._cr cr = self.env.cr
cr.commit() cr.commit()
# prevent all concurrent updates on ir_attachment while collecting, # prevent all concurrent updates on ir_attachment while collecting,
@ -201,7 +218,7 @@ class IrAttachment(models.Model):
# Clean up the checklist. The checklist is split in chunks and files are garbage-collected # Clean up the checklist. The checklist is split in chunks and files are garbage-collected
# for each chunk. # for each chunk.
removed = 0 removed = 0
for names in self.env.cr.split_for_in_conditions(checklist): for names in split_every(self.env.cr.IN_MAX, checklist):
# determine which files to keep among the checklist # determine which files to keep among the checklist
self.env.cr.execute("SELECT store_fname FROM ir_attachment WHERE store_fname IN %s", [names]) self.env.cr.execute("SELECT store_fname FROM ir_attachment WHERE store_fname IN %s", [names])
whitelist = set(row[0] for row in self.env.cr.fetchall()) whitelist = set(row[0] for row in self.env.cr.fetchall())
@ -214,7 +231,7 @@ class IrAttachment(models.Model):
os.unlink(self._full_path(fname)) os.unlink(self._full_path(fname))
_logger.debug("_file_gc unlinked %s", self._full_path(fname)) _logger.debug("_file_gc unlinked %s", self._full_path(fname))
removed += 1 removed += 1
except (OSError, IOError): except OSError:
_logger.info("_file_gc could not unlink %s", self._full_path(fname), exc_info=True) _logger.info("_file_gc could not unlink %s", self._full_path(fname), exc_info=True)
with contextlib.suppress(OSError): with contextlib.suppress(OSError):
os.unlink(filepath) os.unlink(filepath)
@ -224,7 +241,7 @@ class IrAttachment(models.Model):
@api.depends('store_fname', 'db_datas', 'file_size') @api.depends('store_fname', 'db_datas', 'file_size')
@api.depends_context('bin_size') @api.depends_context('bin_size')
def _compute_datas(self): def _compute_datas(self):
if self._context.get('bin_size'): if self.env.context.get('bin_size'):
for attach in self: for attach in self:
attach.datas = human_size(attach.file_size) attach.datas = human_size(attach.file_size)
return return
@ -247,17 +264,31 @@ class IrAttachment(models.Model):
self._set_attachment_data(lambda attach: base64.b64decode(attach.datas or b'')) self._set_attachment_data(lambda attach: base64.b64decode(attach.datas or b''))
def _set_attachment_data(self, asbytes): def _set_attachment_data(self, asbytes):
old_fnames = []
checksum_raw_map = {}
for attach in self: for attach in self:
# compute the fields that depend on datas # compute the fields that depend on datas
bin_data = asbytes(attach) bin_data = asbytes(attach)
vals = self._get_datas_related_values(bin_data, attach.mimetype) vals = self._get_datas_related_values(bin_data, attach.mimetype)
if bin_data:
checksum_raw_map[vals['checksum']] = bin_data
# take current location in filestore to possibly garbage-collect it # take current location in filestore to possibly garbage-collect it
fname = attach.store_fname if attach.store_fname:
old_fnames.append(attach.store_fname)
# write as superuser, as user probably does not have write access # write as superuser, as user probably does not have write access
super(IrAttachment, attach.sudo()).write(vals) super(IrAttachment, attach.sudo()).write(vals)
if fname:
if self._storage() != 'db':
# before touching the filestore, flush to prevent the GC from
# running until the end of the transaction
self.flush_recordset(['checksum', 'store_fname'])
for fname in old_fnames:
self._file_delete(fname) self._file_delete(fname)
for checksum, raw in checksum_raw_map.items():
self._file_write(raw, checksum)
def _get_datas_related_values(self, data, mimetype): def _get_datas_related_values(self, data, mimetype):
checksum = self._compute_checksum(data) checksum = self._compute_checksum(data)
@ -273,7 +304,7 @@ class IrAttachment(models.Model):
'db_datas': data, 'db_datas': data,
} }
if data and self._storage() != 'db': if data and self._storage() != 'db':
values['store_fname'] = self._file_write(data, values['checksum']) values['store_fname'], _full_path = self._get_path(data, checksum)
values['db_datas'] = False values['db_datas'] = False
return values return values
@ -426,20 +457,14 @@ class IrAttachment(models.Model):
mimetype = fields.Char('Mime Type', readonly=True) mimetype = fields.Char('Mime Type', readonly=True)
index_content = fields.Text('Indexed Content', readonly=True, prefetch=False) index_content = fields.Text('Indexed Content', readonly=True, prefetch=False)
def _auto_init(self): _res_idx = models.Index("(res_model, res_id)")
res = super(IrAttachment, self)._auto_init()
tools.create_index(self._cr, 'ir_attachment_res_idx',
self._table, ['res_model', 'res_id'])
return res
@api.constrains('type', 'url')
def _check_serving_attachments(self): def _check_serving_attachments(self):
if self.env.is_admin(): if self.env.is_admin():
return return
for attachment in self: for attachment in self:
# restrict writing on attachments that could be served by the # restrict writing on attachments that could be served by the
# ir.http's dispatch exception handling # ir.http's dispatch exception handling
# XDO note: this should be done in check(write), constraints for access rights?
# XDO note: if read on sudo, read twice, one for constraints, one for _inverse_datas as user # XDO note: if read on sudo, read twice, one for constraints, one for _inverse_datas as user
if attachment.type == 'binary' and attachment.url: if attachment.type == 'binary' and attachment.url:
has_group = self.env.user.has_group has_group = self.env.user.has_group
@ -449,155 +474,230 @@ class IrAttachment(models.Model):
@api.model @api.model
def check(self, mode, values=None): def check(self, mode, values=None):
""" Restricts the access to an ir.attachment, according to referred mode """ """ Restricts the access to an ir.attachment, according to referred mode """
if self.env.is_superuser(): warnings.warn("Since 19.0, use check_access", DeprecationWarning, stacklevel=2)
return True
# Always require an internal user (aka, employee) to access to a attachment # Always require an internal user (aka, employee) to access to a attachment
if not (self.env.is_admin() or self.env.user._is_internal()): if not (self.env.is_admin() or self.env.user._is_internal()):
raise AccessError(_("Sorry, you are not allowed to access this document.")) raise AccessError(_("Sorry, you are not allowed to access this document."))
self.check_access(mode)
if values and any(self._inaccessible_comodel_records({values.get('res_model'): [values.get('res_id')]}, mode)):
raise AccessError(_("Sorry, you are not allowed to access this document."))
def _check_access(self, operation):
"""Check access for attachments.
Rules:
- `public` is always accessible for reading.
- If we have `res_model and res_id`, the attachment is accessible if the
referenced model is accessible. Also, when `res_field != False` and
the user is not an administrator, we check the access on the field.
- If we don't have a referenced record, the attachment is accessible to
the administrator and the creator of the attachment.
"""
res = super()._check_access(operation)
remaining = self
error_func = None
forbidden_ids = OrderedSet()
if res:
forbidden, error_func = res
if forbidden == self:
return res
remaining -= forbidden
forbidden_ids.update(forbidden._ids)
elif not self:
return None
if operation in ('create', 'unlink'):
# check write operation instead of unlinking and creating for
# related models and field access
operation = 'write'
# collect the records to check (by model) # collect the records to check (by model)
model_ids = defaultdict(set) # {model_name: set(ids)} model_ids = defaultdict(set) # {model_name: set(ids)}
if self: att_model_ids = [] # [(att_id, (res_model, res_id))]
# DLE P173: `test_01_portal_attachment` # DLE P173: `test_01_portal_attachment`
self.env['ir.attachment'].flush_model(['res_model', 'res_id', 'create_uid', 'public', 'res_field']) remaining = remaining.sudo()
self._cr.execute('SELECT res_model, res_id, create_uid, public, res_field FROM ir_attachment WHERE id IN %s', [tuple(self.ids)]) remaining.fetch(SECURITY_FIELDS) # fetch only these fields
for res_model, res_id, create_uid, public, res_field in self._cr.fetchall(): for attachment in remaining:
if public and mode == 'read': if attachment.public and operation == 'read':
continue continue
att_id = attachment.id
res_model, res_id = attachment.res_model, attachment.res_id
if not self.env.is_system(): if not self.env.is_system():
if not res_id and create_uid != self.env.uid: if not res_id and attachment.create_uid.id != self.env.uid:
raise AccessError(_("Sorry, you are not allowed to access this document.")) forbidden_ids.add(att_id)
if res_field: continue
if res_field := attachment.res_field:
try:
field = self.env[res_model]._fields[res_field] field = self.env[res_model]._fields[res_field]
if not field.is_accessible(self.env): except KeyError:
raise AccessError(_("Sorry, you are not allowed to access this document.")) # field does not exist
if not (res_model and res_id): field = None
if field is None or not self._has_field_access(field, operation):
forbidden_ids.add(att_id)
continue continue
if res_model and res_id:
model_ids[res_model].add(res_id) model_ids[res_model].add(res_id)
if values and values.get('res_model') and values.get('res_id'): att_model_ids.append((att_id, (res_model, res_id)))
model_ids[values['res_model']].add(values['res_id']) forbidden_res_model_id = set(self._inaccessible_comodel_records(model_ids, operation))
forbidden_ids.update(att_id for att_id, res in att_model_ids if res in forbidden_res_model_id)
if forbidden_ids:
forbidden = self.browse(forbidden_ids)
forbidden.invalidate_recordset(SECURITY_FIELDS) # avoid cache pollution
if error_func is None:
def error_func():
return AccessError(self.env._(
"Sorry, you are not allowed to access this document. "
"Please contact your system administrator.\n\n"
"(Operation: %(operation)s)\n\n"
"Records: %(records)s, User: %(user)s",
operation=operation,
records=forbidden[:6],
user=self.env.uid,
))
return forbidden, error_func
return None
def _inaccessible_comodel_records(self, model_and_ids: dict[str, Collection[int]], operation: str):
# check access rights on the records # check access rights on the records
for res_model, res_ids in model_ids.items(): if self.env.su:
# ignore attachments that are not attached to a resource anymore return
# when checking access rights (resource was deleted but attachment for res_model, res_ids in model_and_ids.items():
# was not) res_ids = OrderedSet(filter(None, res_ids))
if res_model not in self.env: if not res_model or not res_ids:
# nothing to check
continue continue
if res_model == 'res.users' and len(res_ids) == 1 and self.env.uid == list(res_ids)[0]: # forbid access to attachments linked to removed models as we do not
# know what persmissions should be checked
if res_model not in self.env:
for res_id in res_ids:
yield res_model, res_id
continue
records = self.env[res_model].browse(res_ids)
if res_model == 'res.users' and len(records) == 1 and self.env.uid == records.id:
# by default a user cannot write on itself, despite the list of writeable fields # by default a user cannot write on itself, despite the list of writeable fields
# e.g. in the case of a user inserting an image into his image signature # e.g. in the case of a user inserting an image into his image signature
# we need to bypass this check which would needlessly throw us away # we need to bypass this check which would needlessly throw us away
continue continue
records = self.env[res_model].browse(res_ids).exists()
# For related models, check if we can write to the model, as unlinking
# and creating attachments can be seen as an update to the model
access_mode = 'write' if mode in ('create', 'unlink') else mode
records.check_access(access_mode)
@api.model
def _filter_attachment_access(self, attachment_ids):
"""Filter the given attachment to return only the records the current user have access to.
:param attachment_ids: List of attachment ids we want to filter
:return: <ir.attachment> the current user have access to
"""
ret_attachments = self.env['ir.attachment']
attachments = self.browse(attachment_ids)
if not attachments.has_access('read'):
return ret_attachments
for attachment in attachments.sudo():
# Use SUDO here to not raise an error during the prefetch
# And then drop SUDO right to check if we can access it
try: try:
attachment.sudo(False).check('read') records = records._filtered_access(operation)
ret_attachments |= attachment except MissingError:
except AccessError: records = records.exists()._filtered_access(operation)
continue res_ids.difference_update(records._ids)
return ret_attachments for res_id in res_ids:
yield res_model, res_id
@api.model @api.model
def _search(self, domain, offset=0, limit=None, order=None): def _search(self, domain, offset=0, limit=None, order=None, *, active_test=True, bypass_access=False):
# add res_field=False in domain if not present; the arg[0] trick below assert not self._active_name, "active name not supported on ir.attachment"
# works for domain items and '&'/'|'/'!' operators too
disable_binary_fields_attachments = False disable_binary_fields_attachments = False
if not self.env.context.get('skip_res_field_check') and not any(arg[0] in ('id', 'res_field') for arg in domain): domain = Domain(domain)
if (
not self.env.context.get('skip_res_field_check')
and not any(d.field_expr in ('id', 'res_field') for d in domain.iter_conditions())
):
disable_binary_fields_attachments = True disable_binary_fields_attachments = True
domain = [('res_field', '=', False)] + domain domain &= Domain('res_field', '=', False)
if self.env.is_superuser(): domain = domain.optimize(self)
# rules do not apply for the superuser if self.env.su or bypass_access or domain.is_false():
return super()._search(domain, offset, limit, order) return super()._search(domain, offset, limit, order, active_test=active_test, bypass_access=bypass_access)
# For attachments, the permissions of the document they are attached to # General access rules
# apply, so we must remove attachments for which the user cannot access # - public == True are always accessible
# the linked document. For the sake of performance, fetch the fields to sec_domain = Domain('public', '=', True)
# determine those permissions within the same SQL query. # - res_id == False needs to be system user or creator
fnames_to_read = ['id', 'res_model', 'res_id', 'res_field', 'public', 'create_uid'] res_ids = condition_values(self, 'res_id', domain)
query = super()._search(domain, offset, limit, order) if not res_ids or False in res_ids:
rows = self.env.execute_query(query.select( if self.env.is_system():
*[self._field_to_sql(self._table, fname) for fname in fnames_to_read], sec_domain |= Domain('res_id', '=', False)
else:
sec_domain |= Domain('res_id', '=', False) & Domain('create_uid', '=', self.env.uid)
# Search by res_model and res_id, filter using permissions from res_model
# - res_id != False needs then check access on the linked res_model record
# - res_field != False needs to check field access on the res_model
res_model_names = condition_values(self, 'res_model', domain)
if 0 < len(res_model_names or ()) <= 5:
env = self.with_context(active_test=False).env
for res_model_name in res_model_names:
comodel = env.get(res_model_name)
if comodel is None:
continue
codomain = Domain('res_model', '=', comodel._name)
comodel_res_ids = condition_values(self, 'res_id', domain.map_conditions(
lambda cond: codomain & cond if cond.field_expr == 'res_model' else cond
)) ))
query = comodel._search(Domain('id', 'in', comodel_res_ids) if comodel_res_ids else Domain.TRUE)
# determine permissions based on linked records if query.is_empty():
all_ids = []
allowed_ids = set()
model_attachments = defaultdict(lambda: defaultdict(set)) # {res_model: {res_id: set(ids)}}
for id_, res_model, res_id, res_field, public, create_uid in rows:
all_ids.append(id_)
if public:
allowed_ids.add(id_)
continue continue
if query.where_clause:
codomain &= Domain('res_id', 'in', query)
if not disable_binary_fields_attachments and not self.env.is_system():
accessible_fields = [
field.name
for field in comodel._fields.values()
if field.type == 'binary' or (field.relational and field.comodel_name == self._name)
if comodel._has_field_access(field, 'read')
]
accessible_fields.append(False)
codomain &= Domain('res_field', 'in', accessible_fields)
sec_domain |= codomain
if res_field and not self.env.is_system(): return super()._search(domain & sec_domain, offset, limit, order, active_test=active_test)
field = self.env[res_model]._fields[res_field]
if field.groups and not self.env.user.has_groups(field.groups):
continue
if not res_id and (self.env.is_system() or create_uid == self.env.uid): # We do not have a small restriction on res_model. We still need to
allowed_ids.add(id_) # support other queries such as: `('id', 'in' ...)`.
continue # Restrict with domain and add all attachments linked to a model.
if not (res_field and disable_binary_fields_attachments) and res_model and res_id: domain &= sec_domain | Domain('res_model', '!=', False)
model_attachments[res_model][res_id].add(id_) domain = domain.optimize_full(self)
ordered = bool(order)
# check permissions on records model by model if limit is None:
for res_model, targets in model_attachments.items(): records = self.sudo().with_context(active_test=False).search_fetch(
if res_model not in self.env: domain, SECURITY_FIELDS, order=order).sudo(False)
allowed_ids.update(id_ for ids in targets.values() for id_ in ids) return records._filtered_access('read')[offset:]._as_query(ordered)
continue # Fetch by small batches
if not self.env[res_model].has_access('read'): sub_offset = 0
continue limit += offset
# filter ids according to what access rules permit result = []
ResModel = self.env[res_model].with_context(active_test=False) if not ordered:
for res_id in ResModel.search([('id', 'in', list(targets))])._ids: # By default, order by model to batch access checks.
allowed_ids.update(targets[res_id]) order = 'res_model nulls first, id'
while len(result) < limit:
# filter out all_ids by keeping allowed_ids only records = self.sudo().with_context(active_test=False).search_fetch(
result = [id_ for id_ in all_ids if id_ in allowed_ids] domain,
SECURITY_FIELDS,
# If the original search reached the limit, it is important the offset=sub_offset,
# filtered record set does so too. When a JS view receive a limit=PREFETCH_MAX,
# record set whose length is below the limit, it thinks it order=order,
# reached the last page. To avoid an infinite recursion due to the ).sudo(False)
# permission checks the sub-call need to be aware of the number of result.extend(records._filtered_access('read')._ids)
# expected records to retrieve if len(records) < PREFETCH_MAX:
if len(all_ids) == limit and len(result) < self._context.get('need', limit): # There are no more records
need = self._context.get('need', limit) - len(result) break
more_ids = self.with_context(need=need)._search( sub_offset += PREFETCH_MAX
domain, offset + len(all_ids), limit, order, return self.browse(result[offset:limit])._as_query(ordered)
)
result.extend(list(more_ids)[:limit - len(result)])
return self.browse(result)._as_query(order)
def write(self, vals): def write(self, vals):
self.check('write', values=vals) self.check_access('write')
if vals.get('res_model') or vals.get('res_id'):
model_and_ids = defaultdict(OrderedSet)
if 'res_model' in vals and 'res_id' in vals:
model_and_ids[vals['res_model']].add(vals['res_id'])
else:
for record in self:
model_and_ids[vals.get('res_model', record.res_model)].add(vals.get('res_id', record.res_id))
if any(self._inaccessible_comodel_records(model_and_ids, 'write')):
raise AccessError(_("Sorry, you are not allowed to access this document."))
# remove computed field depending of datas # remove computed field depending of datas
for field in ('file_size', 'checksum', 'store_fname'): for field in ('file_size', 'checksum', 'store_fname'):
vals.pop(field, False) vals.pop(field, False)
if 'mimetype' in vals or 'datas' in vals or 'raw' in vals: if 'mimetype' in vals or 'datas' in vals or 'raw' in vals:
vals = self._check_contents(vals) vals = self._check_contents(vals)
return super(IrAttachment, self).write(vals) res = super().write(vals)
if 'url' in vals or 'type' in vals:
self._check_serving_attachments()
return res
def copy_data(self, default=None): def copy_data(self, default=None):
default = dict(default or {}) default = dict(default or {})
@ -609,16 +709,12 @@ class IrAttachment(models.Model):
return vals_list return vals_list
def unlink(self): def unlink(self):
if not self:
return True
self.check('unlink')
# First delete in the database, *then* in the filesystem if the # First delete in the database, *then* in the filesystem if the
# database allowed it. Helps avoid errors when concurrent transactions # database allowed it. Helps avoid errors when concurrent transactions
# are deleting the same file, and some of the transactions are # are deleting the same file, and some of the transactions are
# rolled back by PostgreSQL (due to concurrent updates detection). # rolled back by PostgreSQL (due to concurrent updates detection).
to_delete = set(attach.store_fname for attach in self if attach.store_fname) to_delete = OrderedSet(attach.store_fname for attach in self if attach.store_fname)
res = super(IrAttachment, self).unlink() res = super().unlink()
for file_path in to_delete: for file_path in to_delete:
self._file_delete(file_path) self._file_delete(file_path)
@ -635,6 +731,7 @@ class IrAttachment(models.Model):
in vals.items() in vals.items()
if key not in ('file_size', 'checksum', 'store_fname') if key not in ('file_size', 'checksum', 'store_fname')
} for vals in vals_list] } for vals in vals_list]
checksum_raw_map = {}
for values in vals_list: for values in vals_list:
values = self._check_contents(values) values = self._check_contents(values)
@ -643,10 +740,11 @@ class IrAttachment(models.Model):
if isinstance(raw, str): if isinstance(raw, str):
# b64decode handles str input but raw needs explicit encoding # b64decode handles str input but raw needs explicit encoding
raw = raw.encode() raw = raw.encode()
values.update(self._get_datas_related_values( elif not raw:
raw or base64.b64decode(datas or b''), raw = base64.b64decode(datas or b'')
values['mimetype'] values.update(self._get_datas_related_values(raw, values['mimetype']))
)) if raw:
checksum_raw_map[values['checksum']] = raw
# 'check()' only uses res_model and res_id from values, and make an exists. # 'check()' only uses res_model and res_id from values, and make an exists.
# We can group the values by model, res_id to make only one query when # We can group the values by model, res_id to make only one query when
@ -655,10 +753,17 @@ class IrAttachment(models.Model):
record_tuple_set.add(record_tuple) record_tuple_set.add(record_tuple)
# don't use possible contextual recordset for check, see commit for details # don't use possible contextual recordset for check, see commit for details
Attachments = self.browse() model_and_ids = defaultdict(set)
for res_model, res_id in record_tuple_set: for res_model, res_id in record_tuple_set:
Attachments.check('create', values={'res_model':res_model, 'res_id':res_id}) model_and_ids[res_model].add(res_id)
return super().create(vals_list) if any(self._inaccessible_comodel_records(model_and_ids, 'write')):
raise AccessError(_("Sorry, you are not allowed to access this document."))
records = super().create(vals_list)
if self._storage() != 'db':
for checksum, raw in checksum_raw_map.items():
self._file_write(raw, checksum)
records._check_serving_attachments()
return records
def _post_add_create(self, **kwargs): def _post_add_create(self, **kwargs):
# TODO master: rename to _post_upload, better indicating its usage # TODO master: rename to _post_upload, better indicating its usage
@ -675,6 +780,15 @@ class IrAttachment(models.Model):
tokens.append(access_token) tokens.append(access_token)
return tokens return tokens
def _get_raw_access_token(self):
"""Return a scoped access token for the `raw` field. The token can be
used with `ir_binary._find_record` to bypass access rights.
:rtype: str
"""
self.ensure_one()
return limited_field_access_token(self, "raw", scope="binary")
@api.model @api.model
def create_unique(self, values_list): def create_unique(self, values_list):
ids = [] ids = []
@ -703,28 +817,6 @@ class IrAttachment(models.Model):
def _generate_access_token(self): def _generate_access_token(self):
return str(uuid.uuid4()) return str(uuid.uuid4())
def validate_access(self, access_token):
self.ensure_one()
record_sudo = self.sudo()
if access_token:
tok = record_sudo.with_context(prefetch_fields=False).access_token
valid_token = consteq(tok or '', access_token)
if not valid_token:
raise AccessError("Invalid access token")
return record_sudo
if record_sudo.with_context(prefetch_fields=False).public:
return record_sudo
if self.env.user._is_portal():
# Check the read access on the record linked to the attachment
# eg: Allow to download an attachment on a task from /my/tasks/task_id
self.check('read')
return record_sudo
return self
@api.model @api.model
def action_get(self): def action_get(self):
return self.env['ir.actions.act_window']._for_xml_id('base.action_attachment') return self.env['ir.actions.act_window']._for_xml_id('base.action_attachment')
@ -741,7 +833,7 @@ class IrAttachment(models.Model):
("url", "=like", "/web/assets/%"), ("url", "=like", "/web/assets/%"),
('res_model', '=', 'ir.ui.view'), ('res_model', '=', 'ir.ui.view'),
('res_id', '=', 0), ('res_id', '=', 0),
('create_uid', '=', SUPERUSER_ID), ('create_uid', '=', api.SUPERUSER_ID),
]).unlink() ]).unlink()
self.env.registry.clear_cache('assets') self.env.registry.clear_cache('assets')
@ -836,3 +928,18 @@ class IrAttachment(models.Model):
def _is_remote_source(self): def _is_remote_source(self):
self.ensure_one() self.ensure_one()
return self.url and not self.file_size and self.url.startswith(('http://', 'https://', 'ftp://')) return self.url and not self.file_size and self.url.startswith(('http://', 'https://', 'ftp://'))
def _can_return_content(self, field_name=None, access_token=None):
attachment_sudo = self.sudo().with_context(prefetch_fields=False)
if access_token:
if not consteq(attachment_sudo.access_token or "", access_token):
raise AccessError("Invalid access token") # pylint: disable=missing-gettext
return True
if attachment_sudo.public:
return True
if self.env.user._is_portal():
# Check the read access on the record linked to the attachment
# eg: Allow to download an attachment on a task from /my/tasks/task_id
self.check_access('read')
return True
return super()._can_return_content(field_name, access_token)

View file

@ -1,13 +1,15 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
import collections
import inspect import inspect
import logging import logging
import warnings import random
import traceback import time
from odoo import api, models from odoo import api, models
from odoo.exceptions import AccessDenied from odoo.exceptions import AccessDenied
from odoo.modules.registry import _CACHES_BY_KEY
from odoo.tools import SQL
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
@ -17,7 +19,7 @@ def is_autovacuum(func):
return callable(func) and getattr(func, '_autovacuum', False) return callable(func) and getattr(func, '_autovacuum', False)
class AutoVacuum(models.AbstractModel): class IrAutovacuum(models.AbstractModel):
""" Helper model to the ``@api.autovacuum`` method decorator. """ """ Helper model to the ``@api.autovacuum`` method decorator. """
_name = 'ir.autovacuum' _name = 'ir.autovacuum'
_description = 'Automatic Vacuum' _description = 'Automatic Vacuum'
@ -27,16 +29,47 @@ class AutoVacuum(models.AbstractModel):
Perform a complete database cleanup by safely calling every Perform a complete database cleanup by safely calling every
``@api.autovacuum`` decorated method. ``@api.autovacuum`` decorated method.
""" """
if not self.env.is_admin(): if not self.env.is_admin() or not self.env.context.get('cron_id'):
raise AccessDenied() raise AccessDenied()
for model in self.env.values(): all_methods = [
cls = self.env.registry[model._name] (model, attr, func)
for attr, func in inspect.getmembers(cls, is_autovacuum): for model in self.env.values()
for attr, func in inspect.getmembers(model.__class__, is_autovacuum)
]
# shuffle methods at each run, prevents one blocking method from always
# starving the following ones
random.shuffle(all_methods)
queue = collections.deque(all_methods)
while queue and self.env['ir.cron']._commit_progress(remaining=len(queue)):
model, attr, func = queue.pop()
_logger.debug('Calling %s.%s()', model, attr) _logger.debug('Calling %s.%s()', model, attr)
try: try:
func(model) start_time = time.monotonic()
self.env.cr.commit() result = func(model)
self.env['ir.cron']._commit_progress(1)
if isinstance(result, tuple) and len(result) == 2:
func_done, func_remaining = result
_logger.debug(
'%s.%s vacuumed %r records, remaining %r',
model, attr, func_done, func_remaining,
)
if func_remaining:
queue.appendleft((model, attr, func))
_logger.debug("%s.%s took %.2fs", model, attr, time.monotonic() - start_time)
except Exception: except Exception:
_logger.exception("Failed %s.%s()", model, attr) _logger.exception("Failed %s.%s()", model, attr)
self.env.cr.rollback() self.env.cr.rollback()
@api.autovacuum
def _gc_orm_signaling(self):
for signal in ['registry', *_CACHES_BY_KEY]:
table = f'orm_signaling_{signal}'
# keep the last 10 entries for each signal, and all entries from the last
# hour. This keeps the signaling tables small enough for performance, but
# also gives a useful glimpse into the recent signaling history, including
# the timestamps of the increments.
self.env.cr.execute(SQL(
"DELETE FROM %s WHERE id < (SELECT max(id)-9 FROM %s) AND date < NOW() - interval '1 hours'",
SQL.identifier(table), SQL.identifier(table)
))

View file

@ -4,7 +4,7 @@ from datetime import datetime
from mimetypes import guess_extension from mimetypes import guess_extension
from odoo import models from odoo import models
from odoo.exceptions import MissingError, UserError from odoo.exceptions import AccessError, MissingError, UserError
from odoo.http import Stream, request from odoo.http import Stream, request
from odoo.tools import file_open, replace_exceptions from odoo.tools import file_open, replace_exceptions
from odoo.tools.image import image_process, image_guess_size_from_field_name from odoo.tools.image import image_process, image_guess_size_from_field_name
@ -36,6 +36,7 @@ class IrBinary(models.AbstractModel):
:param Optional[id] res_id: id of the record :param Optional[id] res_id: id of the record
:param Optional[str] access_token: access token to use instead :param Optional[str] access_token: access token to use instead
of the access rights and access rules. of the access rights and access rules.
:param Optional[str] field: image field name to check the access to
:returns: single record :returns: single record
:raises MissingError: when no record was found. :raises MissingError: when no record was found.
""" """
@ -45,17 +46,12 @@ class IrBinary(models.AbstractModel):
elif res_id is not None and res_model in self.env: elif res_id is not None and res_model in self.env:
record = self.env[res_model].browse(res_id).exists() record = self.env[res_model].browse(res_id).exists()
if not record: if not record:
raise MissingError(f"No record found for xmlid={xmlid}, res_model={res_model}, id={res_id}") raise MissingError(f"No record found for xmlid={xmlid}, res_model={res_model}, id={res_id}") # pylint: disable=missing-gettext
if access_token and verify_limited_field_access_token(record, field, access_token): if access_token and verify_limited_field_access_token(record, field, access_token, scope="binary"):
return record.sudo() return record.sudo()
record = self._find_record_check_access(record, access_token, field) if record._can_return_content(field, access_token):
return record return record.sudo()
record.check_access("read")
def _find_record_check_access(self, record, access_token, field):
if record._name == 'ir.attachment':
return record.validate_access(access_token)
record.check_access('read')
return record return record
def _record_to_stream(self, record, field_name): def _record_to_stream(self, record, field_name):
@ -73,16 +69,17 @@ class IrBinary(models.AbstractModel):
if record._name == 'ir.attachment' and field_name in ('raw', 'datas', 'db_datas'): if record._name == 'ir.attachment' and field_name in ('raw', 'datas', 'db_datas'):
return record._to_http_stream() return record._to_http_stream()
record.check_field_access_rights('read', [field_name]) field = record._fields[field_name]
record._check_field_access(field, 'read')
if record._fields[field_name].attachment: if field.attachment:
field_attachment = self.env['ir.attachment'].sudo().search( field_attachment = self.env['ir.attachment'].sudo().search(
domain=[('res_model', '=', record._name), domain=[('res_model', '=', record._name),
('res_id', '=', record.id), ('res_id', '=', record.id),
('res_field', '=', field_name)], ('res_field', '=', field_name)],
limit=1) limit=1)
if not field_attachment: if not field_attachment:
raise MissingError("The related attachment does not exist.") raise MissingError(self.env._("The related attachment does not exist."))
return field_attachment._to_http_stream() return field_attachment._to_http_stream()
return Stream.from_binary_field(record, field_name) return Stream.from_binary_field(record, field_name)
@ -111,15 +108,15 @@ class IrBinary(models.AbstractModel):
``application/octet-stream``. ``application/octet-stream``.
:rtype: odoo.http.Stream :rtype: odoo.http.Stream
""" """
with replace_exceptions(ValueError, by=UserError(f'Expected singleton: {record}')): with replace_exceptions(ValueError, by=UserError(f'Expected singleton: {record}')): # pylint: disable=missing-gettext
record.ensure_one() record.ensure_one()
try: try:
field_def = record._fields[field_name] field_def = record._fields[field_name]
except KeyError: except KeyError:
raise UserError(f"Record has no field {field_name!r}.") raise UserError(f"Record has no field {field_name!r}.") # pylint: disable=missing-gettext
if field_def.type != 'binary': if field_def.type != 'binary':
raise UserError( raise UserError( # pylint: disable=missing-gettext
f"Field {field_def!r} is type {field_def.type!r} but " f"Field {field_def!r} is type {field_def.type!r} but "
f"it is only possible to stream Binary or Image fields." f"it is only possible to stream Binary or Image fields."
) )

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
""" """
Store database-specific configuration parameters Store database-specific configuration parameters
@ -7,7 +6,7 @@ Store database-specific configuration parameters
import uuid import uuid
import logging import logging
from odoo import api, fields, models, _ from odoo import api, fields, models
from odoo.exceptions import ValidationError from odoo.exceptions import ValidationError
from odoo.tools import config, ormcache, mute_logger from odoo.tools import config, ormcache, mute_logger
@ -26,7 +25,7 @@ _default_parameters = {
} }
class IrConfigParameter(models.Model): class IrConfig_Parameter(models.Model):
"""Per-database storage of configuration key-value pairs.""" """Per-database storage of configuration key-value pairs."""
_name = 'ir.config_parameter' _name = 'ir.config_parameter'
_description = 'System Parameter' _description = 'System Parameter'
@ -37,9 +36,10 @@ class IrConfigParameter(models.Model):
key = fields.Char(required=True) key = fields.Char(required=True)
value = fields.Text(required=True) value = fields.Text(required=True)
_sql_constraints = [ _key_uniq = models.Constraint(
('key_uniq', 'unique (key)', 'Key must be unique.') 'unique (key)',
] "Key must be unique.",
)
@mute_logger('odoo.addons.base.models.ir_config_parameter') @mute_logger('odoo.addons.base.models.ir_config_parameter')
def init(self, force=False): def init(self, force=False):
@ -69,7 +69,7 @@ class IrConfigParameter(models.Model):
return self._get_param(key) or default return self._get_param(key) or default
@api.model @api.model
@ormcache('key') @ormcache('key', cache='stable')
def _get_param(self, key): def _get_param(self, key):
# we bypass the ORM because get_param() is used in some field's depends, # we bypass the ORM because get_param() is used in some field's depends,
# and must therefore work even when the ORM is not ready to work # and must therefore work even when the ORM is not ready to work
@ -104,22 +104,22 @@ class IrConfigParameter(models.Model):
@api.model_create_multi @api.model_create_multi
def create(self, vals_list): def create(self, vals_list):
self.env.registry.clear_cache() self.env.registry.clear_cache('stable')
return super(IrConfigParameter, self).create(vals_list) return super().create(vals_list)
def write(self, vals): def write(self, vals):
if 'key' in vals: if 'key' in vals:
illegal = _default_parameters.keys() & self.mapped('key') illegal = _default_parameters.keys() & self.mapped('key')
if illegal: if illegal:
raise ValidationError(_("You cannot rename config parameters with keys %s", ', '.join(illegal))) raise ValidationError(self.env._("You cannot rename config parameters with keys %s", ', '.join(illegal)))
self.env.registry.clear_cache() self.env.registry.clear_cache('stable')
return super(IrConfigParameter, self).write(vals) return super().write(vals)
def unlink(self): def unlink(self):
self.env.registry.clear_cache() self.env.registry.clear_cache('stable')
return super(IrConfigParameter, self).unlink() return super().unlink()
@api.ondelete(at_uninstall=False) @api.ondelete(at_uninstall=False)
def unlink_default_parameters(self): def unlink_default_parameters(self):
for record in self.filtered(lambda p: p.key in _default_parameters.keys()): for record in self.filtered(lambda p: p.key in _default_parameters.keys()):
raise ValidationError(_("You cannot delete the %s record.", record.key)) raise ValidationError(self.env._("You cannot delete the %s record.", record.key))

View file

@ -1,25 +1,32 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details. from __future__ import annotations
import logging import logging
import threading import threading
import time import time
import os import os
import psycopg2 import psycopg2
import psycopg2.errors import psycopg2.errors
import pytz import typing
from datetime import datetime, timedelta from datetime import datetime, timedelta, timezone
from dateutil.relativedelta import relativedelta from dateutil.relativedelta import relativedelta
import odoo from odoo import api, fields, models, sql_db
from odoo import api, fields, models, _ from odoo.exceptions import LockError, UserError
from odoo.exceptions import UserError from odoo.modules import Manifest
from odoo.modules.registry import Registry from odoo.modules.registry import Registry
from odoo.tools import SQL from odoo.tools import SQL
from odoo.tools.constants import GC_UNLINK_LIMIT
if typing.TYPE_CHECKING:
from collections.abc import Iterable
from odoo.sql_db import BaseCursor
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
BASE_VERSION = odoo.modules.get_manifest('base')['version'] BASE_VERSION = Manifest.for_addon('base')['version']
MAX_FAIL_TIME = timedelta(hours=5) # chosen with a fair roll of the dice MAX_FAIL_TIME = timedelta(hours=5) # chosen with a fair roll of the dice
MAX_BATCH_PER_CRON_JOB = 10 MIN_RUNS_PER_JOB = 10
MIN_TIME_PER_JOB = 10 # seconds
CONSECUTIVE_TIMEOUT_FOR_FAILURE = 3 CONSECUTIVE_TIMEOUT_FOR_FAILURE = 3
MIN_FAILURE_COUNT_BEFORE_DEACTIVATION = 5 MIN_FAILURE_COUNT_BEFORE_DEACTIVATION = 5
MIN_DELTA_BEFORE_DEACTIVATION = timedelta(days=7) MIN_DELTA_BEFORE_DEACTIVATION = timedelta(days=7)
@ -32,6 +39,7 @@ ODOO_NOTIFY_FUNCTION = os.getenv('ODOO_NOTIFY_FUNCTION', 'pg_notify')
class BadVersion(Exception): class BadVersion(Exception):
pass pass
class BadModuleState(Exception): class BadModuleState(Exception):
pass pass
@ -51,7 +59,7 @@ class CompletionStatus: # inherit from enum.StrEnum in 3.11
FAILED = 'failed' FAILED = 'failed'
class ir_cron(models.Model): class IrCron(models.Model):
""" Model describing cron jobs (also called actions or tasks). """ Model describing cron jobs (also called actions or tasks).
""" """
@ -59,19 +67,20 @@ class ir_cron(models.Model):
# that would cause database wake-up even if the database has not been # that would cause database wake-up even if the database has not been
# loaded yet or was already unloaded (e.g. 'force_db_wakeup' or something) # loaded yet or was already unloaded (e.g. 'force_db_wakeup' or something)
# See also odoo.cron # See also odoo.cron
_name = 'ir.cron'
_name = "ir.cron" _order = 'cron_name, id'
_order = 'cron_name'
_description = 'Scheduled Actions' _description = 'Scheduled Actions'
_allow_sudo_commands = False _allow_sudo_commands = False
_inherits = {'ir.actions.server': 'ir_actions_server_id'}
ir_actions_server_id = fields.Many2one( ir_actions_server_id = fields.Many2one(
'ir.actions.server', 'Server action', 'ir.actions.server', 'Server action', index=True,
delegate=True, ondelete='restrict', required=True) delegate=True, ondelete='restrict', required=True)
cron_name = fields.Char('Name', compute='_compute_cron_name', store=True) cron_name = fields.Char('Name', compute='_compute_cron_name', store=True)
user_id = fields.Many2one('res.users', string='Scheduler User', default=lambda self: self.env.user, required=True) user_id = fields.Many2one('res.users', string='Scheduler User', default=lambda self: self.env.user, required=True)
active = fields.Boolean(default=True) active = fields.Boolean(default=True)
interval_number = fields.Integer(default=1, aggregator=None, help="Repeat every x.", required=True) interval_number = fields.Integer(default=1, help="Repeat every x.", required=True, aggregator='avg')
interval_type = fields.Selection([('minutes', 'Minutes'), interval_type = fields.Selection([('minutes', 'Minutes'),
('hours', 'Hours'), ('hours', 'Hours'),
('days', 'Days'), ('days', 'Days'),
@ -83,13 +92,10 @@ class ir_cron(models.Model):
failure_count = fields.Integer(default=0, help="The number of consecutive failures of this job. It is automatically reset on success.") failure_count = fields.Integer(default=0, help="The number of consecutive failures of this job. It is automatically reset on success.")
first_failure_date = fields.Datetime(string='First Failure Date', help="The first time the cron failed. It is automatically reset on success.") first_failure_date = fields.Datetime(string='First Failure Date', help="The first time the cron failed. It is automatically reset on success.")
_sql_constraints = [ _check_strictly_positive_interval = models.Constraint(
(
'check_strictly_positive_interval',
'CHECK(interval_number > 0)', 'CHECK(interval_number > 0)',
'The interval number must be a strictly positive number.' "The interval number must be a strictly positive number.",
), )
]
@api.depends('ir_actions_server_id.name') @api.depends('ir_actions_server_id.name')
def _compute_cron_name(self): def _compute_cron_name(self):
@ -101,57 +107,51 @@ class ir_cron(models.Model):
for vals in vals_list: for vals in vals_list:
vals['usage'] = 'ir_cron' vals['usage'] = 'ir_cron'
if os.getenv('ODOO_NOTIFY_CRON_CHANGES'): if os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
self._cr.postcommit.add(self._notifydb) self.env.cr.postcommit.add(self._notifydb)
return super().create(vals_list) return super().create(vals_list)
@api.model @api.model
def default_get(self, fields_list): def default_get(self, fields):
# only 'code' state is supported for cron job so set it as default # only 'code' state is supported for cron job so set it as default
if not self._context.get('default_state'): model = self
self = self.with_context(default_state='code') if not model.env.context.get('default_state'):
return super(ir_cron, self).default_get(fields_list) model = model.with_context(default_state='code')
return super(IrCron, model).default_get(fields)
def method_direct_trigger(self): def method_direct_trigger(self):
"""Run the CRON job in the current (HTTP) thread.
The job is still ran as it would be by the scheduler: a new cursor
is used for the execution of the job.
:raises UserError: when the job is already running
"""
self.ensure_one() self.ensure_one()
self.browse().check_access('write') self.browse().check_access('write')
self._try_lock() # cron will be run in a separate transaction, flush before and
_logger.info('Job %r (%s) started manually', self.name, self.id) # invalidate because data will be changed by that transaction
self, _ = self.with_user(self.user_id).with_context({'lastcall': self.lastcall})._add_progress() # noqa: PLW0642 self.env.invalidate_all(flush=True)
self.ir_actions_server_id.run() cron_cr = self.env.cr
self.lastcall = fields.Datetime.now() job = self._acquire_one_job(cron_cr, self.id, include_not_ready=True)
self.env.flush_all() if not job:
_logger.info('Job %r (%s) done', self.name, self.id) raise UserError(self.env._("Job '%s' already executing", self.name))
self._process_job(cron_cr, job)
return True return True
@classmethod @staticmethod
def _process_jobs(cls, db_name): def _process_jobs(db_name: str) -> None:
""" Execute every job ready to be run on this database. """ """ Execute every job ready to be run on this database. """
try: try:
db = odoo.sql_db.db_connect(db_name) db = sql_db.db_connect(db_name)
threading.current_thread().dbname = db_name threading.current_thread().dbname = db_name
with db.cursor() as cron_cr: with db.cursor() as cron_cr:
cls = IrCron
cls._check_version(cron_cr) cls._check_version(cron_cr)
jobs = cls._get_all_ready_jobs(cron_cr) jobs = cls._get_all_ready_jobs(cron_cr)
if not jobs: if not jobs:
return return
cls._check_modules_state(cron_cr, jobs) cls._check_modules_state(cron_cr, jobs)
cls._process_jobs_loop(cron_cr, job_ids=[job['id'] for job in jobs])
for job_id in (job['id'] for job in jobs):
try:
job = cls._acquire_one_job(cron_cr, job_id)
except psycopg2.extensions.TransactionRollbackError:
cron_cr.rollback()
_logger.debug("job %s has been processed by another worker, skip", job_id)
continue
if not job:
_logger.debug("another worker is processing job %s, skip", job_id)
continue
_logger.debug("job %s acquired", job_id)
# take into account overridings of _process_job() on that database
registry = Registry(db_name).check_signaling()
registry[cls._name]._process_job(db, cron_cr, job)
_logger.debug("job %s updated and released", job_id)
except BadVersion: except BadVersion:
_logger.warning('Skipping database %s as its base version is not %s.', db_name, BASE_VERSION) _logger.warning('Skipping database %s as its base version is not %s.', db_name, BASE_VERSION)
except BadModuleState: except BadModuleState:
@ -159,7 +159,7 @@ class ir_cron(models.Model):
except psycopg2.errors.UndefinedTable: except psycopg2.errors.UndefinedTable:
# The table ir_cron does not exist; this is probably not an OpenERP database. # The table ir_cron does not exist; this is probably not an OpenERP database.
_logger.warning('Tried to poll an undefined table on database %s.', db_name) _logger.warning('Tried to poll an undefined table on database %s.', db_name)
except psycopg2.ProgrammingError as e: except psycopg2.ProgrammingError:
raise raise
except Exception: except Exception:
_logger.warning('Exception in cron:', exc_info=True) _logger.warning('Exception in cron:', exc_info=True)
@ -167,8 +167,33 @@ class ir_cron(models.Model):
if hasattr(threading.current_thread(), 'dbname'): if hasattr(threading.current_thread(), 'dbname'):
del threading.current_thread().dbname del threading.current_thread().dbname
@classmethod @staticmethod
def _check_version(cls, cron_cr): def _process_jobs_loop(cron_cr: BaseCursor, *, job_ids: Iterable[int] = ()):
""" Process ready jobs to run on this database.
The `cron_cr` is used to lock the currently processed job and relased
by committing after each job.
"""
db_name = cron_cr.dbname
for job_id in job_ids:
try:
job = IrCron._acquire_one_job(cron_cr, job_id)
except psycopg2.extensions.TransactionRollbackError:
cron_cr.rollback()
_logger.debug("job %s has been processed by another worker, skip", job_id)
continue
if not job:
_logger.debug("job %s is being processed by another worker, skip", job_id)
continue
_logger.debug("job %s acquired", job_id)
# take into account overridings of _process_job() on that database
registry = Registry(db_name)
registry[IrCron._name]._process_job(cron_cr, job)
cron_cr.commit()
_logger.debug("job %s updated and released", job_id)
@staticmethod
def _check_version(cron_cr):
""" Ensure the code version matches the database version """ """ Ensure the code version matches the database version """
cron_cr.execute(""" cron_cr.execute("""
SELECT latest_version SELECT latest_version
@ -181,8 +206,8 @@ class ir_cron(models.Model):
if version != BASE_VERSION: if version != BASE_VERSION:
raise BadVersion() raise BadVersion()
@classmethod @staticmethod
def _check_modules_state(cls, cr, jobs): def _check_modules_state(cr, jobs):
""" Ensure no module is installing or upgrading """ """ Ensure no module is installing or upgrading """
cr.execute(""" cr.execute("""
SELECT COUNT(*) SELECT COUNT(*)
@ -196,10 +221,10 @@ class ir_cron(models.Model):
if not jobs: if not jobs:
raise BadModuleState() raise BadModuleState()
oldest = min([ # use the max(job['nextcall'], job['write_date']) to avoid the cron
fields.Datetime.from_string(job['nextcall']) # reset_module_state for an ongoing module installation process
for job in jobs # right after installing a module with an old 'nextcall' cron in data
]) oldest = min(max(job['nextcall'], job['write_date'] or job['nextcall']) for job in jobs)
if datetime.now() - oldest < MAX_FAIL_TIME: if datetime.now() - oldest < MAX_FAIL_TIME:
raise BadModuleState() raise BadModuleState()
@ -207,28 +232,35 @@ class ir_cron(models.Model):
# per minute for 5h) in which case we assume that the crons are stuck # per minute for 5h) in which case we assume that the crons are stuck
# because the db has zombie states and we force a call to # because the db has zombie states and we force a call to
# reset_module_states. # reset_module_states.
odoo.modules.reset_modules_state(cr.dbname) from odoo.modules.loading import reset_modules_state # noqa: PLC0415
reset_modules_state(cr.dbname)
@classmethod @staticmethod
def _get_all_ready_jobs(cls, cr): def _get_ready_sql_condition(cr: BaseCursor) -> SQL:
""" Return a list of all jobs that are ready to be executed """ return SQL("""
cr.execute(""" active IS TRUE
SELECT * AND (nextcall <= %(now)s
FROM ir_cron OR id IN (
WHERE active = true
AND (nextcall <= (now() at time zone 'UTC')
OR id in (
SELECT cron_id SELECT cron_id
FROM ir_cron_trigger FROM ir_cron_trigger
WHERE call_at <= (now() at time zone 'UTC') WHERE call_at <= %(now)s
) )
) )
""", now=cr.now())
@staticmethod
def _get_all_ready_jobs(cr: BaseCursor) -> list[dict]:
""" Return a list of all jobs that are ready to be executed """
cr.execute(SQL("""
SELECT *
FROM ir_cron
WHERE %s
ORDER BY failure_count, priority, id ORDER BY failure_count, priority, id
""") """, IrCron._get_ready_sql_condition(cr)))
return cr.dictfetchall() return cr.dictfetchall()
@classmethod @staticmethod
def _acquire_one_job(cls, cr, job_id): def _acquire_one_job(cr: BaseCursor, job_id: int, *, include_not_ready: bool = False) -> dict | None:
""" """
Acquire for update the job with id ``job_id``. Acquire for update the job with id ``job_id``.
@ -270,32 +302,25 @@ class ir_cron(models.Model):
# #
# Learn more: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS # Learn more: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS
query = """ where_clause = SQL("id = %s", job_id)
if not include_not_ready:
where_clause = SQL("%s AND %s", where_clause, IrCron._get_ready_sql_condition(cr))
query = SQL("""
WITH last_cron_progress AS ( WITH last_cron_progress AS (
SELECT id as progress_id, cron_id, timed_out_counter, done, remaining SELECT id as progress_id, cron_id, timed_out_counter, done, remaining
FROM ir_cron_progress FROM ir_cron_progress
WHERE cron_id = %s WHERE cron_id = %(cron_id)s
ORDER BY id DESC ORDER BY id DESC
LIMIT 1 LIMIT 1
) )
SELECT * SELECT *
FROM ir_cron FROM ir_cron
LEFT JOIN last_cron_progress lcp ON lcp.cron_id = ir_cron.id LEFT JOIN last_cron_progress lcp ON lcp.cron_id = ir_cron.id
WHERE ir_cron.active = true WHERE %(where)s
AND (nextcall <= (now() at time zone 'UTC')
OR EXISTS (
SELECT cron_id
FROM ir_cron_trigger
WHERE call_at <= (now() at time zone 'UTC')
AND cron_id = ir_cron.id
)
)
AND id = %s
ORDER BY priority
FOR NO KEY UPDATE SKIP LOCKED FOR NO KEY UPDATE SKIP LOCKED
""" """, cron_id=job_id, where=where_clause)
try: try:
cr.execute(query, [job_id, job_id], log_exceptions=False) cr.execute(query, log_exceptions=False)
except psycopg2.extensions.TransactionRollbackError: except psycopg2.extensions.TransactionRollbackError:
# A serialization error can occur when another cron worker # A serialization error can occur when another cron worker
# commits the new `nextcall` value of a cron it just ran and # commits the new `nextcall` value of a cron it just ran and
@ -326,7 +351,7 @@ class ir_cron(models.Model):
_logger.warning(message) _logger.warning(message)
@classmethod @classmethod
def _process_job(cls, db, cron_cr, job): def _process_job(cls, cron_cr: BaseCursor, job) -> None:
""" """
Execute the cron's server action in a dedicated transaction. Execute the cron's server action in a dedicated transaction.
@ -335,10 +360,8 @@ class ir_cron(models.Model):
``'failed'``. ``'failed'``.
The server action can use the progress API via the method The server action can use the progress API via the method
:meth:`_notify_progress` to report processing progress, i.e. how :meth:`_commit_progress` to report how many records are done
many records are done and how many records are remaining to in each batch.
process.
Those progress notifications are used to determine the job's Those progress notifications are used to determine the job's
``CompletionStatus`` and to determine the next time the cron ``CompletionStatus`` and to determine the next time the cron
will be executed: will be executed:
@ -358,6 +381,7 @@ class ir_cron(models.Model):
env = api.Environment(cron_cr, job['user_id'], {}) env = api.Environment(cron_cr, job['user_id'], {})
ir_cron = env[cls._name] ir_cron = env[cls._name]
ir_cron._clear_schedule(job)
failed_by_timeout = ( failed_by_timeout = (
job['timed_out_counter'] >= CONSECUTIVE_TIMEOUT_FOR_FAILURE job['timed_out_counter'] >= CONSECUTIVE_TIMEOUT_FOR_FAILURE
and not job['done'] and not job['done']
@ -383,12 +407,10 @@ class ir_cron(models.Model):
if os.getenv('ODOO_NOTIFY_CRON_CHANGES'): if os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
cron_cr.postcommit.add(ir_cron._notifydb) # See: `_notifydb` cron_cr.postcommit.add(ir_cron._notifydb) # See: `_notifydb`
else: else:
raise RuntimeError("unreachable") raise RuntimeError(f"unreachable {status=}")
cron_cr.commit()
@classmethod @classmethod
def _run_job(cls, job): def _run_job(cls, job) -> CompletionStatus:
""" """
Execute the job's server action multiple times until it Execute the job's server action multiple times until it
completes. The completion status is returned. completes. The completion status is returned.
@ -411,20 +433,33 @@ class ir_cron(models.Model):
timed_out_counter = job['timed_out_counter'] timed_out_counter = job['timed_out_counter']
with cls.pool.cursor() as job_cr: with cls.pool.cursor() as job_cr:
start_time = time.monotonic()
env = api.Environment(job_cr, job['user_id'], { env = api.Environment(job_cr, job['user_id'], {
'lastcall': job['lastcall'], 'lastcall': job['lastcall'],
'cron_id': job['id'], 'cron_id': job['id'],
'cron_end_time': start_time + MIN_TIME_PER_JOB,
}) })
cron = env[cls._name].browse(job['id']) cron = env[cls._name].browse(job['id'])
status = None status = None
for i in range(MAX_BATCH_PER_CRON_JOB): loop_count = 0
_logger.info('Job %r (%s) starting', job['cron_name'], job['id'])
# stop after MIN_RUNS_PER_JOB runs and MIN_TIME_PER_JOB seconds, or
# upon full completion or failure
while (
loop_count < MIN_RUNS_PER_JOB
or time.monotonic() < env.context['cron_end_time']
):
cron, progress = cron._add_progress(timed_out_counter=timed_out_counter) cron, progress = cron._add_progress(timed_out_counter=timed_out_counter)
job_cr.commit() job_cr.commit()
try: try:
# signaling check and commit is done inside `_callback`
cron._callback(job['cron_name'], job['ir_actions_server_id']) cron._callback(job['cron_name'], job['ir_actions_server_id'])
except Exception: # noqa: BLE001 except Exception: # noqa: BLE001
_logger.exception('Job %r (%s) server action #%s failed',
job['cron_name'], job['id'], job['ir_actions_server_id'])
if progress.done and progress.remaining: if progress.done and progress.remaining:
# we do not consider it a failure if some progress has # we do not consider it a failure if some progress has
# been committed # been committed
@ -433,28 +468,38 @@ class ir_cron(models.Model):
status = CompletionStatus.FAILED status = CompletionStatus.FAILED
else: else:
if not progress.remaining: if not progress.remaining:
status = CompletionStatus.FULLY_DONE
elif not progress.done:
# assume the server action doesn't use the progress API # assume the server action doesn't use the progress API
# and that there is nothing left to process # and that there is nothing left to process
status = CompletionStatus.FULLY_DONE status = CompletionStatus.FULLY_DONE
else: else:
status = CompletionStatus.PARTIALLY_DONE status = CompletionStatus.PARTIALLY_DONE
if not progress.done:
break
if status == CompletionStatus.FULLY_DONE and progress.deactivate: if status == CompletionStatus.FULLY_DONE and progress.deactivate:
job['active'] = False job['active'] = False
finally: finally:
done, remaining = progress.done, progress.remaining
loop_count += 1
progress.timed_out_counter = 0 progress.timed_out_counter = 0
timed_out_counter = 0 timed_out_counter = 0
job_cr.commit() job_cr.commit() # ensure we have no leftovers
_logger.info('Job %r (%s) processed %s records, %s records remaining',
job['cron_name'], job['id'], progress.done, progress.remaining) _logger.debug('Job %r (%s) processed %s records, %s records remaining',
job['cron_name'], job['id'], done, remaining)
if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED): if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED):
break break
_logger.info(
'Job %r (%s) %s (#loop %s; done %s; remaining %s; duration %.2fs)',
job['cron_name'], job['id'], status,
loop_count, done, remaining, time.monotonic() - start_time)
return status return status
def _update_failure_count(self, job, status): @api.model
def _update_failure_count(self, job: dict, status: CompletionStatus) -> None:
""" """
Update cron ``failure_count`` and ``first_failure_date`` given Update cron ``failure_count`` and ``first_failure_date`` given
the job's completion status. Deactivate the cron when BOTH the the job's completion status. Deactivate the cron when BOTH the
@ -469,26 +514,25 @@ class ir_cron(models.Model):
reached, ``active`` is set to ``False`` and both values are reached, ``active`` is set to ``False`` and both values are
reset. reset.
""" """
now = fields.Datetime.context_timestamp(self, datetime.utcnow())
if status == CompletionStatus.FAILED: if status == CompletionStatus.FAILED:
now = self.env.cr.now().replace(microsecond=0)
failure_count = job['failure_count'] + 1 failure_count = job['failure_count'] + 1
first_failure_date = job['first_failure_date'] or now first_failure_date = job['first_failure_date'] or now
active = job['active'] active = job['active']
if ( if (
failure_count >= MIN_FAILURE_COUNT_BEFORE_DEACTIVATION failure_count >= MIN_FAILURE_COUNT_BEFORE_DEACTIVATION
and fields.Datetime.context_timestamp(self, first_failure_date) + MIN_DELTA_BEFORE_DEACTIVATION < now and first_failure_date + MIN_DELTA_BEFORE_DEACTIVATION < now
): ):
failure_count = 0 failure_count = 0
first_failure_date = None first_failure_date = None
active = False active = False
self._notify_admin(_( self._notify_admin(self.env._(
"Cron job %(name)s (%(id)s) has been deactivated after failing %(count)s times. " "Cron job %(name)s (%(id)s) has been deactivated after failing %(count)s times. "
"More information can be found in the server logs around %(time)s.", "More information can be found in the server logs around %(time)s.",
name=repr(job['cron_name']), name=repr(job['cron_name']),
id=job['id'], id=job['id'],
count=MIN_FAILURE_COUNT_BEFORE_DEACTIVATION, count=MIN_FAILURE_COUNT_BEFORE_DEACTIVATION,
time=datetime.replace(datetime.utcnow(), microsecond=0), time=now,
)) ))
else: else:
failure_count = 0 failure_count = 0
@ -508,44 +552,52 @@ class ir_cron(models.Model):
job['id'], job['id'],
]) ])
def _reschedule_later(self, job): @api.model
def _clear_schedule(self, job):
"""Remove triggers for the given job."""
now = self.env.cr.now().replace(microsecond=0)
self.env.cr.execute("""
DELETE FROM ir_cron_trigger
WHERE cron_id = %s
AND call_at <= %s
""", [job['id'], now])
@api.model
def _reschedule_later(self, job: dict) -> None:
""" """
Reschedule the job to be executed later, after its regular Reschedule the job to be executed later, after its regular
interval or upon a trigger. interval or upon a trigger.
""" """
# Use the user's timezone to compare and compute datetimes, otherwise unexpected results may appear. now = self.env.cr.now().replace(microsecond=0)
# For instance, adding 1 month in UTC to July 1st at midnight in GMT+2 gives July 30 instead of August 1st! nextcall = job['nextcall']
now = fields.Datetime.context_timestamp(self, datetime.utcnow()) # Use the timezone of the user when adding the interval. When adding a
nextcall = fields.Datetime.context_timestamp(self, job['nextcall']) # day or more, the user may want to keep the same hour each day.
# The interval won't be fixed, but the hour will stay the same,
# even when changing DST.
interval = _intervalTypes[job['interval_type']](job['interval_number']) interval = _intervalTypes[job['interval_type']](job['interval_number'])
while nextcall <= now: while nextcall <= now:
nextcall = fields.Datetime.context_timestamp(self, nextcall)
nextcall += interval nextcall += interval
nextcall = nextcall.astimezone(timezone.utc).replace(tzinfo=None)
_logger.info('Job %r (%s) completed', job['cron_name'], job['id'])
self.env.cr.execute(""" self.env.cr.execute("""
UPDATE ir_cron UPDATE ir_cron
SET nextcall = %s, SET nextcall = %s,
lastcall = %s lastcall = %s
WHERE id = %s WHERE id = %s
""", [ """, [nextcall, now, job['id']])
fields.Datetime.to_string(nextcall.astimezone(pytz.UTC)),
fields.Datetime.to_string(now.astimezone(pytz.UTC)),
job['id'],
])
self.env.cr.execute(""" @api.model
DELETE FROM ir_cron_trigger def _reschedule_asap(self, job: dict) -> None:
WHERE cron_id = %s
AND call_at < (now() at time zone 'UTC')
""", [job['id']])
def _reschedule_asap(self, job):
""" """
Reschedule the job to be executed ASAP, after the other cron Reschedule the job to be executed ASAP, after the other cron
jobs had a chance to run. jobs had a chance to run.
""" """
# leave the existing nextcall and triggers, this leave the job "ready" now = self.env.cr.now().replace(microsecond=0)
pass self.env.cr.execute("""
INSERT INTO ir_cron_trigger(call_at, cron_id)
VALUES (%s, %s)
""", [now, job['id']])
def _callback(self, cron_name, server_action_id): def _callback(self, cron_name, server_action_id):
""" Run the method associated to a given job. It takes care of logging """ Run the method associated to a given job. It takes care of logging
@ -555,81 +607,47 @@ class ir_cron(models.Model):
try: try:
if self.pool != self.pool.check_signaling(): if self.pool != self.pool.check_signaling():
# the registry has changed, reload self in the new registry # the registry has changed, reload self in the new registry
self.env.reset() self.env.transaction.reset()
self = self.env()[self._name]
_logger.debug( _logger.debug(
"cron.object.execute(%r, %d, '*', %r, %d)", "cron.object.execute(%r, %d, '*', %r, %d)",
self.env.cr.dbname, self.env.cr.dbname,
self._uid, self.env.uid,
cron_name, cron_name,
server_action_id, server_action_id,
) )
_logger.info('Job %r (%s) starting', cron_name, self.id)
start_time = time.time()
self.env['ir.actions.server'].browse(server_action_id).run() self.env['ir.actions.server'].browse(server_action_id).run()
self.env.flush_all() self.env.flush_all()
end_time = time.time()
_logger.info('Job %r (%s) done in %.3fs', cron_name, self.id, end_time - start_time)
if start_time and _logger.isEnabledFor(logging.DEBUG):
_logger.debug('Job %r (%s) server action #%s with uid %s executed in %.3fs',
cron_name, self.id, server_action_id, self.env.uid, end_time - start_time)
self.pool.signal_changes() self.pool.signal_changes()
self.env.cr.commit()
except Exception: except Exception:
self.pool.reset_changes() self.pool.reset_changes()
_logger.exception('Job %r (%s) server action #%s failed', cron_name, self.id, server_action_id)
self.env.cr.rollback() self.env.cr.rollback()
raise raise
def _try_lock(self, lockfk=False):
"""Try to grab a dummy exclusive write-lock to the rows with the given ids,
to make sure a following write() or unlink() will not block due
to a process currently executing those cron tasks.
:param lockfk: acquire a strong row lock which conflicts with
the lock acquired by foreign keys when they
reference this row.
"""
if not self:
return
row_level_lock = "UPDATE" if lockfk else "NO KEY UPDATE"
try:
self._cr.execute(f"""
SELECT id
FROM "{self._table}"
WHERE id IN %s
FOR {row_level_lock} NOWAIT
""", [tuple(self.ids)], log_exceptions=False)
except psycopg2.OperationalError:
self._cr.rollback() # early rollback to allow translations to work for the user feedback
raise UserError(_("Record cannot be modified right now: "
"This cron task is currently being executed and may not be modified "
"Please try again in a few minutes"))
def write(self, vals): def write(self, vals):
self._try_lock()
if ('nextcall' in vals or vals.get('active')) and os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
self._cr.postcommit.add(self._notifydb)
return super(ir_cron, self).write(vals)
def unlink(self):
self._try_lock(lockfk=True)
return super(ir_cron, self).unlink()
def try_write(self, values):
try: try:
with self._cr.savepoint(): self.lock_for_update(allow_referencing=True)
self._cr.execute(f""" except LockError:
SELECT id raise UserError(self.env._(
FROM "{self._table}" "Record cannot be modified right now: "
WHERE id IN %s "This cron task is currently being executed and may not be modified "
FOR NO KEY UPDATE NOWAIT "Please try again in a few minutes"
""", [tuple(self.ids)], log_exceptions=False) )) from None
except psycopg2.OperationalError: if ('nextcall' in vals or vals.get('active')) and os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
pass self.env.cr.postcommit.add(self._notifydb)
else: return super().write(vals)
return super(ir_cron, self).write(values)
return False @api.ondelete(at_uninstall=False)
def _unlink_unless_running(self):
try:
self.lock_for_update()
except LockError:
raise UserError(self.env._(
"Record cannot be modified right now: "
"This cron task is currently being executed and may not be modified "
"Please try again in a few minutes"
)) from None
@api.model @api.model
def toggle(self, model, domain): def toggle(self, model, domain):
@ -639,9 +657,13 @@ class ir_cron(models.Model):
return True return True
active = bool(self.env[model].search_count(domain)) active = bool(self.env[model].search_count(domain))
return self.try_write({'active': active}) try:
self.lock_for_update(allow_referencing=True)
except LockError:
return True
return self.write({'active': active})
def _trigger(self, at=None): def _trigger(self, at: datetime | Iterable[datetime] | None = None):
""" """
Schedule a cron job to be executed soon independently of its Schedule a cron job to be executed soon independently of its
``nextcall`` field value. ``nextcall`` field value.
@ -655,11 +677,10 @@ class ir_cron(models.Model):
datetime. The actual implementation is in :meth:`~._trigger_list`, datetime. The actual implementation is in :meth:`~._trigger_list`,
which is the recommended method for overrides. which is the recommended method for overrides.
:param Optional[Union[datetime.datetime, list[datetime.datetime]]] at: :param at:
When to execute the cron, at one or several moments in time When to execute the cron, at one or several moments in time
instead of as soon as possible. instead of as soon as possible.
:return: the created triggers records :return: the created triggers records
:rtype: recordset
""" """
if at is None: if at is None:
at_list = [fields.Datetime.now()] at_list = [fields.Datetime.now()]
@ -671,14 +692,12 @@ class ir_cron(models.Model):
return self._trigger_list(at_list) return self._trigger_list(at_list)
def _trigger_list(self, at_list): def _trigger_list(self, at_list: list[datetime]):
""" """
Implementation of :meth:`~._trigger`. Implementation of :meth:`~._trigger`.
:param list[datetime.datetime] at_list: :param at_list: Execute the cron later, at precise moments in time.
Execute the cron later, at precise moments in time.
:return: the created triggers records :return: the created triggers records
:rtype: recordset
""" """
self.ensure_one() self.ensure_one()
now = fields.Datetime.now() now = fields.Datetime.now()
@ -699,15 +718,16 @@ class ir_cron(models.Model):
_logger.debug('Job %r (%s) will execute at %s', self.sudo().name, self.id, ats) _logger.debug('Job %r (%s) will execute at %s', self.sudo().name, self.id, ats)
if min(at_list) <= now or os.getenv('ODOO_NOTIFY_CRON_CHANGES'): if min(at_list) <= now or os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
self._cr.postcommit.add(self._notifydb) self.env.cr.postcommit.add(self._notifydb)
return triggers return triggers
@api.model
def _notifydb(self): def _notifydb(self):
""" Wake up the cron workers """ Wake up the cron workers
The ODOO_NOTIFY_CRON_CHANGES environment variable allows to force the notifydb on both The ODOO_NOTIFY_CRON_CHANGES environment variable allows to force the notifydb on both
ir_cron modification and on trigger creation (regardless of call_at) IrCron modification and on trigger creation (regardless of call_at)
""" """
with odoo.sql_db.db_connect('postgres').cursor() as cr: with sql_db.db_connect('postgres').cursor() as cr:
cr.execute(SQL("SELECT %s('cron_trigger', %s)", SQL.identifier(ODOO_NOTIFY_FUNCTION), self.env.cr.dbname)) cr.execute(SQL("SELECT %s('cron_trigger', %s)", SQL.identifier(ODOO_NOTIFY_FUNCTION), self.env.cr.dbname))
_logger.debug("cron workers notified") _logger.debug("cron workers notified")
@ -731,9 +751,11 @@ class ir_cron(models.Model):
}]) }])
return self.with_context(ir_cron_progress_id=progress.id), progress return self.with_context(ir_cron_progress_id=progress.id), progress
def _notify_progress(self, *, done, remaining, deactivate=False): @api.deprecated("Since 19.0, use _commit_progress")
def _notify_progress(self, *, done: int, remaining: int, deactivate: bool = False):
""" """
Log the progress of the cron job. Log the progress of the cron job.
Use ``_commit_progress()`` instead.
:param int done: the number of tasks already processed :param int done: the number of tasks already processed
:param int remaining: the number of tasks left to process :param int remaining: the number of tasks left to process
@ -743,32 +765,88 @@ class ir_cron(models.Model):
return return
if done < 0 or remaining < 0: if done < 0 or remaining < 0:
raise ValueError("`done` and `remaining` must be positive integers.") raise ValueError("`done` and `remaining` must be positive integers.")
self.env['ir.cron.progress'].sudo().browse(progress_id).write({ progress = self.env['ir.cron.progress'].sudo().browse(progress_id)
assert progress.cron_id.id == self.env.context.get('cron_id'), "Progress on the wrong cron_id"
progress.write({
'remaining': remaining, 'remaining': remaining,
'done': done, 'done': done,
'deactivate': deactivate, 'deactivate': deactivate,
}) })
@api.model
def _commit_progress(
self,
processed: int = 0,
*,
remaining: int | None = None,
deactivate: bool = False,
) -> float:
"""
Commit and log progress for the batch from a cron function.
class ir_cron_trigger(models.Model): The number of items processed is added to the current done count.
If you don't specify a remaining count, the number of items processed
is subtracted from the existing remaining count.
If called from outside the cron job, the progress function call will
just commit.
:param processed: number of processed items in this step
:param remaining: set the remaining count to the given count
:param deactivate: deactivate the cron after running it
:return: remaining time (seconds) for the cron run
"""
ctx = self.env.context
progress = self.env['ir.cron.progress'].sudo().browse(ctx.get('ir_cron_progress_id'))
if not progress:
# not called during a cron, just commit
self.env.cr.commit()
return float('inf')
assert processed >= 0, 'processed must be positive'
assert (remaining or 0) >= 0, "remaining must be positive"
assert progress.cron_id.id == ctx.get('cron_id'), "Progress on the wrong cron_id"
if remaining is None:
remaining = max(progress.remaining - processed, 0)
done = progress.done + processed
vals = {
'remaining': remaining,
'done': done,
}
if deactivate:
vals['deactivate'] = True
progress.write(vals)
self.env.cr.commit()
return max(ctx.get('cron_end_time', float('inf')) - time.monotonic(), 0)
def action_open_parent_action(self):
return self.ir_actions_server_id.action_open_parent_action()
def action_open_scheduled_action(self):
return self.ir_actions_server_id.action_open_scheduled_action()
class IrCronTrigger(models.Model):
_name = 'ir.cron.trigger' _name = 'ir.cron.trigger'
_description = 'Triggered actions' _description = 'Triggered actions'
_rec_name = 'cron_id' _rec_name = 'cron_id'
_allow_sudo_commands = False _allow_sudo_commands = False
cron_id = fields.Many2one("ir.cron", index=True) cron_id = fields.Many2one("ir.cron", index=True, required=True, ondelete="cascade")
call_at = fields.Datetime(index=True) call_at = fields.Datetime(index=True, required=True)
@api.autovacuum @api.autovacuum
def _gc_cron_triggers(self): def _gc_cron_triggers(self):
domain = [('call_at', '<', datetime.now() + relativedelta(weeks=-1))] # active cron jobs are cleared by `_clear_schedule` when the job starts
records = self.search(domain, limit=models.GC_UNLINK_LIMIT) domain = [
if len(records) >= models.GC_UNLINK_LIMIT: ('call_at', '<', datetime.now() + relativedelta(weeks=-1)),
self.env.ref('base.autovacuum_job')._trigger() ('cron_id.active', '=', False),
return records.unlink() ]
records = self.search(domain, limit=GC_UNLINK_LIMIT)
records.unlink()
return len(records), len(records) == GC_UNLINK_LIMIT # done, remaining
class ir_cron_progress(models.Model): class IrCronProgress(models.Model):
_name = 'ir.cron.progress' _name = 'ir.cron.progress'
_description = 'Progress of Scheduled Actions' _description = 'Progress of Scheduled Actions'
_rec_name = 'cron_id' _rec_name = 'cron_id'
@ -781,4 +859,6 @@ class ir_cron_progress(models.Model):
@api.autovacuum @api.autovacuum
def _gc_cron_progress(self): def _gc_cron_progress(self):
self.search([('create_date', '<', datetime.now() - relativedelta(weeks=1))]).unlink() records = self.search([('create_date', '<', datetime.now() - relativedelta(weeks=1))], limit=GC_UNLINK_LIMIT)
records.unlink()
return len(records), len(records) == GC_UNLINK_LIMIT # done, remaining

View file

@ -1,11 +1,12 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
import json import json
from datetime import date from datetime import date
from odoo import api, fields, models, tools, _, SUPERUSER_ID from odoo import api, fields, models, tools
from odoo.api import SUPERUSER_ID
from odoo.exceptions import ValidationError from odoo.exceptions import ValidationError
from odoo.fields import Domain
from odoo.tools import SQL from odoo.tools import SQL
@ -31,7 +32,7 @@ class IrDefault(models.Model):
try: try:
json.loads(record.json_value) json.loads(record.json_value)
except json.JSONDecodeError: except json.JSONDecodeError:
raise ValidationError(_('Invalid JSON format in Default Value field.')) raise ValidationError(self.env._('Invalid JSON format in Default Value field.'))
@api.model_create_multi @api.model_create_multi
def create(self, vals_list): def create(self, vals_list):
@ -88,11 +89,11 @@ class IrDefault(models.Model):
value = field.to_string(value) value = field.to_string(value)
json_value = json.dumps(value, ensure_ascii=False) json_value = json.dumps(value, ensure_ascii=False)
except KeyError: except KeyError:
raise ValidationError(_("Invalid field %(model)s.%(field)s", model=model_name, field=field_name)) raise ValidationError(self.env._("Invalid field %(model)s.%(field)s", model=model_name, field=field_name))
except Exception: except Exception:
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s", model=model_name, field=field_name, value=value)) raise ValidationError(self.env._("Invalid value for %(model)s.%(field)s: %(value)s", model=model_name, field=field_name, value=value))
if field.type == 'integer' and not (-2**31 < parsed < 2**31-1): if field.type == 'integer' and not (-2**31 < parsed < 2**31-1):
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model=model_name, field=field_name, value=value)) raise ValidationError(self.env._("Invalid value for %(model)s.%(field)s: %(value)s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model=model_name, field=field_name, value=value))
# update existing default for the same scope, or create one # update existing default for the same scope, or create one
field = self.env['ir.model.fields']._get(model_name, field_name) field = self.env['ir.model.fields']._get(model_name, field_name)
@ -213,16 +214,19 @@ class IrDefault(models.Model):
for id_ in company_ids for id_ in company_ids
}) })
def _evaluate_condition_with_fallback(self, model_name, condition): def _evaluate_condition_with_fallback(self, model_name, field_expr, operator, value):
""" """
when the field value of the condition is company_dependent without when the field value of the condition is company_dependent without
customization, evaluate if its fallback value will be kept by customization, evaluate if its fallback value will be kept by
the condition the condition
return True/False/None(for unknown) return True/False/None(for unknown)
""" """
field_name = condition[0].split('.', 1)[0] field_name, _property_name = fields.parse_field_expr(field_expr)
model = self.env[model_name] model = self.env[model_name]
field = model._fields[field_name] field = model._fields[field_name]
fallback = field.get_company_dependent_fallback(model) fallback = field.get_company_dependent_fallback(model)
try:
record = model.new({field_name: field.convert_to_write(fallback, model)}) record = model.new({field_name: field.convert_to_write(fallback, model)})
return bool(record.filtered_domain([condition])) return bool(record.filtered_domain(Domain(field_expr, operator, value)))
except ValueError:
return None

View file

@ -1,19 +1,17 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models from odoo import models
from odoo.modules.loading import force_demo
from odoo.addons.base.models.ir_module import assert_log_admin_access from odoo.addons.base.models.ir_module import assert_log_admin_access
class IrDemo(models.TransientModel): class IrDemo(models.TransientModel):
_name = 'ir.demo' _name = 'ir.demo'
_description = 'Demo' _description = 'Demo'
@assert_log_admin_access @assert_log_admin_access
def install_demo(self): def install_demo(self):
force_demo(self.env) import odoo.modules.loading # noqa: PLC0415
odoo.modules.loading.force_demo(self.env)
return { return {
'type': 'ir.actions.act_url', 'type': 'ir.actions.act_url',
'target': 'self', 'target': 'self',

View file

@ -1,7 +1,7 @@
from odoo import api, fields, models from odoo import api, fields, models
class DemoFailure(models.TransientModel): class IrDemo_Failure(models.TransientModel):
""" Stores modules for which we could not install demo data """ Stores modules for which we could not install demo data
""" """
_name = 'ir.demo_failure' _name = 'ir.demo_failure'
@ -11,7 +11,8 @@ class DemoFailure(models.TransientModel):
error = fields.Char(string="Error") error = fields.Char(string="Error")
wizard_id = fields.Many2one('ir.demo_failure.wizard') wizard_id = fields.Many2one('ir.demo_failure.wizard')
class DemoFailureWizard(models.TransientModel):
class IrDemo_FailureWizard(models.TransientModel):
_name = 'ir.demo_failure.wizard' _name = 'ir.demo_failure.wizard'
_description = 'Demo Failure wizard' _description = 'Demo Failure wizard'

View file

@ -1,6 +1,6 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _ from odoo import api, fields, models
from odoo.exceptions import UserError from odoo.exceptions import UserError
from ast import literal_eval from ast import literal_eval
@ -28,22 +28,17 @@ class IrEmbeddedActions(models.Model):
context = fields.Char(default="{}", help="Context dictionary as Python expression, empty by default (Default: {})") context = fields.Char(default="{}", help="Context dictionary as Python expression, empty by default (Default: {})")
groups_ids = fields.Many2many('res.groups', help='Groups that can execute the embedded action. Leave empty to allow everybody.') groups_ids = fields.Many2many('res.groups', help='Groups that can execute the embedded action. Leave empty to allow everybody.')
_sql_constraints = [ _check_only_one_action_defined = models.Constraint(
( '''CHECK(
'check_only_one_action_defined', (action_id IS NOT NULL AND python_method IS NULL)
"""CHECK( OR (action_id IS NULL AND python_method IS NOT NULL)
(action_id IS NOT NULL AND python_method IS NULL) OR )''',
(action_id IS NULL AND python_method IS NOT NULL) "Constraint to ensure that either an XML action or a python_method is defined, but not both.",
)""", )
'Constraint to ensure that either an XML action or a python_method is defined, but not both.' _check_python_method_requires_name = models.Constraint(
), ( 'CHECK(NOT (python_method IS NOT NULL AND name IS NULL))',
'check_python_method_requires_name', "Constraint to ensure that if a python_method is defined, then the name must also be defined.",
"""CHECK(
NOT (python_method IS NOT NULL AND name IS NULL)
)""",
'Constraint to ensure that if a python_method is defined, then the name must also be defined.'
) )
]
@api.model_create_multi @api.model_create_multi
def create(self, vals_list): def create(self, vals_list):
@ -80,7 +75,8 @@ class IrEmbeddedActions(models.Model):
active_model_record = self.env[parent_res_model].search(domain_id, order='id') active_model_record = self.env[parent_res_model].search(domain_id, order='id')
for record in records: for record in records:
action_groups = record.groups_ids action_groups = record.groups_ids
if not action_groups or (action_groups & self.env.user.groups_id): is_valid_method = not record.python_method or hasattr(self.env[parent_res_model], record.python_method)
if is_valid_method and (not action_groups or (action_groups & self.env.user.all_group_ids)):
domain_model = literal_eval(record.domain or '[]') domain_model = literal_eval(record.domain or '[]')
record.is_visible = ( record.is_visible = (
record.parent_res_id in (False, self.env.context.get('active_id', False)) record.parent_res_id in (False, self.env.context.get('active_id', False))
@ -95,7 +91,7 @@ class IrEmbeddedActions(models.Model):
def _unlink_if_action_deletable(self): def _unlink_if_action_deletable(self):
for record in self: for record in self:
if not record.is_deletable: if not record.is_deletable:
raise UserError(_('You cannot delete a default embedded action')) raise UserError(self.env._('You cannot delete a default embedded action'))
def _get_readable_fields(self): def _get_readable_fields(self):
""" return the list of fields that are safe to read """ return the list of fields that are safe to read

View file

@ -5,9 +5,9 @@ from odoo import fields, models
class IrExports(models.Model): class IrExports(models.Model):
_name = "ir.exports" _name = 'ir.exports'
_description = 'Exports' _description = 'Exports'
_order = 'name' _order = 'name, id'
name = fields.Char(string='Export Name') name = fields.Char(string='Export Name')
resource = fields.Char(index=True) resource = fields.Char(index=True)

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details. # Part of Odoo. See LICENSE file for full copyright and licensing details.
import json import json
@ -6,7 +5,6 @@ import functools
import itertools import itertools
from typing import NamedTuple from typing import NamedTuple
import psycopg2
import pytz import pytz
from odoo import api, Command, fields, models from odoo import api, Command, fields, models
@ -42,6 +40,7 @@ class ImportWarning(Warning):
class ConversionNotFound(ValueError): class ConversionNotFound(ValueError):
pass pass
class IrFieldsConverter(models.AbstractModel): class IrFieldsConverter(models.AbstractModel):
_name = 'ir.fields.converter' _name = 'ir.fields.converter'
_description = 'Fields Converter' _description = 'Fields Converter'
@ -75,6 +74,7 @@ class IrFieldsConverter(models.AbstractModel):
The field_path value is computed based on the last field in the chain. The field_path value is computed based on the last field in the chain.
for example, for example,
- path_field for 'Private address' at childA_1 is ['partner_id', 'type'] - path_field for 'Private address' at childA_1 is ['partner_id', 'type']
- path_field for 'childA_1' is ['partner_id'] - path_field for 'childA_1' is ['partner_id']
@ -82,7 +82,7 @@ class IrFieldsConverter(models.AbstractModel):
we can the link the errors to the correct header-field couple in the import UI. we can the link the errors to the correct header-field couple in the import UI.
""" """
field_path = [field] field_path = [field]
parent_fields_hierarchy = self._context.get('parent_fields_hierarchy') parent_fields_hierarchy = self.env.context.get('parent_fields_hierarchy')
if parent_fields_hierarchy: if parent_fields_hierarchy:
field_path = parent_fields_hierarchy + field_path field_path = parent_fields_hierarchy + field_path
@ -95,14 +95,15 @@ class IrFieldsConverter(models.AbstractModel):
return field_path return field_path
@api.model @api.model
def for_model(self, model, fromtype=str): def for_model(self, model, fromtype=str, *, savepoint):
""" Returns a converter object for the model. A converter is a """ Returns a converter object for the model. A converter is a
callable taking a record-ish (a dictionary representing an odoo callable taking a record-ish (a dictionary representing an odoo
record with values of typetag ``fromtype``) and returning a converted record with values of typetag ``fromtype``) and returning a converted
records matching what :meth:`odoo.osv.orm.Model.write` expects. records matching what :meth:`odoo.models.Model.write` expects.
:param model: :class:`odoo.osv.orm.Model` for the conversion base :param model: :class:`odoo.models.Model` for the conversion base
:param fromtype: :param fromtype:
:param savepoint: savepoint to rollback to on error
:returns: a converter callable :returns: a converter callable
:rtype: (record: dict, logger: (field, error) -> None) -> dict :rtype: (record: dict, logger: (field, error) -> None) -> dict
""" """
@ -110,7 +111,7 @@ class IrFieldsConverter(models.AbstractModel):
model = self.env[model._name] model = self.env[model._name]
converters = { converters = {
name: self.to_field(model, field, fromtype) name: self.to_field(model, field, fromtype, savepoint=savepoint)
for name, field in model._fields.items() for name, field in model._fields.items()
} }
@ -155,7 +156,7 @@ class IrFieldsConverter(models.AbstractModel):
return fn return fn
@api.model @api.model
def to_field(self, model, field, fromtype=str): def to_field(self, model, field, fromtype=str, *, savepoint):
""" Fetches a converter for the provided field object, from the """ Fetches a converter for the provided field object, from the
specified type. specified type.
@ -191,6 +192,7 @@ class IrFieldsConverter(models.AbstractModel):
:type field: :class:`odoo.fields.Field` :type field: :class:`odoo.fields.Field`
:param fromtype: type to convert to something fitting for ``field`` :param fromtype: type to convert to something fitting for ``field``
:type fromtype: type | str :type fromtype: type | str
:param savepoint: savepoint to rollback to on errors
:return: a function (fromtype -> field.write_type), if a converter is found :return: a function (fromtype -> field.write_type), if a converter is found
:rtype: Callable | None :rtype: Callable | None
""" """
@ -200,38 +202,37 @@ class IrFieldsConverter(models.AbstractModel):
converter = getattr(self, '_%s_to_%s' % (typename, field.type), None) converter = getattr(self, '_%s_to_%s' % (typename, field.type), None)
if not converter: if not converter:
return None return None
return functools.partial(converter, model, field) return functools.partial(converter, model, field, savepoint=savepoint)
def _str_to_json(self, model, field, value): def _str_to_json(self, model, field, value, savepoint):
try: try:
return json.loads(value), [] return json.loads(value), []
except ValueError: except ValueError:
msg = _("'%s' does not seem to be a valid JSON for field '%%(field)s'") msg = self.env._("'%s' does not seem to be a valid JSON for field '%%(field)s'")
raise self._format_import_error(ValueError, msg, value) raise self._format_import_error(ValueError, msg, value)
def _str_to_properties(self, model, field, value): def _str_to_properties(self, model, field, value, savepoint):
# If we want to import the all properties at once (with the technical value) # If we want to import the all properties at once (with the technical value)
if isinstance(value, str): if isinstance(value, str):
try: try:
value = json.loads(value) value = json.loads(value)
except ValueError: except ValueError:
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.") msg = self.env._("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg) raise self._format_import_error(ValueError, msg)
if not isinstance(value, list): if not isinstance(value, list):
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.") msg = self.env._("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg, {'value': value}) raise self._format_import_error(ValueError, msg, {'value': value})
warnings = [] warnings = []
for property_dict in value: for property_dict in value:
if not (property_dict.keys() >= {'name', 'type', 'string'}): if not (property_dict.keys() >= {'name', 'type', 'string'}):
msg = _("'%(value)s' does not seem to be a valid Property value for field '%%(field)s'. Each property need at least 'name', 'type' and 'string' attribute.") msg = self.env._("'%(value)s' does not seem to be a valid Property value for field '%%(field)s'. Each property need at least 'name', 'type' and 'string' attribute.")
raise self._format_import_error(ValueError, msg, {'value': property_dict}) raise self._format_import_error(ValueError, msg, {'value': property_dict})
val = property_dict.get('value') val = property_dict.get('value')
if not val: if not val:
property_dict.pop('value', None)
continue continue
property_type = property_dict['type'] property_type = property_dict['type']
@ -243,7 +244,7 @@ class IrFieldsConverter(models.AbstractModel):
if val in (sel_val, sel_label) if val in (sel_val, sel_label)
), None) ), None)
if not new_val: if not new_val:
msg = _("'%(value)s' does not seem to be a valid Selection value for '%(label_property)s' (subfield of '%%(field)s' field).") msg = self.env._("'%(value)s' does not seem to be a valid Selection value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']}) raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
property_dict['value'] = new_val property_dict['value'] = new_val
@ -256,17 +257,17 @@ class IrFieldsConverter(models.AbstractModel):
if tag in (tag_val, tag_label) if tag in (tag_val, tag_label)
), None) ), None)
if not val_tag: if not val_tag:
msg = _("'%(value)s' does not seem to be a valid Tag value for '%(label_property)s' (subfield of '%%(field)s' field).") msg = self.env._("'%(value)s' does not seem to be a valid Tag value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': tag, 'label_property': property_dict['string']}) raise self._format_import_error(ValueError, msg, {'value': tag, 'label_property': property_dict['string']})
new_val.append(val_tag) new_val.append(val_tag)
property_dict['value'] = new_val property_dict['value'] = new_val
elif property_type == 'boolean': elif property_type == 'boolean':
new_val, warnings = self._str_to_boolean(model, field, val) new_val, warnings = self._str_to_boolean(model, field, val, savepoint=savepoint)
if not warnings: if not warnings:
property_dict['value'] = new_val property_dict['value'] = new_val
else: else:
msg = _("Unknown value '%(value)s' for boolean '%(label_property)s' property (subfield of '%%(field)s' field).") msg = self.env._("Unknown value '%(value)s' for boolean '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']}) raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type in ('many2one', 'many2many'): elif property_type in ('many2one', 'many2many'):
@ -282,7 +283,7 @@ class IrFieldsConverter(models.AbstractModel):
ids = [] ids = []
fake_field = FakeField(comodel_name=property_dict['comodel'], name=property_dict['string']) fake_field = FakeField(comodel_name=property_dict['comodel'], name=property_dict['string'])
for reference in references: for reference in references:
id_, __, ws = self.db_id_for(model, fake_field, subfield, reference) id_, ws = self.db_id_for(model, fake_field, subfield, reference, savepoint)
ids.append(id_) ids.append(id_)
warnings.extend(ws) warnings.extend(ws)
@ -292,20 +293,20 @@ class IrFieldsConverter(models.AbstractModel):
try: try:
property_dict['value'] = int(val) property_dict['value'] = int(val)
except ValueError: except ValueError:
msg = _("'%(value)s' does not seem to be an integer for field '%(label_property)s' property (subfield of '%%(field)s' field).") msg = self.env._("'%(value)s' does not seem to be an integer for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']}) raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type == 'float': elif property_type == 'float':
try: try:
property_dict['value'] = float(val) property_dict['value'] = float(val)
except ValueError: except ValueError:
msg = _("'%(value)s' does not seem to be an float for field '%(label_property)s' property (subfield of '%%(field)s' field).") msg = self.env._("'%(value)s' does not seem to be an float for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']}) raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
return value, warnings return value, warnings
@api.model @api.model
def _str_to_boolean(self, model, field, value): def _str_to_boolean(self, model, field, value, savepoint):
# all translatables used for booleans # all translatables used for booleans
# potentially broken casefolding? What about locales? # potentially broken casefolding? What about locales?
trues = set(word.lower() for word in itertools.chain( trues = set(word.lower() for word in itertools.chain(
@ -325,89 +326,73 @@ class IrFieldsConverter(models.AbstractModel):
if value.lower() in falses: if value.lower() in falses:
return False, [] return False, []
if field.name in self._context.get('import_skip_records', []): if field.name in self.env.context.get('import_skip_records', []):
return None, [] return None, []
return True, [self._format_import_error( return True, [self._format_import_error(
ValueError, ValueError,
_(u"Unknown value '%s' for boolean field '%%(field)s'"), self.env._("Unknown value '%s' for boolean field '%%(field)s'"),
value, value,
{'moreinfo': _(u"Use '1' for yes and '0' for no")} {'moreinfo': self.env._("Use '1' for yes and '0' for no")}
)] )]
@api.model @api.model
def _str_to_integer(self, model, field, value): def _str_to_integer(self, model, field, value, savepoint):
try: try:
return int(value), [] return int(value), []
except ValueError: except ValueError:
raise self._format_import_error( raise self._format_import_error(
ValueError, ValueError,
_(u"'%s' does not seem to be an integer for field '%%(field)s'"), self.env._("'%s' does not seem to be an integer for field '%%(field)s'"),
value value
) )
@api.model @api.model
def _str_to_float(self, model, field, value): def _str_to_float(self, model, field, value, savepoint):
try: try:
return float(value), [] return float(value), []
except ValueError: except ValueError:
raise self._format_import_error( raise self._format_import_error(
ValueError, ValueError,
_(u"'%s' does not seem to be a number for field '%%(field)s'"), self.env._("'%s' does not seem to be a number for field '%%(field)s'"),
value value
) )
_str_to_monetary = _str_to_float _str_to_monetary = _str_to_float
@api.model @api.model
def _str_id(self, model, field, value): def _str_id(self, model, field, value, savepoint):
return value, [] return value, []
_str_to_reference = _str_to_char = _str_to_text = _str_to_binary = _str_to_html = _str_id _str_to_reference = _str_to_char = _str_to_text = _str_to_binary = _str_to_html = _str_id
@api.model @api.model
def _str_to_date(self, model, field, value): def _str_to_date(self, model, field, value, savepoint):
try: try:
parsed_value = fields.Date.from_string(value) parsed_value = fields.Date.from_string(value)
return fields.Date.to_string(parsed_value), [] return fields.Date.to_string(parsed_value), []
except ValueError: except ValueError:
raise self._format_import_error( raise self._format_import_error(
ValueError, ValueError,
_(u"'%s' does not seem to be a valid date for field '%%(field)s'"), self.env._("'%s' does not seem to be a valid date for field '%%(field)s'"),
value, value,
{'moreinfo': _(u"Use the format '%s'", u"2012-12-31")} {'moreinfo': self.env._("Use the format '%s'", u"2012-12-31")}
) )
@api.model @api.model
def _input_tz(self): def _input_tz(self):
# if there's a tz in context, try to use that return self.env.tz
if self._context.get('tz'):
try:
return pytz.timezone(self._context['tz'])
except pytz.UnknownTimeZoneError:
pass
# if the current user has a tz set, try to use that
user = self.env.user
if user.tz:
try:
return pytz.timezone(user.tz)
except pytz.UnknownTimeZoneError:
pass
# fallback if no tz in context or on user: UTC
return pytz.UTC
@api.model @api.model
def _str_to_datetime(self, model, field, value): def _str_to_datetime(self, model, field, value, savepoint):
try: try:
parsed_value = fields.Datetime.from_string(value) parsed_value = fields.Datetime.from_string(value)
except ValueError: except ValueError:
raise self._format_import_error( raise self._format_import_error(
ValueError, ValueError,
_(u"'%s' does not seem to be a valid datetime for field '%%(field)s'"), self.env._("'%s' does not seem to be a valid datetime for field '%%(field)s'"),
value, value,
{'moreinfo': _(u"Use the format '%s'", u"2012-12-31 23:59:59")} {'moreinfo': self.env._("Use the format '%s'", u"2012-12-31 23:59:59")}
) )
input_tz = self._input_tz()# Apply input tz to the parsed naive datetime input_tz = self._input_tz()# Apply input tz to the parsed naive datetime
@ -419,7 +404,7 @@ class IrFieldsConverter(models.AbstractModel):
def _get_boolean_translations(self, src): def _get_boolean_translations(self, src):
# Cache translations so they don't have to be reloaded from scratch on # Cache translations so they don't have to be reloaded from scratch on
# every row of the file # every row of the file
tnx_cache = self._cr.cache.setdefault(self._name, {}) tnx_cache = self.env.cr.cache.setdefault(self._name, {})
if src in tnx_cache: if src in tnx_cache:
return tnx_cache[src] return tnx_cache[src]
@ -438,7 +423,7 @@ class IrFieldsConverter(models.AbstractModel):
return [] return []
# Cache translations so they don't have to be reloaded from scratch on # Cache translations so they don't have to be reloaded from scratch on
# every row of the file # every row of the file
tnx_cache = self._cr.cache.setdefault(self._name, {}) tnx_cache = self.env.cr.cache.setdefault(self._name, {})
if src in tnx_cache: if src in tnx_cache:
return tnx_cache[src] return tnx_cache[src]
@ -459,7 +444,7 @@ class IrFieldsConverter(models.AbstractModel):
return result return result
@api.model @api.model
def _str_to_selection(self, model, field, value): def _str_to_selection(self, model, field, value, savepoint):
# get untranslated values # get untranslated values
env = self.with_context(lang=None).env env = self.with_context(lang=None).env
selection = field.get_description(env)['selection'] selection = field.get_description(env)['selection']
@ -478,19 +463,19 @@ class IrFieldsConverter(models.AbstractModel):
if value.lower() == str(item).lower() or any(value.lower() == label.lower() for label in labels): if value.lower() == str(item).lower() or any(value.lower() == label.lower() for label in labels):
return item, [] return item, []
if field.name in self._context.get('import_skip_records', []): if field.name in self.env.context.get('import_skip_records', []):
return None, [] return None, []
elif field.name in self._context.get('import_set_empty_fields', []): elif field.name in self.env.context.get('import_set_empty_fields', []):
return False, [] return False, []
raise self._format_import_error( raise self._format_import_error(
ValueError, ValueError,
_(u"Value '%s' not found in selection field '%%(field)s'"), self.env._("Value '%s' not found in selection field '%%(field)s'"),
value, value,
{'moreinfo': [_label or str(item) for item, _label in selection if _label or item]} {'moreinfo': [_label or str(item) for item, _label in selection if _label or item]}
) )
@api.model @api.model
def db_id_for(self, model, field, subfield, value): def db_id_for(self, model, field, subfield, value, savepoint):
""" Finds a database id for the reference ``value`` in the referencing """ Finds a database id for the reference ``value`` in the referencing
subfield ``subfield`` of the provided field of the provided model. subfield ``subfield`` of the provided field of the provided model.
@ -501,6 +486,7 @@ class IrFieldsConverter(models.AbstractModel):
``id`` for an external id and ``.id`` for a database ``id`` for an external id and ``.id`` for a database
id id
:param value: value of the reference to match to an actual record :param value: value of the reference to match to an actual record
:param savepoint: savepoint for rollback on errors
:return: a pair of the matched database identifier (if any), the :return: a pair of the matched database identifier (if any), the
translated user-readable name for the field and the list of translated user-readable name for the field and the list of
warnings warnings
@ -508,7 +494,7 @@ class IrFieldsConverter(models.AbstractModel):
""" """
# the function 'flush' comes from BaseModel.load(), and forces the # the function 'flush' comes from BaseModel.load(), and forces the
# creation/update of former records (batch creation) # creation/update of former records (batch creation)
flush = self._context.get('import_flush', lambda **kw: None) flush = self.env.context.get('import_flush', lambda **kw: None)
id = None id = None
warnings = [] warnings = []
@ -519,7 +505,7 @@ class IrFieldsConverter(models.AbstractModel):
'view_mode': 'list,form', 'view_mode': 'list,form',
'views': [(False, 'list'), (False, 'form')], 'views': [(False, 'list'), (False, 'form')],
'context': {'create': False}, 'context': {'create': False},
'help': _(u"See all possible values")} 'help': self.env._("See all possible values")}
if subfield is None: if subfield is None:
action['res_model'] = field.comodel_name action['res_model'] = field.comodel_name
elif subfield in ('id', '.id'): elif subfield in ('id', '.id'):
@ -528,33 +514,33 @@ class IrFieldsConverter(models.AbstractModel):
RelatedModel = self.env[field.comodel_name] RelatedModel = self.env[field.comodel_name]
if subfield == '.id': if subfield == '.id':
field_type = _(u"database id") field_type = self.env._("database id")
if isinstance(value, str) and not self._str_to_boolean(model, field, value)[0]: if isinstance(value, str) and not self._str_to_boolean(model, field, value, savepoint=savepoint)[0]:
return False, field_type, warnings return False, warnings
try: try:
tentative_id = int(value) tentative_id = int(value)
except ValueError: except ValueError:
raise self._format_import_error( raise self._format_import_error(
ValueError, ValueError,
_(u"Invalid database id '%s' for the field '%%(field)s'"), self.env._("Invalid database id '%s' for the field '%%(field)s'"),
value, value,
{'moreinfo': action}) {'moreinfo': action})
if RelatedModel.browse(tentative_id).exists(): if RelatedModel.browse(tentative_id).exists():
id = tentative_id id = tentative_id
elif subfield == 'id': elif subfield == 'id':
field_type = _(u"external id") field_type = self.env._("external id")
if not self._str_to_boolean(model, field, value)[0]: if not self._str_to_boolean(model, field, value, savepoint=savepoint)[0]:
return False, field_type, warnings return False, warnings
if '.' in value: if '.' in value:
xmlid = value xmlid = value
else: else:
xmlid = "%s.%s" % (self._context.get('_import_current_module', ''), value) xmlid = "%s.%s" % (self.env.context.get('_import_current_module', ''), value)
flush(xml_id=xmlid) flush(xml_id=xmlid)
id = self._xmlid_to_record_id(xmlid, RelatedModel) id = self._xmlid_to_record_id(xmlid, RelatedModel)
elif subfield is None: elif subfield is None:
field_type = _(u"name") field_type = self.env._("name")
if value == '': if value == '':
return False, field_type, warnings return False, warnings
flush(model=field.comodel_name) flush(model=field.comodel_name)
ids = RelatedModel.name_search(name=value, operator='=') ids = RelatedModel.name_search(name=value, operator='=')
if ids: if ids:
@ -569,14 +555,15 @@ class IrFieldsConverter(models.AbstractModel):
name_create_enabled_fields = self.env.context.get('name_create_enabled_fields') or {} name_create_enabled_fields = self.env.context.get('name_create_enabled_fields') or {}
if name_create_enabled_fields.get(field.name): if name_create_enabled_fields.get(field.name):
try: try:
with self.env.cr.savepoint():
id, _name = RelatedModel.name_create(name=value) id, _name = RelatedModel.name_create(name=value)
except (Exception, psycopg2.IntegrityError): RelatedModel.env.flush_all()
error_msg = _("Cannot create new '%s' records from their name alone. Please create those records manually and try importing again.", RelatedModel._description) except Exception: # noqa: BLE001
savepoint.rollback()
error_msg = self.env._("Cannot create new '%s' records from their name alone. Please create those records manually and try importing again.", RelatedModel._description)
else: else:
raise self._format_import_error( raise self._format_import_error(
Exception, Exception,
_("Unknown sub-field “%s", subfield), self.env._("Unknown sub-field “%s", subfield),
) )
set_empty = False set_empty = False
@ -588,9 +575,9 @@ class IrFieldsConverter(models.AbstractModel):
skip_record = field_path in self.env.context.get('import_skip_records', []) skip_record = field_path in self.env.context.get('import_skip_records', [])
if id is None and not set_empty and not skip_record: if id is None and not set_empty and not skip_record:
if error_msg: if error_msg:
message = _("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s' and the following error was encountered when we attempted to create one: %(error_message)s") message = self.env._("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s' and the following error was encountered when we attempted to create one: %(error_message)s")
else: else:
message = _("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s'") message = self.env._("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s'")
error_info_dict = {'moreinfo': action} error_info_dict = {'moreinfo': action}
if self.env.context.get('import_file'): if self.env.context.get('import_file'):
@ -604,7 +591,7 @@ class IrFieldsConverter(models.AbstractModel):
message, message,
{'field_type': field_type, 'value': value, 'error_message': error_msg}, {'field_type': field_type, 'value': value, 'error_message': error_msg},
error_info_dict) error_info_dict)
return id, field_type, warnings return id, warnings
def _xmlid_to_record_id(self, xmlid, model): def _xmlid_to_record_id(self, xmlid, model):
""" Return the record id corresponding to the given external id, """ Return the record id corresponding to the given external id,
@ -645,54 +632,54 @@ class IrFieldsConverter(models.AbstractModel):
fieldset = set(record) fieldset = set(record)
if fieldset - REFERENCING_FIELDS: if fieldset - REFERENCING_FIELDS:
raise ValueError( raise ValueError(
_(u"Can not create Many-To-One records indirectly, import the field separately")) self.env._("Can not create Many-To-One records indirectly, import the field separately"))
if len(fieldset) > 1: if len(fieldset) > 1:
raise ValueError( raise ValueError(
_(u"Ambiguous specification for field '%(field)s', only provide one of name, external id or database id")) self.env._("Ambiguous specification for field '%(field)s', only provide one of name, external id or database id"))
# only one field left possible, unpack # only one field left possible, unpack
[subfield] = fieldset [subfield] = fieldset
return subfield, [] return subfield, []
@api.model @api.model
def _str_to_many2one(self, model, field, values): def _str_to_many2one(self, model, field, values, savepoint):
# Should only be one record, unpack # Should only be one record, unpack
[record] = values [record] = values
subfield, w1 = self._referencing_subfield(record) subfield, w1 = self._referencing_subfield(record)
id, _, w2 = self.db_id_for(model, field, subfield, record[subfield]) id, w2 = self.db_id_for(model, field, subfield, record[subfield], savepoint)
return id, w1 + w2 return id, w1 + w2
@api.model @api.model
def _str_to_many2one_reference(self, model, field, value): def _str_to_many2one_reference(self, model, field, value, savepoint):
return self._str_to_integer(model, field, value) return self._str_to_integer(model, field, value, savepoint)
@api.model @api.model
def _str_to_many2many(self, model, field, value): def _str_to_many2many(self, model, field, value, savepoint):
[record] = value [record] = value
subfield, warnings = self._referencing_subfield(record) subfield, warnings = self._referencing_subfield(record)
ids = [] ids = []
for reference in record[subfield].split(','): for reference in record[subfield].split(','):
id, _, ws = self.db_id_for(model, field, subfield, reference) id, ws = self.db_id_for(model, field, subfield, reference, savepoint)
ids.append(id) ids.append(id)
warnings.extend(ws) warnings.extend(ws)
if field.name in self._context.get('import_set_empty_fields', []) and any([id is None for id in ids]): if field.name in self.env.context.get('import_set_empty_fields', []) and any(id is None for id in ids):
ids = [id for id in ids if id] ids = [id for id in ids if id]
elif field.name in self._context.get('import_skip_records', []) and any([id is None for id in ids]): elif field.name in self.env.context.get('import_skip_records', []) and any(id is None for id in ids):
return None, warnings return None, warnings
if self._context.get('update_many2many'): if self.env.context.get('update_many2many'):
return [Command.link(id) for id in ids], warnings return [Command.link(id) for id in ids], warnings
else: else:
return [Command.set(ids)], warnings return [Command.set(ids)], warnings
@api.model @api.model
def _str_to_one2many(self, model, field, records): def _str_to_one2many(self, model, field, records, savepoint):
name_create_enabled_fields = self._context.get('name_create_enabled_fields') or {} name_create_enabled_fields = self.env.context.get('name_create_enabled_fields') or {}
prefix = field.name + '/' prefix = field.name + '/'
relative_name_create_enabled_fields = { relative_name_create_enabled_fields = {
k[len(prefix):]: v k[len(prefix):]: v
@ -722,12 +709,12 @@ class IrFieldsConverter(models.AbstractModel):
# Complete the field hierarchy path # Complete the field hierarchy path
# E.g. For "parent/child/subchild", field hierarchy path for "subchild" is ['parent', 'child'] # E.g. For "parent/child/subchild", field hierarchy path for "subchild" is ['parent', 'child']
parent_fields_hierarchy = self._context.get('parent_fields_hierarchy', []) + [field.name] parent_fields_hierarchy = self.env.context.get('parent_fields_hierarchy', []) + [field.name]
convert = self.with_context( convert = self.with_context(
name_create_enabled_fields=relative_name_create_enabled_fields, name_create_enabled_fields=relative_name_create_enabled_fields,
parent_fields_hierarchy=parent_fields_hierarchy parent_fields_hierarchy=parent_fields_hierarchy
).for_model(self.env[field.comodel_name]) ).for_model(self.env[field.comodel_name], savepoint=savepoint)
for record in records: for record in records:
id = None id = None
@ -737,7 +724,7 @@ class IrFieldsConverter(models.AbstractModel):
subfield, w1 = self._referencing_subfield(refs) subfield, w1 = self._referencing_subfield(refs)
warnings.extend(w1) warnings.extend(w1)
try: try:
id, _, w2 = self.db_id_for(model, field, subfield, record[subfield]) id, w2 = self.db_id_for(model, field, subfield, record[subfield], savepoint)
warnings.extend(w2) warnings.extend(w2)
except ValueError: except ValueError:
if subfield != 'id': if subfield != 'id':

Some files were not shown because too many files have changed in this diff Show more