19.0 vanilla

This commit is contained in:
Ernad Husremovic 2025-10-03 18:07:25 +02:00
parent 0a7ae8db93
commit 991d2234ca
416 changed files with 646602 additions and 300844 deletions

View file

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import controllers
from . import models
from . import report
from . import wizard

View file

@ -23,6 +23,7 @@ The kernel of Odoo, needed for all installation.
'data/ir_demo_data.xml',
'security/base_groups.xml',
'security/base_security.xml',
'wizard/wizard_ir_model_menu_create_views.xml',
'views/base_menus.xml',
'views/decimal_precision_views.xml',
'views/res_config_views.xml',
@ -67,7 +68,9 @@ The kernel of Odoo, needed for all installation.
'views/res_bank_views.xml',
'views/res_country_views.xml',
'views/res_currency_views.xml',
'views/res_groups_views.xml',
'views/res_users_views.xml',
'views/res_users_apikeys_views.xml',
'views/res_device_views.xml',
'views/res_users_identitycheck_views.xml',
'views/res_config_settings_views.xml',
@ -91,5 +94,6 @@ The kernel of Odoo, needed for all installation.
'installable': True,
'auto_install': True,
'post_init_hook': 'post_init',
'author': 'Odoo S.A.',
'license': 'LGPL-3',
}

View file

@@ -1 +0,0 @@
from . import rpc

View file

@@ -1,178 +0,0 @@
import re
import sys
import traceback
import xmlrpc.client
from datetime import date, datetime
from collections import defaultdict
from markupsafe import Markup
import odoo
from odoo.http import Controller, route, dispatch_rpc, request, Response
from odoo.fields import Date, Datetime, Command
from odoo.tools import lazy
from odoo.tools.misc import frozendict
# ==========================================================
# XML-RPC helpers
# ==========================================================
# XML-RPC fault codes. Some care must be taken when changing these: the
# constants are also defined client-side and must remain in sync.
# User code must use the exceptions defined in ``odoo.exceptions`` (not
# create directly ``xmlrpc.client.Fault`` objects).
RPC_FAULT_CODE_CLIENT_ERROR = 1 # indistinguishable from app. error.
RPC_FAULT_CODE_APPLICATION_ERROR = 1
RPC_FAULT_CODE_WARNING = 2
RPC_FAULT_CODE_ACCESS_DENIED = 3
RPC_FAULT_CODE_ACCESS_ERROR = 4
# 0 to 31, excluding tab, newline, and carriage return
CONTROL_CHARACTERS = dict.fromkeys(set(range(32)) - {9, 10, 13})
def xmlrpc_handle_exception_int(e):
if isinstance(e, odoo.exceptions.RedirectWarning):
fault = xmlrpc.client.Fault(RPC_FAULT_CODE_WARNING, str(e))
elif isinstance(e, odoo.exceptions.AccessError):
fault = xmlrpc.client.Fault(RPC_FAULT_CODE_ACCESS_ERROR, str(e))
elif isinstance(e, odoo.exceptions.AccessDenied):
fault = xmlrpc.client.Fault(RPC_FAULT_CODE_ACCESS_DENIED, str(e))
elif isinstance(e, odoo.exceptions.UserError):
fault = xmlrpc.client.Fault(RPC_FAULT_CODE_WARNING, str(e))
else:
info = sys.exc_info()
formatted_info = "".join(traceback.format_exception(*info))
fault = xmlrpc.client.Fault(RPC_FAULT_CODE_APPLICATION_ERROR, formatted_info)
return dumps(fault)
def xmlrpc_handle_exception_string(e):
if isinstance(e, odoo.exceptions.RedirectWarning):
fault = xmlrpc.client.Fault('warning -- Warning\n\n' + str(e), '')
elif isinstance(e, odoo.exceptions.MissingError):
fault = xmlrpc.client.Fault('warning -- MissingError\n\n' + str(e), '')
elif isinstance(e, odoo.exceptions.AccessError):
fault = xmlrpc.client.Fault('warning -- AccessError\n\n' + str(e), '')
elif isinstance(e, odoo.exceptions.AccessDenied):
fault = xmlrpc.client.Fault('AccessDenied', str(e))
elif isinstance(e, odoo.exceptions.UserError):
fault = xmlrpc.client.Fault('warning -- UserError\n\n' + str(e), '')
#InternalError
else:
info = sys.exc_info()
formatted_info = "".join(traceback.format_exception(*info))
fault = xmlrpc.client.Fault(odoo.tools.exception_to_unicode(e), formatted_info)
return dumps(fault)
class OdooMarshaller(xmlrpc.client.Marshaller):
dispatch = dict(xmlrpc.client.Marshaller.dispatch)
def dump_frozen_dict(self, value, write):
value = dict(value)
self.dump_struct(value, write)
# By default, in xmlrpc, bytes are converted to xmlrpc.client.Binary object.
# Historically, odoo is sending binary as base64 string.
# In python 3, base64.b64{de,en}code() methods now works on bytes.
def dump_bytes(self, value, write):
self.dump_unicode(value.decode(), write)
def dump_datetime(self, value, write):
# override to marshall as a string for backwards compatibility
value = Datetime.to_string(value)
self.dump_unicode(value, write)
# convert date objects to strings in iso8061 format.
def dump_date(self, value, write):
value = Date.to_string(value)
self.dump_unicode(value, write)
def dump_lazy(self, value, write):
v = value._value
return self.dispatch[type(v)](self, v, write)
def dump_unicode(self, value, write):
# XML 1.0 disallows control characters, remove them otherwise they break clients
return super().dump_unicode(value.translate(CONTROL_CHARACTERS), write)
dispatch[frozendict] = dump_frozen_dict
dispatch[bytes] = dump_bytes
dispatch[datetime] = dump_datetime
dispatch[date] = dump_date
dispatch[lazy] = dump_lazy
dispatch[str] = dump_unicode
dispatch[Command] = dispatch[int]
dispatch[defaultdict] = dispatch[dict]
dispatch[Markup] = lambda self, value, write: self.dispatch[str](self, str(value), write)
def dumps(params: list | tuple | xmlrpc.client.Fault) -> str:
response = OdooMarshaller(allow_none=False).dumps(params)
return f"""\
<?xml version="1.0"?>
<methodResponse>
{response}
</methodResponse>
"""
# ==========================================================
# RPC Controller
# ==========================================================
def _check_request():
if request.db:
request.env.cr.close()
class RPC(Controller):
"""Handle RPC connections."""
def _xmlrpc(self, service):
"""Common method to handle an XML-RPC request."""
_check_request()
data = request.httprequest.get_data()
params, method = xmlrpc.client.loads(data, use_datetime=True)
result = dispatch_rpc(service, method, params)
return dumps((result,))
@route("/xmlrpc/<service>", auth="none", methods=["POST"], csrf=False, save_session=False)
def xmlrpc_1(self, service):
"""XML-RPC service that returns faultCode as strings.
This entrypoint is historical and non-compliant, but kept for
backwards-compatibility.
"""
_check_request()
try:
response = self._xmlrpc(service)
except Exception as error:
error.error_response = Response(
response=xmlrpc_handle_exception_string(error),
mimetype='text/xml',
)
raise
return Response(response=response, mimetype='text/xml')
@route("/xmlrpc/2/<service>", auth="none", methods=["POST"], csrf=False, save_session=False)
def xmlrpc_2(self, service):
"""XML-RPC service that returns faultCode as int."""
_check_request()
try:
response = self._xmlrpc(service)
except Exception as error:
error.error_response = Response(
response=xmlrpc_handle_exception_int(error),
mimetype='text/xml',
)
raise
return Response(response=response, mimetype='text/xml')
@route('/jsonrpc', type='json', auth="none", save_session=False)
def jsonrpc(self, service, method, args):
""" Method used by client APIs to contact OpenERP. """
_check_request()
return dispatch_rpc(service, method, args)

View file

@ -3,13 +3,13 @@
<data>
<record model="ir.module.category" id="module_category_hidden">
<field name="name">Technical</field>
<field name="sequence">60</field>
<field name="sequence">70</field>
<field name="visible" eval="0" />
</record>
<record model="ir.module.category" id="module_category_accounting">
<field name="name">Accounting</field>
<field name="sequence">15</field>
<field name="sequence">20</field>
</record>
<record model="ir.module.category" id="module_category_accounting_localizations">
@ -30,12 +30,6 @@
<field name="visible" eval="0" />
</record>
<record model="ir.module.category" id="module_category_user_type">
<field name="name">User types</field>
<field name="description">Helps you manage users.</field>
<field name="sequence">9</field>
</record>
<record model="ir.module.category" id="module_category_accounting_accounting">
<field name="name">Invoicing</field>
<field name="sequence">4</field>
@ -56,14 +50,19 @@
<field name="sequence">40</field>
</record>
<record model="ir.module.category" id="module_category_manufacturing">
<field name="name">Manufacturing</field>
<field name="sequence">30</field>
<record model="ir.module.category" id="module_category_supply_chain">
<field name="name">Supply Chain</field>
<field name="sequence">25</field>
</record>
<record model="ir.module.category" id="module_category_shipping_connectors">
<field name="name">Shipping Connectors</field>
<field name="sequence">50</field>
</record>
<record model="ir.module.category" id="module_category_website">
<field name="name">Website</field>
<field name="sequence">35</field>
<field name="sequence">10</field>
</record>
<record model="ir.module.category" id="module_category_theme">
@ -78,6 +77,14 @@
<field name="parent_id" eval="False"/>
</record>
<record model="ir.module.category" id="base.module_category_human_resources_referrals">
<field name="name">Referrals</field>
<field name="description">Helps you manage referrals
User : Access to referral, share job, gain points, buy rewards
Administrator : edit rewards and more</field>
<field name="sequence">11</field>
</record>
<record model="ir.module.category" id="module_category_human_resources_appraisals">
<field name="name">Appraisals</field>
<field name="description">A user without any rights on Appraisals will be able to see the application, create and manage appraisals for himself and the people he's manager of.</field>
@ -92,7 +99,7 @@
<record model="ir.module.category" id="module_category_services">
<field name="name">Services</field>
<field name="sequence">10</field>
<field name="sequence">15</field>
</record>
<record model="ir.module.category" id="module_category_services_helpdesk">
@ -111,14 +118,14 @@
<field name="parent_id" ref="module_category_services"/>
</record>
<record model="ir.module.category" id="module_category_inventory">
<field name="name">Inventory</field>
<field name="sequence">25</field>
</record>
<record model="ir.module.category" id="module_category_productivity">
<field name="name">Productivity</field>
<field name="sequence">50</field>
<field name="sequence">30</field>
</record>
<record model="ir.module.category" id="module_category_esg">
<field name="name">ESG</field>
<field name="sequence">52</field>
</record>
<record model="ir.module.category" id="module_category_customizations">
@ -126,56 +133,20 @@
<field name="sequence">55</field>
</record>
<record model="ir.module.category" id="module_category_internet_of_things_(iot)">
<field name="name">Internet of Things (IoT)</field>
<field name="sequence">60</field>
</record>
<record model="ir.module.category" id="module_category_administration_administration">
<field name="name">Administration</field>
<field name="parent_id" ref="module_category_administration"/>
</record>
<record model="ir.module.category" id="module_category_usability">
<field name="name">Extra Rights</field>
<field name="sequence">101</field>
</record>
<record model="ir.module.category" id="module_category_extra">
<field name="name">Other Extra Rights</field>
<field name="sequence">102</field>
</record>
<!-- add applications to base groups -->
<record model="res.groups" id="group_erp_manager">
<field name="category_id" ref="module_category_administration_administration"/>
</record>
<record model="res.groups" id="group_system">
<field name="category_id" ref="module_category_administration_administration"/>
</record>
<record model="res.groups" id="group_user">
<field name="category_id" ref="module_category_user_type"/>
</record>
<record model="res.groups" id="group_multi_company">
<field name="category_id" ref="module_category_usability"/>
</record>
<record model="res.groups" id="group_multi_currency">
<field name="category_id" ref="module_category_usability"/>
</record>
<record model="res.groups" id="group_no_one">
<field name="category_id" ref="module_category_usability"/>
</record>
<record id="group_portal" model="res.groups">
<field name="category_id" ref="module_category_user_type"/>
</record>
<record id="group_public" model="res.groups">
<field name="category_id" ref="module_category_user_type"/>
</record>
<record id="group_partner_manager" model="res.groups">
<field name="category_id" ref="module_category_usability"/>
</record>
</data>
</odoo>

View file

@ -117,7 +117,7 @@
<record model="ir.module.module" id="base.module_mrp_plm">
<field name="name">mrp_plm</field>
<field name="shortdesc">Product Lifecycle Management (PLM)</field>
<field name="category_id" ref="base.module_category_manufacturing_manufacturing"/>
<field name="category_id" ref="base.module_category_supply_chain"/>
<field name="sequence">155</field>
<field name="application" eval="True"/>
<field name="summary">PLM, ECOs, Versions</field>
@ -132,7 +132,7 @@
<field name="name">quality_control</field>
<field name="shortdesc">Quality</field>
<field name="sequence">120</field>
<field name="category_id" ref="base.module_category_manufacturing_manufacturing"/>
<field name="category_id" ref="base.module_category_supply_chain"/>
<field name="application" eval="True"/>
<field name="summary">Quality Alerts, Control Points</field>
<field name="license">OEEL-1</field>
@ -202,7 +202,7 @@
<field name="name">stock_barcode</field>
<field name="shortdesc">Barcode</field>
<field name="sequence">255</field>
<field name="category_id" ref="base.module_category_inventory_inventory"/>
<field name="category_id" ref="base.module_category_supply_chain"/>
<field name="application" eval="True"/>
<field name="summary">Barcode scanner for warehouses</field>
<field name="license">OEEL-1</field>
@ -214,7 +214,7 @@
<record model="ir.module.module" id="base.module_voip">
<field name="name">voip</field>
<field name="shortdesc">VoIP</field>
<field name="shortdesc">Phone</field>
<field name="sequence">280</field>
<field name="category_id" ref="base.module_category_sales_sales"/>
<field name="application" eval="True"/>
@ -258,7 +258,7 @@
<field name="name">mrp_workorder</field>
<field name="sequence">16</field>
<field name="shortdesc">MRP II</field>
<field name="category_id" ref="base.module_category_manufacturing_manufacturing"/>
<field name="category_id" ref="base.module_category_supply_chain"/>
<field name="application" eval="True"/>
<field name="summary">Work Orders, Planning, Routing</field>
<field name="license">OEEL-1</field>

View file

@ -638,7 +638,7 @@ state_id_pb,id,"Papua Barat","PB"
state_id_pd,id,"Papua Barat Daya","PD"
state_id_ps,id,"Papua Selatan","PS"
state_id_pt,id,"Papua Tengah","PT"
state_id_pp,id,"Papua Pegunungan","PE"
state_id_pe,id,"Papua Pegunungan","PE"
state_id_ri,id,"Riau","RI"
state_id_sr,id,"Sulawesi Barat","SR"
state_id_sn,id,"Sulawesi Selatan","SN"
@ -1047,16 +1047,7 @@ state_tr_79,tr,"Kilis","79"
state_tr_80,tr,"Osmaniye","80"
state_tr_81,tr,"Düzce","81"
state_vn_VN-44,vn,"An Giang","VN-44"
state_vn_VN-57,vn,"Bình Dương","VN-57"
state_vn_VN-31,vn,"Bình Định","VN-31"
state_vn_VN-54,vn,"Bắc Giang","VN-54"
state_vn_VN-53,vn,"Bắc Kạn","VN-53"
state_vn_VN-55,vn,"Bạc Liêu","VN-55"
state_vn_VN-56,vn,"Bắc Ninh","VN-56"
state_vn_VN-58,vn,"Bình Phước","VN-58"
state_vn_VN-43,vn,"Bà Rịa - Vũng Tàu","VN-43"
state_vn_VN-40,vn,"Bình Thuận","VN-40"
state_vn_VN-50,vn,"Bến Tre","VN-50"
state_vn_VN-04,vn,"Cao Bằng","VN-04"
state_vn_VN-59,vn,"Cà Mau","VN-59"
state_vn_VN-CT,vn,"TP Cần Thơ","VN-CT"
@ -1064,51 +1055,31 @@ state_vn_VN-71,vn,"Điện Biên","VN-71"
state_vn_VN-33,vn,"Đắk Lắk","VN-33"
state_vn_VN-DN,vn,"TP Đà Nẵng","VN-DN"
state_vn_VN-39,vn,"Đồng Nai","VN-39"
state_vn_VN-72,vn,"Đắk Nông","VN-72"
state_vn_VN-45,vn,"Đồng Tháp","VN-45"
state_vn_VN-30,vn,"Gia Lai","VN-30"
state_vn_VN-14,vn,"Hòa Bình","VN-14"
state_vn_VN-SG,vn,"TP Hồ Chí Minh","VN-SG"
state_vn_VN-61,vn,"Hải Dương","VN-61"
state_vn_VN-73,vn,"Hậu Giang","VN-73"
state_vn_VN-03,vn,"Hà Giang","VN-03"
state_vn_VN-HN,vn,"Hà Nội","VN-HN"
state_vn_VN-63,vn,"Hà Nam","VN-63"
state_vn_VN-HP,vn,"TP Hải Phòng","VN-HP"
state_vn_VN-23,vn,"Hà Tĩnh","VN-23"
state_vn_VN-66,vn,"Hưng Yên","VN-66"
state_vn_VN-47,vn,"Kiên Giang","VN-47"
state_vn_VN-34,vn,"Khánh Hòa","VN-34"
state_vn_VN-28,vn,"Kon Tum","VN-28"
state_vn_VN-41,vn,"Long An","VN-41"
state_vn_VN-02,vn,"Lào Cai","VN-02"
state_vn_VN-01,vn,"Lai Châu","VN-01"
state_vn_VN-35,vn,"Lâm Đồng","VN-35"
state_vn_VN-09,vn,"Lạng Sơn","VN-09"
state_vn_VN-22,vn,"Nghệ An","VN-22"
state_vn_VN-18,vn,"Ninh Bình","VN-18"
state_vn_VN-67,vn,"Nam Định","VN-67"
state_vn_VN-36,vn,"Ninh Thuận","VN-36"
state_vn_VN-68,vn,"Phú Thọ","VN-68"
state_vn_VN-32,vn,"Phú Yên","VN-32"
state_vn_VN-24,vn,"Quảng Bình","VN-24"
state_vn_VN-13,vn,"Quảng Ninh","VN-13"
state_vn_VN-27,vn,"Quảng Nam","VN-27"
state_vn_VN-29,vn,"Quảng Ngãi","VN-29"
state_vn_VN-25,vn,"Quảng Trị","VN-25"
state_vn_VN-05,vn,"Sơn La","VN-05"
state_vn_VN-52,vn,"Sóc Trăng","VN-52"
state_vn_VN-20,vn,"Thái Bình","VN-20"
state_vn_VN-46,vn,"Tiền Giang","VN-46"
state_vn_VN-21,vn,"Thanh Hóa","VN-21"
state_vn_VN-69,vn,"Thái Nguyên","VN-69"
state_vn_VN-37,vn,"Tây Ninh","VN-37"
state_vn_VN-07,vn,"Tuyên Quang","VN-07"
state_vn_VN-26,vn,"Thừa Thiên - Huế","VN-26"
state_vn_VN-51,vn,"Trà Vinh","VN-51"
state_vn_VN-49,vn,"Vĩnh Long","VN-49"
state_vn_VN-70,vn,"Vĩnh Phúc","VN-70"
state_vn_VN-06,vn,"Yên Bái","VN-06"
state_SJ,cr,"San José","1"
state_A,cr,"Alajuela","2"
state_H,cr,"Heredia","4"
@ -1765,6 +1736,141 @@ state_kr_47,kr,"경상북도","KR-47"
state_kr_48,kr,"경상남도","KR-48"
state_kr_49,kr,"제주특별자치도","KR-49"
state_kr_50,kr,"세종특별자치시","KR-50"
state_pl_ds,pl,"dolnośląskie",""
state_pl_kp,pl,"kujawsko-pomorskie","KP"
state_pl_lb,pl,"lubelskie","LB"
state_pl_ls,pl,"lubuskie","LS"
state_pl_ld,pl,"łódzkie","ŁD"
state_pl_mp,pl,"małopolskie","MP"
state_pl_mz,pl,"mazowieckie","MZ"
state_pl_op,pl,"opolskie","OP"
state_pl_pk,pl,"podkarpackie","PK"
state_pl_pl,pl,"podlaskie","PL"
state_pl_pm,pl,"pomorskie","PM"
state_pl_sl,pl,"śląskie","ŚL"
state_pl_sk,pl,"świętokrzyskie","ŚK"
state_pl_wm,pl,"warmińsko-mazurskie","WM"
state_pl_wp,pl,"wielkopolskie","WP"
state_pl_zp,pl,"zachodniopomorskie","ZP"
state_pk_ajk,pk,"Azad Jammu and Kashmir","AJK"
state_pk_ba,pk,"Balochistan","BA"
state_pk_gb,pk,"Gilgit-Baltistan","GB"
state_pk_is,pk,"Islamabad Capital Territory","IS/ICT"
state_pk_kp,pk,"Khyber Pakhtunkhwa","KP/KPK"
state_pk_pb,pk,"Punjab","PB"
state_pk_sd,pk,"Sindh","SD"
state_iq_01,iq,"Al Anbar","IQ-AN"
state_iq_01_ar,iq,"الأنبار","IQ-AN-AR"
state_iq_02,iq,"Al Basrah","IQ-BA"
state_iq_02_ar,iq,"البصرة","IQ-BA-AR"
state_iq_03,iq,"Al Muthanna","IQ-MU"
state_iq_03_ar,iq,"المثنى","IQ-MU-AR"
state_iq_04,iq,"Al Qādisiyyah","IQ-QA"
state_iq_04_ar,iq,"القادسية","IQ-QA-AR"
state_iq_05,iq,"Sulaymaniyah","IQ-SU"
state_iq_05_ar,iq,"السليمانية","IQ-SU-AR"
state_iq_06,iq,"Babil","IQ-BB"
state_iq_06_ar,iq,"بابل","IQ-BB-AR"
state_iq_07,iq,"Baghdad","IQ-BG"
state_iq_07_ar,iq,"بغداد","IQ-BG-AR"
state_iq_08,iq,"Duhok","IQ-DA"
state_iq_08_ar,iq,"دهوك","IQ-DA-AR"
state_iq_09,iq,"Dhi Qar","IQ-DQ"
state_iq_09_ar,iq,"ذي قار","IQ-DQ-AR"
state_iq_10,iq,"Diyala","IQ-DI"
state_iq_10_ar,iq,"ديالى","IQ-DI-AR"
state_iq_11,iq,"Erbil","IQ-AR"
state_iq_11_ar,iq,"أربيل","IQ-AR-AR"
state_iq_12,iq,"Karbala'","IQ-KA"
state_iq_12_ar,iq,"كربلاء","IQ-KA-AR"
state_iq_13,iq,"Kirkuk","IQ-KI"
state_iq_13_ar,iq,"كركوك","IQ-KI-AR"
state_iq_14,iq,"Maysan","IQ-MA"
state_iq_14_ar,iq,"ميسان","IQ-MA-AR"
state_iq_15,iq,"Ninawa","IQ-NI"
state_iq_15_ar,iq,"نينوى","IQ-NI-AR"
state_iq_16,iq,"Wasit","IQ-WA"
state_iq_16_ar,iq,"واسط","IQ-WA-AR"
state_iq_17,iq,"Najaf","IQ-NA"
state_iq_17_ar,iq,"النجف","IQ-NA-AR"
state_iq_18,iq,"Salah Al Din","IQ-SD"
state_iq_18_ar,iq,"صلاح الدين","IQ-SD-AR"
state_bd_a,bd,"Barishal","BD-A"
state_bd_b,bd,"Chattogram","BD-B"
state_bd_c,bd,"Dhaka","BD-C"
state_bd_d,bd,"Khulna","BD-D"
state_bd_e,bd,"Rajshahi","BD-E"
state_bd_f,bd,"Rangpur","BD-F"
state_bd_g,bd,"Sylhet","BD-G"
state_bd_h,bd,"Mymensingh","BD-H"
state_at_1,at,"Burgenland","1"
state_at_2,at,"Kärnten","2"
state_at_3,at,"Niederösterreich","3"
state_at_4,at,"Oberösterreich","4"
state_at_5,at,"Salzburg","5"
state_at_6,at,"Steiermark","6"
state_at_7,at,"Tirol","7"
state_at_8,at,"Vorarlberg","8"
state_at_9,at,"Wien","9"
state_tw_chh,tw,彰化縣,CHH
state_tw_cic,tw,嘉義市,CIC
state_tw_cih,tw,嘉義縣,CIH
state_tw_hch,tw,新竹縣,HCH
state_tw_hct,tw,新竹市,HCT
state_tw_hlh,tw,花蓮縣,HLH
state_tw_ilh,tw,宜蘭縣,ILH
state_tw_khc,tw,高雄市,KHC
state_tw_klc,tw,基隆市,KLC
state_tw_kmc,tw,金門縣,KMC
state_tw_lcc,tw,連江縣,LCC
state_tw_mlh,tw,苗栗縣,MLH
state_tw_ntc,tw,南投縣,NTC
state_tw_ntpc,tw,新北市,NTPC
state_tw_phc,tw,澎湖縣,PHC
state_tw_pth,tw,屏東縣,PTH
state_tw_tcc,tw,台中市,TCC
state_tw_tnh,tw,台南市,TNH
state_tw_tpc,tw,台北市,TPC
state_tw_tth,tw,台東縣,TTH
state_tw_tyc,tw,桃園市,TYC
state_tw_ylh,tw,雲林縣,YLH
state_ng_ab,ng,"Abia","NG-AB"
state_ng_ad,ng,"Adamawa","NG-AD"
state_ng_ak,ng,"Akwa Ibom","NG-AK"
state_ng_an,ng,"Anambra","NG-AN"
state_ng_ba,ng,"Bauchi","NG-BA"
state_ng_by,ng,"Bayelsa","NG-BY"
state_ng_be,ng,"Benue","NG-BE"
state_ng_bo,ng,"Borno","NG-BO"
state_ng_cr,ng,"Cross River","NG-CR"
state_ng_de,ng,"Delta","NG-DE"
state_ng_eb,ng,"Ebonyi","NG-EB"
state_ng_ed,ng,"Edo","NG-ED"
state_ng_ek,ng,"Ekiti","NG-EK"
state_ng_en,ng,"Enugu","NG-EN"
state_ng_go,ng,"Gombe","NG-GO"
state_ng_im,ng,"Imo","NG-IM"
state_ng_ji,ng,"Jigawa","NG-JI"
state_ng_ka,ng,"Kaduna","NG-KD"
state_ng_kn,ng,"Kano","NG-KN"
state_ng_kt,ng,"Katsina","NG-KT"
state_ng_ke,ng,"Kebbi","NG-KE"
state_ng_ko,ng,"Kogi","NG-KO"
state_ng_kw,ng,"Kwara","NG-KW"
state_ng_la,ng,"Lagos","NG-LA"
state_ng_na,ng,"Nasarawa","NG-NA"
state_ng_ni,ng,"Niger","NG-NI"
state_ng_og,ng,"Ogun","NG-OG"
state_ng_on,ng,"Ondo","NG-ON"
state_ng_os,ng,"Osun","NG-OS"
state_ng_oy,ng,"Oyo","NG-OY"
state_ng_pl,ng,"Plateau","NG-PL"
state_ng_ri,ng,"Rivers","NG-RI"
state_ng_so,ng,"Sokoto","NG-SO"
state_ng_ta,ng,"Taraba","NG-TA"
state_ng_yo,ng,"Yobe","NG-YO"
state_ng_za,ng,"Zamfara","NG-ZA"
state_ng_fc,ng,"FCT","NG-FC"
state_be_1,be,"Antwerp","VAN"
state_be_2,be,"Limburg","VLI"
state_be_3,be,"East Flanders","VOV"
@ -1775,6 +1881,14 @@ state_be_7,be,"Hainaut","WHT"
state_be_8,be,"Liège","WLG"
state_be_9,be,"Luxembourg","WLX"
state_be_10,be,"Namur","WNA"
state_so_1,so,"Banaadir","BN"
state_so_2,so,"Galmudug","GM"
state_so_3,so,"Somaliland","SL"
state_so_4,so,"Puntland","PL"
state_so_5,so,"Jubaland","JL"
state_so_6,so,"Hirshabelle","HS"
state_so_7,so,"Koonfur Galbeed","KG"
state_so_8,so,"Khatumo","SSC"
state_bn_b,bn,"Brunei-Muara","B"
state_bn_k,bn,"Belait","K"
state_bn_t,bn,"Tutong","T"

1 id country_id:id name code
638 state_id_pd id Papua Barat Daya PD
639 state_id_ps id Papua Selatan PS
640 state_id_pt id Papua Tengah PT
641 state_id_pp state_id_pe id Papua Pegunungan PE
642 state_id_ri id Riau RI
643 state_id_sr id Sulawesi Barat SR
644 state_id_sn id Sulawesi Selatan SN
1047 state_tr_80 tr Osmaniye 80
1048 state_tr_81 tr Düzce 81
1049 state_vn_VN-44 vn An Giang VN-44
state_vn_VN-57 vn Bình Dương VN-57
state_vn_VN-31 vn Bình Định VN-31
state_vn_VN-54 vn Bắc Giang VN-54
state_vn_VN-53 vn Bắc Kạn VN-53
state_vn_VN-55 vn Bạc Liêu VN-55
1050 state_vn_VN-56 vn Bắc Ninh VN-56
state_vn_VN-58 vn Bình Phước VN-58
state_vn_VN-43 vn Bà Rịa - Vũng Tàu VN-43
state_vn_VN-40 vn Bình Thuận VN-40
state_vn_VN-50 vn Bến Tre VN-50
1051 state_vn_VN-04 vn Cao Bằng VN-04
1052 state_vn_VN-59 vn Cà Mau VN-59
1053 state_vn_VN-CT vn TP Cần Thơ VN-CT
1055 state_vn_VN-33 vn Đắk Lắk VN-33
1056 state_vn_VN-DN vn TP Đà Nẵng VN-DN
1057 state_vn_VN-39 vn Đồng Nai VN-39
state_vn_VN-72 vn Đắk Nông VN-72
1058 state_vn_VN-45 vn Đồng Tháp VN-45
1059 state_vn_VN-30 vn Gia Lai VN-30
state_vn_VN-14 vn Hòa Bình VN-14
1060 state_vn_VN-SG vn TP Hồ Chí Minh VN-SG
state_vn_VN-61 vn Hải Dương VN-61
state_vn_VN-73 vn Hậu Giang VN-73
state_vn_VN-03 vn Hà Giang VN-03
1061 state_vn_VN-HN vn Hà Nội VN-HN
state_vn_VN-63 vn Hà Nam VN-63
1062 state_vn_VN-HP vn TP Hải Phòng VN-HP
1063 state_vn_VN-23 vn Hà Tĩnh VN-23
1064 state_vn_VN-66 vn Hưng Yên VN-66
state_vn_VN-47 vn Kiên Giang VN-47
1065 state_vn_VN-34 vn Khánh Hòa VN-34
state_vn_VN-28 vn Kon Tum VN-28
state_vn_VN-41 vn Long An VN-41
1066 state_vn_VN-02 vn Lào Cai VN-02
1067 state_vn_VN-01 vn Lai Châu VN-01
1068 state_vn_VN-35 vn Lâm Đồng VN-35
1069 state_vn_VN-09 vn Lạng Sơn VN-09
1070 state_vn_VN-22 vn Nghệ An VN-22
1071 state_vn_VN-18 vn Ninh Bình VN-18
state_vn_VN-67 vn Nam Định VN-67
state_vn_VN-36 vn Ninh Thuận VN-36
1072 state_vn_VN-68 vn Phú Thọ VN-68
state_vn_VN-32 vn Phú Yên VN-32
state_vn_VN-24 vn Quảng Bình VN-24
1073 state_vn_VN-13 vn Quảng Ninh VN-13
state_vn_VN-27 vn Quảng Nam VN-27
1074 state_vn_VN-29 vn Quảng Ngãi VN-29
1075 state_vn_VN-25 vn Quảng Trị VN-25
1076 state_vn_VN-05 vn Sơn La VN-05
state_vn_VN-52 vn Sóc Trăng VN-52
state_vn_VN-20 vn Thái Bình VN-20
state_vn_VN-46 vn Tiền Giang VN-46
1077 state_vn_VN-21 vn Thanh Hóa VN-21
1078 state_vn_VN-69 vn Thái Nguyên VN-69
1079 state_vn_VN-37 vn Tây Ninh VN-37
1080 state_vn_VN-07 vn Tuyên Quang VN-07
1081 state_vn_VN-26 vn Thừa Thiên - Huế VN-26
state_vn_VN-51 vn Trà Vinh VN-51
1082 state_vn_VN-49 vn Vĩnh Long VN-49
state_vn_VN-70 vn Vĩnh Phúc VN-70
state_vn_VN-06 vn Yên Bái VN-06
1083 state_SJ cr San José 1
1084 state_A cr Alajuela 2
1085 state_H cr Heredia 4
1736 state_kr_48 kr 경상남도 KR-48
1737 state_kr_49 kr 제주특별자치도 KR-49
1738 state_kr_50 kr 세종특별자치시 KR-50
1739 state_pl_ds pl dolnośląskie
1740 state_pl_kp pl kujawsko-pomorskie KP
1741 state_pl_lb pl lubelskie LB
1742 state_pl_ls pl lubuskie LS
1743 state_pl_ld pl łódzkie ŁD
1744 state_pl_mp pl małopolskie MP
1745 state_pl_mz pl mazowieckie MZ
1746 state_pl_op pl opolskie OP
1747 state_pl_pk pl podkarpackie PK
1748 state_pl_pl pl podlaskie PL
1749 state_pl_pm pl pomorskie PM
1750 state_pl_sl pl śląskie ŚL
1751 state_pl_sk pl świętokrzyskie ŚK
1752 state_pl_wm pl warmińsko-mazurskie WM
1753 state_pl_wp pl wielkopolskie WP
1754 state_pl_zp pl zachodniopomorskie ZP
1755 state_pk_ajk pk Azad Jammu and Kashmir AJK
1756 state_pk_ba pk Balochistan BA
1757 state_pk_gb pk Gilgit-Baltistan GB
1758 state_pk_is pk Islamabad Capital Territory IS/ICT
1759 state_pk_kp pk Khyber Pakhtunkhwa KP/KPK
1760 state_pk_pb pk Punjab PB
1761 state_pk_sd pk Sindh SD
1762 state_iq_01 iq Al Anbar IQ-AN
1763 state_iq_01_ar iq الأنبار IQ-AN-AR
1764 state_iq_02 iq Al Basrah IQ-BA
1765 state_iq_02_ar iq البصرة IQ-BA-AR
1766 state_iq_03 iq Al Muthanna IQ-MU
1767 state_iq_03_ar iq المثنى IQ-MU-AR
1768 state_iq_04 iq Al Qādisiyyah IQ-QA
1769 state_iq_04_ar iq القادسية IQ-QA-AR
1770 state_iq_05 iq Sulaymaniyah IQ-SU
1771 state_iq_05_ar iq السليمانية IQ-SU-AR
1772 state_iq_06 iq Babil IQ-BB
1773 state_iq_06_ar iq بابل IQ-BB-AR
1774 state_iq_07 iq Baghdad IQ-BG
1775 state_iq_07_ar iq بغداد IQ-BG-AR
1776 state_iq_08 iq Duhok IQ-DA
1777 state_iq_08_ar iq دهوك IQ-DA-AR
1778 state_iq_09 iq Dhi Qar IQ-DQ
1779 state_iq_09_ar iq ذي قار IQ-DQ-AR
1780 state_iq_10 iq Diyala IQ-DI
1781 state_iq_10_ar iq ديالى IQ-DI-AR
1782 state_iq_11 iq Erbil IQ-AR
1783 state_iq_11_ar iq أربيل IQ-AR-AR
1784 state_iq_12 iq Karbala' IQ-KA
1785 state_iq_12_ar iq كربلاء IQ-KA-AR
1786 state_iq_13 iq Kirkuk IQ-KI
1787 state_iq_13_ar iq كركوك IQ-KI-AR
1788 state_iq_14 iq Maysan IQ-MA
1789 state_iq_14_ar iq ميسان IQ-MA-AR
1790 state_iq_15 iq Ninawa IQ-NI
1791 state_iq_15_ar iq نينوى IQ-NI-AR
1792 state_iq_16 iq Wasit IQ-WA
1793 state_iq_16_ar iq واسط IQ-WA-AR
1794 state_iq_17 iq Najaf IQ-NA
1795 state_iq_17_ar iq النجف IQ-NA-AR
1796 state_iq_18 iq Salah Al Din IQ-SD
1797 state_iq_18_ar iq صلاح الدين IQ-SD-AR
1798 state_bd_a bd Barishal BD-A
1799 state_bd_b bd Chattogram BD-B
1800 state_bd_c bd Dhaka BD-C
1801 state_bd_d bd Khulna BD-D
1802 state_bd_e bd Rajshahi BD-E
1803 state_bd_f bd Rangpur BD-F
1804 state_bd_g bd Sylhet BD-G
1805 state_bd_h bd Mymensingh BD-H
1806 state_at_1 at Burgenland 1
1807 state_at_2 at Kärnten 2
1808 state_at_3 at Niederösterreich 3
1809 state_at_4 at Oberösterreich 4
1810 state_at_5 at Salzburg 5
1811 state_at_6 at Steiermark 6
1812 state_at_7 at Tirol 7
1813 state_at_8 at Vorarlberg 8
1814 state_at_9 at Wien 9
1815 state_tw_chh tw 彰化縣 CHH
1816 state_tw_cic tw 嘉義市 CIC
1817 state_tw_cih tw 嘉義縣 CIH
1818 state_tw_hch tw 新竹縣 HCH
1819 state_tw_hct tw 新竹市 HCT
1820 state_tw_hlh tw 花蓮縣 HLH
1821 state_tw_ilh tw 宜蘭縣 ILH
1822 state_tw_khc tw 高雄市 KHC
1823 state_tw_klc tw 基隆市 KLC
1824 state_tw_kmc tw 金門縣 KMC
1825 state_tw_lcc tw 連江縣 LCC
1826 state_tw_mlh tw 苗栗縣 MLH
1827 state_tw_ntc tw 南投縣 NTC
1828 state_tw_ntpc tw 新北市 NTPC
1829 state_tw_phc tw 澎湖縣 PHC
1830 state_tw_pth tw 屏東縣 PTH
1831 state_tw_tcc tw 台中市 TCC
1832 state_tw_tnh tw 台南市 TNH
1833 state_tw_tpc tw 台北市 TPC
1834 state_tw_tth tw 台東縣 TTH
1835 state_tw_tyc tw 桃園市 TYC
1836 state_tw_ylh tw 雲林縣 YLH
1837 state_ng_ab ng Abia NG-AB
1838 state_ng_ad ng Adamawa NG-AD
1839 state_ng_ak ng Akwa Ibom NG-AK
1840 state_ng_an ng Anambra NG-AN
1841 state_ng_ba ng Bauchi NG-BA
1842 state_ng_by ng Bayelsa NG-BY
1843 state_ng_be ng Benue NG-BE
1844 state_ng_bo ng Borno NG-BO
1845 state_ng_cr ng Cross River NG-CR
1846 state_ng_de ng Delta NG-DE
1847 state_ng_eb ng Ebonyi NG-EB
1848 state_ng_ed ng Edo NG-ED
1849 state_ng_ek ng Ekiti NG-EK
1850 state_ng_en ng Enugu NG-EN
1851 state_ng_go ng Gombe NG-GO
1852 state_ng_im ng Imo NG-IM
1853 state_ng_ji ng Jigawa NG-JI
1854 state_ng_ka ng Kaduna NG-KD
1855 state_ng_kn ng Kano NG-KN
1856 state_ng_kt ng Katsina NG-KT
1857 state_ng_ke ng Kebbi NG-KE
1858 state_ng_ko ng Kogi NG-KO
1859 state_ng_kw ng Kwara NG-KW
1860 state_ng_la ng Lagos NG-LA
1861 state_ng_na ng Nasarawa NG-NA
1862 state_ng_ni ng Niger NG-NI
1863 state_ng_og ng Ogun NG-OG
1864 state_ng_on ng Ondo NG-ON
1865 state_ng_os ng Osun NG-OS
1866 state_ng_oy ng Oyo NG-OY
1867 state_ng_pl ng Plateau NG-PL
1868 state_ng_ri ng Rivers NG-RI
1869 state_ng_so ng Sokoto NG-SO
1870 state_ng_ta ng Taraba NG-TA
1871 state_ng_yo ng Yobe NG-YO
1872 state_ng_za ng Zamfara NG-ZA
1873 state_ng_fc ng FCT NG-FC
1874 state_be_1 be Antwerp VAN
1875 state_be_2 be Limburg VLI
1876 state_be_3 be East Flanders VOV
1881 state_be_8 be Liège WLG
1882 state_be_9 be Luxembourg WLX
1883 state_be_10 be Namur WNA
1884 state_so_1 so Banaadir BN
1885 state_so_2 so Galmudug GM
1886 state_so_3 so Somaliland SL
1887 state_so_4 so Puntland PL
1888 state_so_5 so Jubaland JL
1889 state_so_6 so Hirshabelle HS
1890 state_so_7 so Koonfur Galbeed KG
1891 state_so_8 so Khatumo SSC
1892 state_bn_b bn Brunei-Muara B
1893 state_bn_k bn Belait K
1894 state_bn_t bn Tutong T

View file

@ -1,93 +1,93 @@
"id","name","code","iso_code","direction","grouping","decimal_point","thousands_sep","date_format","time_format","short_time_format","week_start"
"base.lang_en","English (US)","en_US","en","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%H:%M:%S","%H:%M","7"
"base.lang_am_ET","Amharic / አምሃርኛ","am_ET","am_ET","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S","%I:%M","7"
"base.lang_ar","Arabic / الْعَرَبيّة","ar_001","ar","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","%I:%M","6"
"base.lang_ar_SY","Arabic (Syria) / الْعَرَبيّة","ar_SY","ar_SY","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","%I:%M","6"
"base.lang_az","Azerbaijani / Azərbaycanca","az_AZ","az","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_eu_ES","Basque / Euskara","eu_ES","eu_ES","Left-to-Right","[]",",",,"%a, %Y.eko %bren %da","%H:%M:%S","%H:%M","1"
"base.lang_be","Belarusian / Беларуская мова","be_BY","be","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_bn_IN","Bengali / বাংলা","bn_IN","bn_IN","Left-to-Right","[]",",",,"%A %d %b %Y","%I:%M:%S","%I:%M","1"
"base.lang_bs_BA","Bosnian / bosanski jezik","bs_BA","bs","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_bg","Bulgarian / български език","bg_BG","bg","Left-to-Right","[3,0]",",",,"%d.%m.%Y","%H,%M,%S","%H,%M","1"
"base.lang_ca_ES","Catalan / Català","ca_ES","ca_ES","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_zh_CN","Chinese (Simplified) / 简体中文","zh_CN","zh_CN","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H时%M分%S秒","%H时%M分","7"
"base.lang_zh_HK","Chinese (HK)","zh_HK","zh_HK","Left-to-Right","[3,0]",".",",","%Y年%m月%d日 %A","%I時%M分%S秒","%I時%M分","7"
"base.lang_zh_TW","Chinese (Traditional) / 繁體中文","zh_TW","zh_TW","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","%H時%M分","7"
"base.lang_hr","Croatian / hrvatski jezik","hr_HR","hr","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_cs_CZ","Czech / Čeština","cs_CZ","cs_CZ","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_da_DK","Danish / Dansk","da_DK","da_DK","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_nl_BE","Dutch (BE) / Nederlands (BE)","nl_BE","nl_BE","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_nl","Dutch / Nederlands","nl_NL","nl","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_en_AU","English (AU)","en_AU","en_AU","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_en_CA","English (CA)","en_CA","en_CA","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","%H:%M","7"
"base.lang_en_GB","English (UK)","en_GB","en_GB","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_en_IN","English (IN)","en_IN","en_IN","Left-to-Right","[3,2,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_en_NZ","English (NZ)","en_NZ","en_NZ","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_et_EE","Estonian / Eesti keel","et_EE","et","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_fi","Finnish / Suomi","fi_FI","fi","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H.%M.%S","%H.%M","1"
"base.lang_fr_BE","French (BE) / Français (BE)","fr_BE","fr_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_fr_CA","French (CA) / Français (CA)","fr_CA","fr_CA","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","%H:%M","7"
"base.lang_fr_CH","French (CH) / Français (CH)","fr_CH","fr_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_fr","French / Français","fr_FR","fr","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_gl_ES","Galician / Galego","gl_ES","gl","Left-to-Right","[]",",",,"%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_ka_GE","Georgian / ქართული ენა","ka_GE","ka","Left-to-Right","[3,0]",",",".","%m/%d/%Y","%H:%M:%S","%H:%M","1"
"base.lang_de","German / Deutsch","de_DE","de","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_de_CH","German (CH) / Deutsch (CH)","de_CH","de_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_el_GR","Greek / Ελληνικά","el_GR","el_GR","Left-to-Right","[]",",",".","%d/%m/%Y","%I:%M:%S %p","%I:%M %p","1"
"base.lang_gu_IN","Gujarati / ગુજરાતી","gu_IN","gu","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","%I:%M","7"
"base.lang_he_IL","Hebrew / עברית","he_IL","he","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_hi_IN","Hindi / हिंदी","hi_IN","hi","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","%I:%M","7"
"base.lang_hu","Hungarian / Magyar","hu_HU","hu","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","%H:%M","1"
"base.lang_id","Indonesian / Bahasa Indonesia","id_ID","id","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_it","Italian / Italiano","it_IT","it","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_ja_JP","Japanese / 日本語","ja_JP","ja","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","%H時%M分","7"
"base.lang_kab_DZ","Kabyle / Taqbaylit","kab_DZ","kab","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","6"
"base.lang_km","Khmer / ភាសាខ្មែរ","km_KH","km","Left-to-Right","[3,0]",".",",","%d %B %Y","%H:%M:%S","%H:%M","7"
"base.lang_ko_KP","Korean (KP) / 한국어 (KP)","ko_KP","ko_KP","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","1"
"base.lang_ko_KR","Korean (KR) / 한국어 (KR)","ko_KR","ko_KR","Left-to-Right","[3,0]",".",",","%Y년 %m월 %d일","%H시 %M분 %S초","%H시 %M분","7"
"base.lang_lo_LA","Lao / ພາສາລາວ","lo_LA","lo","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_lv","Latvian / latviešu valoda","lv_LV","lv","Left-to-Right","[3,0]",","," ","%Y.%m.%d.","%H:%M:%S","%H:%M","1"
"base.lang_lt","Lithuanian / Lietuvių kalba","lt_LT","lt","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","%H:%M","1"
"base.lang_lb","Luxembourgish","lb_LU","lb","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_mk","Macedonian / македонски јазик","mk_MK","mk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_ml","Malayalam / മലയാളം","ml_IN","ml","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_mn","Mongolian / монгол","mn_MN","mn","Left-to-Right","[3,0]",".","'","%Y-%m-%d","%H:%M:%S","%H:%M","7"
"base.lang_ms","Malay / Bahasa Melayu","ms_MY","ms","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_my","Burmese / ဗမာစာ","my_MM","my","Left-to-Right","[3,3]",".",",","%Y %b %d %A","%I:%M:%S %p","%I:%M %p","7"
"base.lang_nb_NO","Norwegian Bokmål / Norsk bokmål","nb_NO","nb_NO","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_fa_IR","Persian / فارسی","fa_IR","fa","Right-to-Left","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","%H:%M","6"
"base.lang_pl","Polish / Język polski","pl_PL","pl","Left-to-Right","[]",",",,"%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_pt_AO","Portuguese (AO) / Português (AO)","pt_AO","pt_AO","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_pt_BR","Portuguese (BR) / Português (BR)","pt_BR","pt_BR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_pt","Portuguese / Português","pt_PT","pt","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_ro","Romanian / română","ro_RO","ro","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_ru","Russian / русский язык","ru_RU","ru","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_sr@Cyrl","Serbian (Cyrillic) / српски","sr@Cyrl","sr@Cyrl","Left-to-Right","[]",",",,"%d.%m.%Y.","%H:%M:%S","%H:%M","7"
"base.lang_sr@latin","Serbian (Latin) / srpski","sr@latin","sr@latin","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","7"
"base.lang_sk","Slovak / Slovenský jazyk","sk_SK","sk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_sl_SI","Slovenian / slovenščina","sl_SI","sl","Left-to-Right","[]",","," ","%d. %m. %Y","%H:%M:%S","%H:%M","1"
"base.lang_es_419","Spanish (Latin America) / Español (América Latina)","es_419","es_419","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_AR","Spanish (AR) / Español (AR)","es_AR","es_AR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_BO","Spanish (BO) / Español (BO)","es_BO","es_BO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_CL","Spanish (CL) / Español (CL)","es_CL","es_CL","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_CO","Spanish (CO) / Español (CO)","es_CO","es_CO","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_CR","Spanish (CR) / Español (CR)","es_CR","es_CR","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_DO","Spanish (DO) / Español (DO)","es_DO","es_DO","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","%I:%M %p","1"
"base.lang_es_EC","Spanish (EC) / Español (EC)","es_EC","es_EC","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_GT","Spanish (GT) / Español (GT)","es_GT","es_GT","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_MX","Spanish (MX) / Español (MX)","es_MX","es_MX","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_PA","Spanish (PA) / Español (PA)","es_PA","es_PA","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_PE","Spanish (PE) / Español (PE)","es_PE","es_PE","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_PY","Spanish (PY) / Español (PY)","es_PY","es_PY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_UY","Spanish (UY) / Español (UY)","es_UY","es_UY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_VE","Spanish (VE) / Español (VE)","es_VE","es_VE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es","Spanish / Español","es_ES","es","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_sw","Swahili / Kiswahili","sw","sw","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_sv_SE","Swedish / Svenska","sv_SE","sv","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","%H:%M","1"
"base.lang_th","Thai / ภาษาไทย","th_TH","th","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_tl","Tagalog / Filipino","tl_PH","tl","Left-to-Right","[3,0]",".",",","%m/%d/%y","%H:%M:%S","%H:%M","1"
"base.lang_tr","Turkish / Türkçe","tr_TR","tr","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_uk_UA","Ukrainian / українська","uk_UA","uk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_vi_VN","Vietnamese / Tiếng Việt","vi_VN","vi","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_sq_AL","Albanian / Shqip","sq_AL","sq","Left-to-Right","[3,0]",",",".","%Y-%b-%d","%I.%M.%S.","%I.%M.","1"
"base.lang_te_IN","Telugu / తెలుగు","te_IN","te","Left-to-Right","[]",".",",","%B %d %A %Y","%p%I.%M.%S","%p%I.%M","7"
"id","name","code","iso_code","direction","grouping","decimal_point","thousands_sep","date_format","time_format","week_start"
"base.lang_en","English (US)","en_US","en","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%I:%M:%S %p","7"
"base.lang_am_ET","Amharic / አምሃርኛ","am_ET","am_ET","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_ar","Arabic / الْعَرَبيّة","ar_001","ar","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","6"
"base.lang_ar_SY","Arabic (Syria) / الْعَرَبيّة","ar_SY","ar_SY","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","6"
"base.lang_az","Azerbaijani / Azərbaycanca","az_AZ","az","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_eu_ES","Basque / Euskara","eu_ES","eu_ES","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_be","Belarusian / Беларуская мова","be_BY","be","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_bn_IN","Bengali / বাংলা","bn_IN","bn_IN","Left-to-Right","[3,0]",",","","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_bs_BA","Bosnian / bosanski jezik","bs_BA","bs","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_bg","Bulgarian / български език","bg_BG","bg","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ca_ES","Catalan / Català","ca_ES","ca_ES","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_zh_CN","Chinese, Simplified / 简体中文","zh_CN","zh_CN","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","7"
"base.lang_zh_HK","Chinese, Traditional (HK) / 繁體中文 (香港)","zh_HK","zh_HK","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%I:%M:%S %p","7"
"base.lang_zh_TW","Chinese, Traditional (TW) / 繁體中文 (台灣)","zh_TW","zh_TW","Left-to-Right","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","7"
"base.lang_hr","Croatian / hrvatski jezik","hr_HR","hr","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_cs_CZ","Czech / Čeština","cs_CZ","cs_CZ","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_da_DK","Danish / Dansk","da_DK","da_DK","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_nl_BE","Dutch (BE) / Nederlands (BE)","nl_BE","nl_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_nl","Dutch / Nederlands","nl_NL","nl","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_en_AU","English (AU)","en_AU","en_AU","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_en_CA","English (CA)","en_CA","en_CA","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%I:%M:%S %p","7"
"base.lang_en_GB","English (UK)","en_GB","en_GB","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_en_IN","English (IN)","en_IN","en_IN","Left-to-Right","[3,2,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_en_NZ","English (NZ)","en_NZ","en_NZ","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_et_EE","Estonian / Eesti keel","et_EE","et","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fi","Finnish / Suomi","fi_FI","fi","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fr_BE","French (BE) / Français (BE)","fr_BE","fr_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fr_CA","French (CA) / Français (CA)","fr_CA","fr_CA","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","7"
"base.lang_fr_CH","French (CH) / Français (CH)","fr_CH","fr_CH","Left-to-Right","[3,0]",".","'","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fr","French / Français","fr_FR","fr","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_gl_ES","Galician / Galego","gl_ES","gl","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ka_GE","Georgian / ქართული ენა","ka_GE","ka","Left-to-Right","[3,0]",",",".","%m/%d/%Y","%H:%M:%S","1"
"base.lang_de","German / Deutsch","de_DE","de","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_de_CH","German (CH) / Deutsch (CH)","de_CH","de_CH","Left-to-Right","[3,0]",".","'","%d/%m/%Y","%H:%M:%S","1"
"base.lang_el_GR","Greek / Ελληνικά","el_GR","el_GR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_gu_IN","Gujarati / ગુજરાતી","gu_IN","gu","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_he_IL","Hebrew / עברית","he_IL","he","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_hi_IN","Hindi / हिंदी","hi_IN","hi","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","7"
"base.lang_hu","Hungarian / Magyar","hu_HU","hu","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_id","Indonesian / Bahasa Indonesia","id_ID","id","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_it","Italian / Italiano","it_IT","it","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ja_JP","Japanese / 日本語","ja_JP","ja","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","7"
"base.lang_kab_DZ","Kabyle / Taqbaylit","kab_DZ","kab","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","6"
"base.lang_km","Khmer / ភាសាខ្មែរ","km_KH","km","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_ko_KP","Korean (KP) / 한국어 (KP)","ko_KP","ko_KP","Left-to-Right","[3,0]",".",",","%Y/%m/%d","%I:%M:%S %p","1"
"base.lang_ko_KR","Korean (KR) / 한국어 (KR)","ko_KR","ko_KR","Left-to-Right","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","7"
"base.lang_lo_LA","Lao / ພາສາລາວ","lo_LA","lo","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_lv","Latvian / latviešu valoda","lv_LV","lv","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_lt","Lithuanian / Lietuvių kalba","lt_LT","lt","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_lb","Luxembourgish","lb_LU","lb","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_mk","Macedonian / македонски јазик","mk_MK","mk","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ml","Malayalam / മലയാളം","ml_IN","ml","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_mn","Mongolian / монгол","mn_MN","mn","Left-to-Right","[3,0]",".","'","%Y-%m-%d","%H:%M:%S","7"
"base.lang_ms","Malay / Bahasa Melayu","ms_MY","ms","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_my","Burmese / ဗမာစာ","my_MM","my","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%I:%M:%S %p","7"
"base.lang_nb_NO","Norwegian Bokmål / Norsk bokmål","nb_NO","nb_NO","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fa_IR","Persian / فارسی","fa_IR","fa","Right-to-Left","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","6"
"base.lang_pl","Polish / Język polski","pl_PL","pl","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","1"
"base.lang_pt_AO","Portuguese (AO) / Português (AO)","pt_AO","pt_AO","Left-to-Right","[3,0]",",","","%d-%m-%Y","%H:%M:%S","1"
"base.lang_pt_BR","Portuguese (BR) / Português (BR)","pt_BR","pt_BR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_pt","Portuguese / Português","pt_PT","pt","Left-to-Right","[3,0]",",","","%d-%m-%Y","%H:%M:%S","1"
"base.lang_ro","Romanian / română","ro_RO","ro","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ru","Russian / русский язык","ru_RU","ru","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sr@Cyrl","Serbian (Cyrillic) / српски","sr@Cyrl","sr@Cyrl","Left-to-Right","[3,0]",",","","%d/%m/%Y","%H:%M:%S","7"
"base.lang_sr@latin","Serbian (Latin) / srpski","sr@latin","sr@latin","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_sk","Slovak / Slovenský jazyk","sk_SK","sk","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sl_SI","Slovenian / slovenščina","sl_SI","sl","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_419","Spanish (Latin America) / Español (América Latina)","es_419","es_419","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_AR","Spanish (AR) / Español (AR)","es_AR","es_AR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_BO","Spanish (BO) / Español (BO)","es_BO","es_BO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_CL","Spanish (CL) / Español (CL)","es_CL","es_CL","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_es_CO","Spanish (CO) / Español (CO)","es_CO","es_CO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_CR","Spanish (CR) / Español (CR)","es_CR","es_CR","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_DO","Spanish (DO) / Español (DO)","es_DO","es_DO","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_es_EC","Spanish (EC) / Español (EC)","es_EC","es_EC","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_GT","Spanish (GT) / Español (GT)","es_GT","es_GT","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_MX","Spanish (MX) / Español (MX)","es_MX","es_MX","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PA","Spanish (PA) / Español (PA)","es_PA","es_PA","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PE","Spanish (PE) / Español (PE)","es_PE","es_PE","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PY","Spanish (PY) / Español (PY)","es_PY","es_PY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_UY","Spanish (UY) / Español (UY)","es_UY","es_UY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_VE","Spanish (VE) / Español (VE)","es_VE","es_VE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es","Spanish / Español","es_ES","es","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sw","Swahili / Kiswahili","sw","sw","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sv_SE","Swedish / Svenska","sv_SE","sv","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","1"
"base.lang_th","Thai / ภาษาไทย","th_TH","th","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_tl","Tagalog / Filipino","tl_PH","tl","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%H:%M:%S","1"
"base.lang_tr","Turkish / Türkçe","tr_TR","tr","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_uk_UA","Ukrainian / українська","uk_UA","uk","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_vi_VN","Vietnamese / Tiếng Việt","vi_VN","vi","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sq_AL","Albanian / Shqip","sq_AL","sq","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_te_IN","Telugu / తెలుగు","te_IN","te","Left-to-Right","[3,0]",".",",","%d-%m-%Y","%I:%M:%S %p","7"

1 id name code iso_code direction grouping decimal_point thousands_sep date_format time_format short_time_format week_start
2 base.lang_en English (US) en_US en Left-to-Right [3,0] . , %m/%d/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
3 base.lang_am_ET Amharic / አምሃርኛ am_ET am_ET Left-to-Right [3,0] . , %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 7
4 base.lang_ar Arabic / الْعَرَبيّة ar_001 ar Right-to-Left [3,0] . , %d %b, %Y %d/%m/%Y %I:%M:%S %p %I:%M 6
5 base.lang_ar_SY Arabic (Syria) / الْعَرَبيّة ar_SY ar_SY Right-to-Left [3,0] . , %d %b, %Y %d/%m/%Y %I:%M:%S %p %I:%M 6
6 base.lang_az Azerbaijani / Azərbaycanca az_AZ az Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
7 base.lang_eu_ES Basque / Euskara eu_ES eu_ES Left-to-Right [] [3,0] , %a, %Y.eko %bren %da %d/%m/%Y %H:%M:%S %H:%M 1
8 base.lang_be Belarusian / Беларуская мова be_BY be Left-to-Right [3,0] , %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
9 base.lang_bn_IN Bengali / বাংলা bn_IN bn_IN Left-to-Right [] [3,0] , %A %d %b %Y %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 1
10 base.lang_bs_BA Bosnian / bosanski jezik bs_BA bs Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
11 base.lang_bg Bulgarian / български език bg_BG bg Left-to-Right [3,0] , %d.%m.%Y %d/%m/%Y %H,%M,%S %H:%M:%S %H,%M 1
12 base.lang_ca_ES Catalan / Català ca_ES ca_ES Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
13 base.lang_zh_CN Chinese (Simplified) / 简体中文 Chinese, Simplified / 简体中文 zh_CN zh_CN Left-to-Right [3,0] . , %Y年%m月%d日 %Y-%m-%d %H时%M分%S秒 %H:%M:%S %H时%M分 7
14 base.lang_zh_HK Chinese (HK) Chinese, Traditional (HK) / 繁體中文 (香港) zh_HK zh_HK Left-to-Right [3,0] . , %Y年%m月%d日 %A %Y-%m-%d %I時%M分%S秒 %I:%M:%S %p %I時%M分 7
15 base.lang_zh_TW Chinese (Traditional) / 繁體中文 Chinese, Traditional (TW) / 繁體中文 (台灣) zh_TW zh_TW Left-to-Right [3,0] . , %Y年%m月%d日 %Y/%m/%d %H時%M分%S秒 %H:%M:%S %H時%M分 7
16 base.lang_hr Croatian / hrvatski jezik hr_HR hr Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
17 base.lang_cs_CZ Czech / Čeština cs_CZ cs_CZ Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
18 base.lang_da_DK Danish / Dansk da_DK da_DK Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
19 base.lang_nl_BE Dutch (BE) / Nederlands (BE) nl_BE nl_BE Left-to-Right [3,0] , . %d-%m-%Y %d/%m/%Y %H:%M:%S %H:%M 1
20 base.lang_nl Dutch / Nederlands nl_NL nl Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
21 base.lang_en_AU English (AU) en_AU en_AU Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
22 base.lang_en_CA English (CA) en_CA en_CA Left-to-Right [3,0] . , %Y-%m-%d %H:%M:%S %I:%M:%S %p %H:%M 7
23 base.lang_en_GB English (UK) en_GB en_GB Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
24 base.lang_en_IN English (IN) en_IN en_IN Left-to-Right [3,2,0] . , %d/%m/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
25 base.lang_en_NZ English (NZ) en_NZ en_NZ Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %I:%M:%S %p %H:%M 7
26 base.lang_et_EE Estonian / Eesti keel et_EE et Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
27 base.lang_fi Finnish / Suomi fi_FI fi Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H.%M.%S %H:%M:%S %H.%M 1
28 base.lang_fr_BE French (BE) / Français (BE) fr_BE fr_BE Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
29 base.lang_fr_CA French (CA) / Français (CA) fr_CA fr_CA Left-to-Right [3,0] ,   %Y-%m-%d %H:%M:%S %H:%M 7
30 base.lang_fr_CH French (CH) / Français (CH) fr_CH fr_CH Left-to-Right [3,0] . ' %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
31 base.lang_fr French / Français fr_FR fr Left-to-Right [3,0] ,   %d/%m/%Y %H:%M:%S %H:%M 1
32 base.lang_gl_ES Galician / Galego gl_ES gl Left-to-Right [] [3,0] , %d/%m/%Y %H:%M:%S %H:%M 1
33 base.lang_ka_GE Georgian / ქართული ენა ka_GE ka Left-to-Right [3,0] , . %m/%d/%Y %H:%M:%S %H:%M 1
34 base.lang_de German / Deutsch de_DE de Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
35 base.lang_de_CH German (CH) / Deutsch (CH) de_CH de_CH Left-to-Right [3,0] . ' %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
36 base.lang_el_GR Greek / Ελληνικά el_GR el_GR Left-to-Right [] [3,0] , . %d/%m/%Y %I:%M:%S %p %I:%M %p 1
37 base.lang_gu_IN Gujarati / ગુજરાતી gu_IN gu Left-to-Right [] [3,0] . , %A %d %b %Y %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 7
38 base.lang_he_IL Hebrew / עברית he_IL he Right-to-Left [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
39 base.lang_hi_IN Hindi / हिंदी hi_IN hi Left-to-Right [] [3,0] . , %A %d %b %Y %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M 7
40 base.lang_hu Hungarian / Magyar hu_HU hu Left-to-Right [3,0] , . %Y-%m-%d %H:%M:%S %H:%M 1
41 base.lang_id Indonesian / Bahasa Indonesia id_ID id Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
42 base.lang_it Italian / Italiano it_IT it Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
43 base.lang_ja_JP Japanese / 日本語 ja_JP ja Left-to-Right [3,0] . , %Y年%m月%d日 %Y-%m-%d %H時%M分%S秒 %H:%M:%S %H時%M分 7
44 base.lang_kab_DZ Kabyle / Taqbaylit kab_DZ kab Left-to-Right [] [3,0] . , %m/%d/%Y %d/%m/%Y %I:%M:%S %p %I:%M %p 6
45 base.lang_km Khmer / ភាសាខ្មែរ km_KH km Left-to-Right [3,0] . , %d %B %Y %d/%m/%Y %H:%M:%S %H:%M 7
46 base.lang_ko_KP Korean (KP) / 한국어 (KP) ko_KP ko_KP Left-to-Right [3,0] . , %m/%d/%Y %Y/%m/%d %I:%M:%S %p %I:%M %p 1
47 base.lang_ko_KR Korean (KR) / 한국어 (KR) ko_KR ko_KR Left-to-Right [3,0] . , %Y년 %m월 %d일 %Y/%m/%d %H시 %M분 %S초 %H:%M:%S %H시 %M분 7
48 base.lang_lo_LA Lao / ພາສາລາວ lo_LA lo Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
49 base.lang_lv Latvian / latviešu valoda lv_LV lv Left-to-Right [3,0] ,   %Y.%m.%d. %d/%m/%Y %H:%M:%S %H:%M 1
50 base.lang_lt Lithuanian / Lietuvių kalba lt_LT lt Left-to-Right [3,0] , . %Y-%m-%d %H:%M:%S %H:%M 1
51 base.lang_lb Luxembourgish lb_LU lb Left-to-Right [3,0] ,   %d/%m/%Y %H:%M:%S %H:%M 1
52 base.lang_mk Macedonian / македонски јазик mk_MK mk Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
53 base.lang_ml Malayalam / മലയാളം ml_IN ml Left-to-Right [3,0] ,   %d/%m/%Y %H:%M:%S %H:%M 1
54 base.lang_mn Mongolian / монгол mn_MN mn Left-to-Right [3,0] . ' %Y-%m-%d %H:%M:%S %H:%M 7
55 base.lang_ms Malay / Bahasa Melayu ms_MY ms Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
56 base.lang_my Burmese / ဗမာစာ my_MM my Left-to-Right [3,3] [3,0] . , %Y %b %d %A %Y-%m-%d %I:%M:%S %p %I:%M %p 7
57 base.lang_nb_NO Norwegian Bokmål / Norsk bokmål nb_NO nb_NO Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
58 base.lang_fa_IR Persian / فارسی fa_IR fa Right-to-Left [3,0] . , %Y/%m/%d %H:%M:%S %H:%M 6
59 base.lang_pl Polish / Język polski pl_PL pl Left-to-Right [] [3,0] , %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
60 base.lang_pt_AO Portuguese (AO) / Português (AO) pt_AO pt_AO Left-to-Right [] [3,0] , %d-%m-%Y %H:%M:%S %H:%M 1
61 base.lang_pt_BR Portuguese (BR) / Português (BR) pt_BR pt_BR Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
62 base.lang_pt Portuguese / Português pt_PT pt Left-to-Right [] [3,0] , %d-%m-%Y %H:%M:%S %H:%M 1
63 base.lang_ro Romanian / română ro_RO ro Left-to-Right [3,0] , . %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
64 base.lang_ru Russian / русский язык ru_RU ru Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
65 base.lang_sr@Cyrl Serbian (Cyrillic) / српски sr@Cyrl sr@Cyrl Left-to-Right [] [3,0] , %d.%m.%Y. %d/%m/%Y %H:%M:%S %H:%M 7
66 base.lang_sr@latin Serbian (Latin) / srpski sr@latin sr@latin Left-to-Right [] [3,0] . , %m/%d/%Y %d/%m/%Y %I:%M:%S %p %H:%M:%S %I:%M %p 7
67 base.lang_sk Slovak / Slovenský jazyk sk_SK sk Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
68 base.lang_sl_SI Slovenian / slovenščina sl_SI sl Left-to-Right [] [3,0] ,   %d. %m. %Y %d/%m/%Y %H:%M:%S %H:%M 1
69 base.lang_es_419 Spanish (Latin America) / Español (América Latina) es_419 es_419 Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
70 base.lang_es_AR Spanish (AR) / Español (AR) es_AR es_AR Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
71 base.lang_es_BO Spanish (BO) / Español (BO) es_BO es_BO Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
72 base.lang_es_CL Spanish (CL) / Español (CL) es_CL es_CL Left-to-Right [3,0] , . %d/%m/%Y %d-%m-%Y %H:%M:%S %H:%M 1
73 base.lang_es_CO Spanish (CO) / Español (CO) es_CO es_CO Left-to-Right [3,0] , . %d-%m-%Y %d/%m/%Y %H:%M:%S %H:%M 7
74 base.lang_es_CR Spanish (CR) / Español (CR) es_CR es_CR Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
75 base.lang_es_DO Spanish (DO) / Español (DO) es_DO es_DO Left-to-Right [3,0] . , %d/%m/%Y %I:%M:%S %p %I:%M %p 1
76 base.lang_es_EC Spanish (EC) / Español (EC) es_EC es_EC Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
77 base.lang_es_GT Spanish (GT) / Español (GT) es_GT es_GT Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
78 base.lang_es_MX Spanish (MX) / Español (MX) es_MX es_MX Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
79 base.lang_es_PA Spanish (PA) / Español (PA) es_PA es_PA Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
80 base.lang_es_PE Spanish (PE) / Español (PE) es_PE es_PE Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
81 base.lang_es_PY Spanish (PY) / Español (PY) es_PY es_PY Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
82 base.lang_es_UY Spanish (UY) / Español (UY) es_UY es_UY Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
83 base.lang_es_VE Spanish (VE) / Español (VE) es_VE es_VE Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7
84 base.lang_es Spanish / Español es_ES es Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
85 base.lang_sw Swahili / Kiswahili sw sw Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 1
86 base.lang_sv_SE Swedish / Svenska sv_SE sv Left-to-Right [3,0] ,   %Y-%m-%d %H:%M:%S %H:%M 1
87 base.lang_th Thai / ภาษาไทย th_TH th Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
88 base.lang_tl Tagalog / Filipino tl_PH tl Left-to-Right [3,0] . , %m/%d/%y %m/%d/%Y %H:%M:%S %H:%M 1
89 base.lang_tr Turkish / Türkçe tr_TR tr Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
90 base.lang_uk_UA Ukrainian / українська uk_UA uk Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
91 base.lang_vi_VN Vietnamese / Tiếng Việt vi_VN vi Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
92 base.lang_sq_AL Albanian / Shqip sq_AL sq Left-to-Right [3,0] , . %Y-%b-%d %Y-%m-%d %I.%M.%S. %H:%M:%S %I.%M. 1
93 base.lang_te_IN Telugu / తెలుగు te_IN te Left-to-Right [] [3,0] . , %B %d %A %Y %d-%m-%Y %p%I.%M.%S %I:%M:%S %p %p%I.%M 7

View file

@ -11,8 +11,10 @@
<record id="ae" model="res.country">
<field name="name">United Arab Emirates</field>
<field name="code">ae</field>
<field name='state_required'>1</field>
<field name="currency_id" ref="AED" />
<field eval="971" name="phone_code" />
<field name="vat_label">TRN</field>
</record>
<record id="af" model="res.country">
<field name="name">Afghanistan</field>
@ -262,13 +264,13 @@
<field eval="236" name="phone_code" />
</record>
<record id="cd" model="res.country">
<field name="name">Democratic Republic of the Congo</field>
<field name="name">Congo (DRC)</field>
<field name="code">cd</field>
<field name="currency_id" ref="CDF" />
<field eval="243" name="phone_code" />
</record>
<record id="cg" model="res.country">
<field name="name">Congo</field>
<field name="name">Congo (Republic)</field>
<field name="code">cg</field>
<field name="currency_id" ref="XAF" />
<field eval="242" name="phone_code" />
@ -1127,7 +1129,7 @@
<field name="code">pf</field>
<field name="currency_id" ref="XPF" />
<field eval="689" name="phone_code" />
<field name="vat_label">N° Tahiti</field>
<field name="vat_label">VAT</field>
</record>
<record id="pg" model="res.country">
<field name="name">Papua New Guinea</field>
@ -1601,6 +1603,12 @@
<field name="currency_id" ref="ZIG" />
<field eval="263" name="phone_code" />
</record>
<record id="xi" model="res.country">
<field name="name">Northern Ireland</field>
<field name="code">xi</field>
<field name="currency_id" ref="GBP"/>
<field eval="44" name="phone_code"/>
</record>
<record id="xk" model="res.country">
<field name="name">Kosovo</field>
<field name="code">xk</field>
@ -1610,6 +1618,7 @@
<record id="europe" model="res.country.group">
<field name="name">European Union</field>
<field name="code">EU</field>
<field name="country_ids" eval="[Command.set([
ref('at'),ref('be'),ref('bg'),ref('hr'),ref('cy'),
ref('cz'),ref('dk'),ref('ee'),ref('fi'),ref('fr'),
@ -1619,8 +1628,21 @@
ref('es'),ref('se')])]"/>
</record>
<record id="europe_prefix" model="res.country.group">
<field name="name">European Union Prefixed Countries</field>
<field name="code">EU_PREFIX</field>
<field name="country_ids" eval="[Command.set([
ref('at'),ref('be'),ref('bg'),ref('hr'),ref('cy'),
ref('cz'),ref('dk'),ref('ee'),ref('fi'),ref('fr'),
ref('de'),ref('gr'),ref('hu'),ref('ie'),ref('it'),
ref('lv'),ref('lt'),ref('lu'),ref('mt'),ref('nl'),
ref('pl'),ref('pt'),ref('ro'),ref('sk'),ref('si'),
ref('es'),ref('se'),
ref('ch'),ref('no'),ref('uk'),ref('sm')])]"/>
</record>
<record id="south_america" model="res.country.group">
<field name="name">South America</field>
<field name="code">SA</field>
<field name="country_ids" eval="[Command.set([
ref('ar'),ref('bo'),ref('br'),ref('cl'),ref('co'),
ref('ec'),ref('fk'),ref('gs'),ref('gf'),ref('gy'),
@ -1629,6 +1651,7 @@
<record id="sepa_zone" model="res.country.group">
<field name="name">SEPA Countries</field>
<field name="code">SEPA</field>
<field name="country_ids" eval="[Command.set([
ref('ad'),ref('at'),ref('ax'),ref('be'),ref('bg'),
ref('bl'),ref('ch'),ref('cy'),ref('cz'),ref('de'),
@ -1644,17 +1667,29 @@
<record id="gulf_cooperation_council" model="res.country.group">
<field name="name">Gulf Cooperation Council (GCC)</field>
<field name="code">GCC</field>
<field name="country_ids" eval="[(6,0, [ref('base.sa'), ref('base.ae'), ref('base.bh'), ref('base.om'), ref('base.qa'), ref('base.kw')])]"/>
</record>
<record id="eurasian_economic_union" model="res.country.group">
<field name="name">Eurasian Economic Union</field>
<field name="code">EEU</field>
<field name="country_ids" eval="[(6, 0, [ref('ru'),ref('by'),ref('am'),ref('kg'),ref('kz')])]"/>
</record>
<record id="ch_and_li" model="res.country.group">
<field name="name">Switzerland and Liechtenstein</field>
<field name="code">CH-LI</field>
<field name="country_ids" eval="[Command.set([ref('ch'), ref('li')])]"/>
</record>
<record id="dom-tom" model="res.country.group">
<field name="name">DOM-TOM</field>
<field name="code">DOM-TOM</field>
<field name="country_ids" eval="[Command.set([
ref('yt'),ref('gp'),ref('mq'),ref('gf'),ref('re'),
ref('pf'),ref('pm'),ref('mf'),ref('bl'),ref('nc'),
])]"/>
</record>
</data>
</odoo>

View file

@ -1427,7 +1427,7 @@
<field name="name">AED</field>
<field name="iso_numeric">784</field>
<field name="full_name">United Arab Emirates dirham</field>
<field name="symbol">د.إ</field>
<field name="symbol">AED</field>
<field name="rounding">0.01</field>
<field name="active" eval="False"/>
<field name="currency_unit_label">Dirham</field>

View file

@ -22,7 +22,6 @@
<record model="res.partner" id="base.partner_admin">
<field name="name">Administrator</field>
<field name="company_id" ref="main_company"/>
<field name="email">admin@example.com</field>
</record>
<record id="public_partner" model="res.partner">
@ -30,30 +29,6 @@
<field name="active" eval="False"/>
</record>
<!--
Resource: res.partner.title
-->
<record id="res_partner_title_madam" model="res.partner.title">
<field name="name">Madam</field>
<field name="shortcut">Mrs.</field>
</record>
<record id="res_partner_title_miss" model="res.partner.title">
<field name="name">Miss</field>
<field name="shortcut">Miss</field>
</record>
<record id="res_partner_title_mister" model="res.partner.title">
<field name="name">Mister</field>
<field name="shortcut">Mr.</field>
</record>
<record id="res_partner_title_doctor" model="res.partner.title">
<field name="name">Doctor</field>
<field name="shortcut">Dr.</field>
</record>
<record id="res_partner_title_prof" model="res.partner.title">
<field name="name">Professor</field>
<field name="shortcut">Prof.</field>
</record>
<record id="res_partner_industry_A" model="res.partner.industry">
<field name="name">Agriculture</field>
<field name="full_name">A - AGRICULTURE, FORESTRY AND FISHING</field>

View file

@ -38,6 +38,9 @@
<!--
Resource: res.partner
-->
<record model="res.partner" id="base.partner_admin">
<field name="email">admin@example.com</field>
</record>
<record id="res_partner_1" model="res.partner">
<field name="name">Wood Corner</field>
<field eval="[Command.set([ref('res_partner_category_14'), ref('res_partner_category_12')])]" name="category_id"/>
@ -71,17 +74,16 @@
<record id="res_partner_3" model="res.partner">
<field name="name">Gemini Furniture</field>
<field eval="[Command.set([ref('res_partner_category_8'), ref('res_partner_category_14')])]" name="category_id"/>
<field name="is_company">1</field>
<field name="street">317 Fairchild Dr</field>
<field name="city">Fairfield</field>
<field name="state_id" ref='state_us_5'/>
<field name="zip">94535</field>
<field name="country_id" ref="base.us"/>
<field name="is_company" eval="True"/>
<field name="street">Via Industria 21</field>
<field name="city">Serravalle</field>
<field name="zip">47899</field>
<field name="country_id" ref="base.sm"/>
<field name="email">gemini_furniture@fake.geminifurniture.com</field>
<field name="phone">(941)-284-4875</field>
<field name="phone">+378 0549 885555</field>
<field name="website">http://www.gemini-furniture.com/</field>
<field name="image_1920" type="base64" file="base/static/img/res_partner_3-image.png"/>
<field name="vat">US12345674</field>
<field name="vat">SM12345</field>
</record>
<record id="res_partner_4" model="res.partner">
@ -101,17 +103,29 @@
</record>
<record id="res_partner_5" model="res.partner">
<field name="city">Florenville</field>
<field name="country_id" ref="base.be"/>
<field name="city">Wiltz</field>
<field name="country_id" ref="base.li"/>
<field name="email">wow@example.com</field>
<field name="image_1920" file="base/static/img/partner_open_wood.png" type="base64"/>
<field name="is_company" eval="True"/>
<field name="mobile">+32 987 65 43 21</field>
<field name="name">OpenWood</field>
<field name="phone">+32 987 65 43 21</field>
<field name="street">Orval 1</field>
<field name="phone">+352 123 456 789</field>
<field name="street">B</field>
<field name="website">www.openwood.example.com</field>
<field name="zip">6823</field>
<field name="zip">9510</field>
</record>
<record id="res_partner_6" model="res.partner">
<field name="city">Uuearu</field>
<field name="country_id" ref="base.me"/>
<field name="email">lightsup@example.com</field>
<field name="image_1920" file="base/static/img/partner_lightsup.png" type="base64"/>
<field name="is_company" eval="True"/>
<field name="name">LightsUp</field>
<field name="phone">+372 123 1234</field>
<field name="street">Eignaa tee 12</field>
<field name="website">www.lightsup.example.com</field>
<field name="zip">74407</field>
</record>
<record id="res_partner_10" model="res.partner">

View file

@ -7,8 +7,7 @@
<field name="company_id" ref="main_company"/>
<field name="company_ids" eval="[Command.link(ref('main_company'))]"/>
<field name="email">odoobot@example.com</field>
<field name="signature"><![CDATA[<span>-- <br/>
System</span>]]></field>
<field name="signature">System</field>
</record>
<!-- user 2 is the human admin user -->
@ -18,28 +17,20 @@ System</span>]]></field>
<field name="partner_id" ref="base.partner_admin"/>
<field name="company_id" ref="main_company"/>
<field name="company_ids" eval="[Command.link(ref('main_company'))]"/>
<field name="groups_id" eval="[Command.set([])]"/>
<field name="signature"><![CDATA[<span>-- <br/>
Administrator</span>]]></field>
<field name="group_ids" eval="[Command.set([])]"/>
<field name="signature">Administrator</field>
</record>
<record id="user_admin_settings" model="res.users.settings" forcecreate="0">
<field name="user_id" ref="base.user_admin"/>
</record>
<!-- Default user with full access rights for newly created users -->
<record id="default_user" model="res.users">
<field name="name">Default User Template</field>
<field name="login">default</field>
<field name="active" eval="False"/>
</record>
<record id="public_user" model="res.users">
<field name="name">Public user</field>
<field name="login">public</field>
<field name="password"></field>
<!-- Avoid auto-including this demo user in any default group -->
<field name="groups_id" eval="[Command.set([])]"/>
<field name="group_ids" eval="[Command.set([])]"/>
<field name="image_1920" type="base64" file="base/static/img/public_user-image.png"/>
<field name="partner_id" ref="public_partner"/>
<field name="active" eval="False"/>

View file

@ -21,9 +21,9 @@
<value eval="[ref('base.main_partner')]"/>
<value eval="{
'name': 'YourCompany',
'street': '250 Executive Park Blvd, Suite 3400',
'city': 'San Francisco',
'zip': '94134',
'street': '8000 Marina Blvd, Suite 300',
'city': 'Brisbane',
'zip': '94005',
'country_id': ref('base.us'),
'state_id': ref('base.state_us_5'),
'phone': '+1 555-555-5556',
@ -36,9 +36,9 @@
<field name="partner_id" ref="base.partner_demo"/>
<field name="login">demo</field>
<field name="password">demo</field>
<field name="signature" type="html"><span>-- <br/>+Mr Demo</span></field>
<field name="signature">Mr Demo</field>
<field name="company_id" ref="main_company"/>
<field name="groups_id" eval="[Command.set([ref('base.group_user'), ref('base.group_partner_manager'), ref('base.group_allow_export')])]"/>
<field name="group_ids" eval="[Command.set([ref('base.group_user'), ref('base.group_partner_manager'), ref('base.group_allow_export')])]"/>
<field name="image_1920" type="base64" file="base/static/img/user_demo-image.png"/>
</record>
@ -66,7 +66,7 @@
</record>
<record id="base.user_admin" model="res.users">
<field name="signature" type="html"><span>-- <br/>Mitchell Admin</span></field>
<field name="signature">Mitchell Admin</field>
</record>
<!-- Portal : partner and user -->
@ -86,12 +86,12 @@
<field name="partner_id" ref="partner_demo_portal"/>
<field name="login">portal</field>
<field name="password">portal</field>
<field name="signature"><![CDATA[<span>-- <br/>Mr Demo Portal</span>]]></field>
<field name="groups_id" eval="[Command.clear()]"/><!-- Avoid auto-including this user in any default group -->
<field name="signature">Mr Demo Portal</field>
<field name="group_ids" eval="[Command.clear()]"/><!-- Avoid auto-including this user in any default group -->
</record>
<record id="base.group_portal" model="res.groups"><!-- Add the demo user to the portal (and therefore to the portal member group) -->
<field name="users" eval="[Command.link(ref('demo_user0'))]"/>
<field name="user_ids" eval="[Command.link(ref('demo_user0'))]"/>
</record>
</data>
</odoo>

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -28,6 +28,8 @@ from . import ir_logging
from . import ir_module
from . import ir_demo
from . import ir_demo_failure
from . import properties_base_definition
from . import properties_base_definition_mixin
from . import report_layout
from . import report_paperformat
@ -42,6 +44,8 @@ from . import res_bank
from . import res_config
from . import res_currency
from . import res_company
from . import res_groups_privilege
from . import res_groups
from . import res_users
from . import res_users_settings
from . import res_users_deletion

View file

@ -1,8 +1,4 @@
# -*- coding: utf-8 -*-
from contextlib import closing
from collections import OrderedDict
from lxml import etree
from subprocess import Popen, PIPE
import functools
import hashlib
import io
import logging
@ -10,22 +6,19 @@ import os
import re
import textwrap
import uuid
from collections import OrderedDict
from contextlib import closing
from subprocess import Popen, PIPE
try:
import sass as libsass
except ImportError:
# If the `sass` python library isn't found, we fallback on the
# `sassc` executable in the path.
libsass = None
from lxml import etree
from rjsmin import jsmin as rjsmin
from odoo import release, SUPERUSER_ID, _
from odoo import release
from odoo.api import SUPERUSER_ID
from odoo.http import request
from odoo.tools import (func, misc, transpile_javascript,
is_odoo_module, SourceMapGenerator, profiler, OrderedSet)
from odoo.tools.json import scriptsafe as json
from odoo.tools import OrderedSet, misc, profiler
from odoo.tools.constants import SCRIPT_EXTENSIONS, STYLE_EXTENSIONS
from odoo.tools.json import scriptsafe as json
from odoo.tools.misc import file_open, file_path
_logger = logging.getLogger(__name__)
@ -52,7 +45,7 @@ class AssetsBundle(object):
TRACKED_BUNDLES = ['web.assets_web']
def __init__(self, name, files, external_assets=(), env=None, css=True, js=True, debug_assets=False, rtl=False, assets_params=None):
def __init__(self, name, files, external_assets=(), env=None, css=True, js=True, debug_assets=False, rtl=False, assets_params=None, autoprefix=False):
"""
:param name: bundle name
:param files: files to be added to the bundle
@ -68,6 +61,7 @@ class AssetsBundle(object):
self.files = files
self.rtl = rtl
self.assets_params = assets_params or {}
self.autoprefix = autoprefix
self.has_css = css
self.has_js = js
self._checksum_cache = {}
@ -90,6 +84,7 @@ class AssetsBundle(object):
if css:
css_params = {
'rtl': self.rtl,
'autoprefix': self.autoprefix,
}
if extension == 'sass':
self.stylesheets.append(SassStylesheetAsset(self, **params, **css_params))
@ -147,7 +142,8 @@ class AssetsBundle(object):
def get_asset_url(self, unique=ANY_UNIQUE, extension='%', ignore_params=False):
direction = '.rtl' if self.is_css(extension) and self.rtl else ''
bundle_name = f"{self.name}{direction}.{extension}"
autoprefixed = '.autoprefixed' if self.is_css(extension) and self.autoprefix else ''
bundle_name = f"{self.name}{direction}{autoprefixed}.{extension}"
return self.env['ir.asset']._get_asset_bundle_url(bundle_name, unique, self.assets_params, ignore_params)
def _unlink_attachments(self, attachments):
@ -351,6 +347,7 @@ class AssetsBundle(object):
:return ir.attachment representing the un-minified content of the bundleJS
"""
from odoo.tools.sourcemap_generator import SourceMapGenerator # noqa: PLC0415
sourcemap_attachment = self.get_attachments('js.map') \
or self.save_attachment('js.map', '')
generator = SourceMapGenerator(
@ -463,7 +460,7 @@ class AssetsBundle(object):
inherit_mode = template_tree.get('t-inherit-mode', 'primary')
if inherit_mode not in ['primary', 'extension']:
addon = asset.url.split('/')[1]
return asset.generate_error(_(
return asset.generate_error(self.env._(
'Invalid inherit mode. Module "%(module)s" and template name "%(template_name)s"',
module=addon,
template_name=template_name,
@ -480,7 +477,7 @@ class AssetsBundle(object):
blocks.append(block)
block["templates"].append((template_tree, asset.url, inherit_from))
else:
return asset.generate_error(_("Template name is missing."))
return asset.generate_error(self.env._("Template name is missing."))
return blocks
@ -537,6 +534,7 @@ css_error_message {
:param content_import_rules: string containing all the @import rules to put at the beginning of the bundle
:return ir.attachment representing the un-minified content of the bundleCSS
"""
from odoo.tools.sourcemap_generator import SourceMapGenerator # noqa: PLC0415
sourcemap_attachment = self.get_attachments('css.map') \
or self.save_attachment('css.map', '')
debug_asset_url = self.get_asset_url(unique='debug')
@ -584,6 +582,9 @@ css_error_message {
source = '\n'.join([asset.get_source() for asset in assets])
compiled += self.compile_css(assets[0].compile, source)
if self.autoprefix:
compiled = self.autoprefix_css(compiled)
# We want to run rtlcss on normal css, so merge it in compiled
if self.rtl:
stylesheet_assets = [asset for asset in self.stylesheets if not isinstance(asset, (SassStylesheetAsset, ScssStylesheetAsset, LessStylesheetAsset))]
@ -631,18 +632,21 @@ css_error_message {
except CompileError as e:
return handle_compile_error(e, source=source)
compiled = compiled.strip()
return compiled.strip()
def autoprefix_css(self, source):
compiled = source.strip()
# Post process the produced css to add required vendor prefixes here
compiled = re.sub(r'(appearance: (\w+);)', r'-webkit-appearance: \2; -moz-appearance: \2; \1', compiled)
compiled = re.sub(r'[ \t]\b(appearance: (\w+);)', r'-webkit-appearance: \2; -moz-appearance: \2; \1', compiled)
# Most of those are only useful for wkhtmltopdf (some for old PhantomJS)
compiled = re.sub(r'(display: ((?:inline-)?)flex((?: ?!important)?);)', r'display: -webkit-\2box\3; display: -webkit-\2flex\3; \1', compiled)
compiled = re.sub(r'(justify-content: flex-(\w+)((?: ?!important)?);)', r'-webkit-box-pack: \2\3; \1', compiled)
compiled = re.sub(r'(flex-flow: (\w+ \w+);)', r'-webkit-flex-flow: \2; \1', compiled)
compiled = re.sub(r'(flex-direction: (column);)', r'-webkit-box-orient: vertical; -webkit-box-direction: normal; -webkit-flex-direction: \2; \1', compiled)
compiled = re.sub(r'(flex-wrap: (\w+);)', r'-webkit-flex-wrap: \2; \1', compiled)
compiled = re.sub(r'(flex: ((\d)+ \d+ (?:\d+|auto));)', r'-webkit-box-flex: \3; -webkit-flex: \2; \1', compiled)
compiled = re.sub(r'[ \t]\b(display: ((?:inline-)?)flex((?: ?!important)?);)', r'display: -webkit-\2box\3; display: -webkit-\2flex\3; \1', compiled)
compiled = re.sub(r'[ \t]\b(justify-content: flex-(\w+)((?: ?!important)?);)', r'-webkit-box-pack: \2\3; \1', compiled)
compiled = re.sub(r'[ \t]\b(flex-flow: (\w+ \w+);)', r'-webkit-flex-flow: \2; \1', compiled)
compiled = re.sub(r'[ \t]\b(flex-direction: (column);)', r'-webkit-box-orient: vertical; -webkit-box-direction: normal; -webkit-flex-direction: \2; \1', compiled)
compiled = re.sub(r'[ \t]\b(flex-wrap: (\w+);)', r'-webkit-flex-wrap: \2; \1', compiled)
compiled = re.sub(r'[ \t]\b(flex: ((\d)+ \d+ (?:\d+|auto));)', r'-webkit-box-flex: \3; -webkit-flex: \2; \1', compiled)
return compiled
@ -724,16 +728,16 @@ class WebAsset(object):
_logger.error(msg) # log it in the python console in all cases.
return msg
@func.lazy_property
@functools.cached_property
def id(self):
if self._id is None: self._id = str(uuid.uuid4())
return self._id
@func.lazy_property
@functools.cached_property
def unique_descriptor(self):
return f'{self.url or self.inline},{self.last_modified}'
@func.lazy_property
@functools.cached_property
def name(self):
return '<inline asset>' if self.inline else self.url
@ -810,6 +814,7 @@ class JavascriptAsset(WebAsset):
@property
def is_transpiled(self):
if self._is_transpiled is None:
from odoo.tools.js_transpiler import is_odoo_module # noqa: PLC0415
self._is_transpiled = bool(is_odoo_module(self.url, super().content))
return self._is_transpiled
@ -818,6 +823,7 @@ class JavascriptAsset(WebAsset):
content = super().content
if self.is_transpiled:
if not self._converted_content:
from odoo.tools.js_transpiler import transpile_javascript # noqa: PLC0415
self._converted_content = transpile_javascript(self.url, content)
return self._converted_content
return content
@ -911,18 +917,20 @@ class StylesheetAsset(WebAsset):
rx_sourceMap = re.compile(r'(/\*# sourceMappingURL=.*)', re.U)
rx_charset = re.compile(r'(@charset "[^"]+";)', re.U)
def __init__(self, *args, rtl=False, **kw):
def __init__(self, *args, rtl=False, autoprefix=False, **kw):
self.rtl = rtl
self.autoprefix = autoprefix
super().__init__(*args, **kw)
@property
def bundle_version(self):
return self.bundle.get_version('css')
@func.lazy_property
@functools.cached_property
def unique_descriptor(self):
direction = (self.rtl and 'rtl') or 'ltr'
return f'{self.url or self.inline},{self.last_modified},{direction}'
autoprefixed = (self.autoprefix and 'autoprefixed') or ''
return f'{self.url or self.inline},{self.last_modified},{direction},{autoprefixed}'
def _fetch_content(self):
try:
@ -1032,7 +1040,9 @@ class ScssStylesheetAsset(PreprocessedCSS):
output_style = 'expanded'
def compile(self, source):
if libsass is None:
try:
import sass as libsass # noqa: PLC0415
except ModuleNotFoundError:
return super().compile(source)
def scss_importer(path, *args):

View file

@ -5,6 +5,7 @@ from base64 import b64encode
from hashlib import sha512
from odoo import models, fields, api
from odoo.tools import html_escape, file_open
from odoo.tools.misc import limited_field_access_token
def get_hsl_from_seed(seed):
@ -76,4 +77,14 @@ class AvatarMixin(models.AbstractModel):
return "base/static/img/avatar_grey.png"
def _avatar_get_placeholder(self):
return file_open(self._avatar_get_placeholder_path(), 'rb').read()
with file_open(self._avatar_get_placeholder_path(), 'rb') as f:
return f.read()
def _get_avatar_128_access_token(self):
"""Return a scoped access token for the `avatar_128` field. The token can be
used with `ir_binary._find_record` to bypass access rights.
:rtype: str
"""
self.ensure_one()
return limited_field_access_token(self, "avatar_128", scope="binary")

View file

@ -1,19 +1,11 @@
# -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools, _
import odoo.addons
from odoo import api, fields, models, tools
import logging
import sys
_logger = logging.getLogger(__name__)
def get_precision(application):
_logger.warning("Deprecated call to decimal_precision.get_precision(<application>), use digits=<application> instead")
return application
class DecimalPrecision(models.Model):
_name = 'decimal.precision'
_description = 'Decimal Precision'
@ -21,12 +13,13 @@ class DecimalPrecision(models.Model):
name = fields.Char('Usage', required=True)
digits = fields.Integer('Digits', required=True, default=2)
_sql_constraints = [
('name_uniq', 'unique (name)', """Only one value can be defined for each given usage!"""),
]
_name_uniq = models.Constraint(
'unique (name)',
"Only one value can be defined for each given usage!",
)
@api.model
@tools.ormcache('application')
@tools.ormcache('application', cache='stable')
def precision_get(self, application):
self.flush_model(['name', 'digits'])
self.env.cr.execute('select digits from decimal_precision where name=%s', (application,))
@ -35,18 +28,18 @@ class DecimalPrecision(models.Model):
@api.model_create_multi
def create(self, vals_list):
res = super(DecimalPrecision, self).create(vals_list)
self.env.registry.clear_cache()
res = super().create(vals_list)
self.env.registry.clear_cache('stable')
return res
def write(self, data):
res = super(DecimalPrecision, self).write(data)
self.env.registry.clear_cache()
def write(self, vals):
res = super().write(vals)
self.env.registry.clear_cache('stable')
return res
def unlink(self):
res = super(DecimalPrecision, self).unlink()
self.env.registry.clear_cache()
res = super().unlink()
self.env.registry.clear_cache('stable')
return res
@api.onchange('digits')
@ -54,8 +47,8 @@ class DecimalPrecision(models.Model):
if self.digits < self._origin.digits:
return {
'warning': {
'title': _("Warning for %s", self.name),
'message': _(
'title': self.env._("Warning for %s", self.name),
'message': self.env._(
"The precision has been reduced for %s.\n"
"Note that existing data WON'T be updated by this change.\n\n"
"As decimal precisions impact the whole system, this may cause critical issues.\n"
@ -65,9 +58,3 @@ class DecimalPrecision(models.Model):
)
}
}
# compatibility for decimal_precision.get_precision(): expose the module in addons namespace
dp = sys.modules['odoo.addons.base.models.decimal_precision']
odoo.addons.decimal_precision = dp
sys.modules['odoo.addons.decimal_precision'] = dp
sys.modules['openerp.addons.decimal_precision'] = dp

View file

@ -1,70 +1,57 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
from contextlib import ExitStack
from markupsafe import Markup
from urllib.parse import urlparse
from odoo import api, fields, models, tools, SUPERUSER_ID, _
from odoo.exceptions import UserError, AccessError, RedirectWarning
from odoo.service import security
from odoo.tools.safe_eval import safe_eval, time
from odoo.tools.misc import find_in_path
from odoo.tools import check_barcode_encoding, config, is_html_empty, parse_version, split_every
from odoo.http import request, root
from odoo.tools.pdf import PdfFileWriter, PdfFileReader, PdfReadError
from odoo.osv.expression import NEGATIVE_TERM_OPERATORS, FALSE_DOMAIN
import functools
import io
import json
import logging
import os
import lxml.html
import tempfile
import subprocess
import re
import requests
import json
from lxml import etree
from contextlib import closing
from reportlab.graphics.barcode import createBarcodeDrawing
from reportlab.pdfbase.pdfmetrics import getFont, TypeFace
import subprocess
import tempfile
import typing
import unittest
from ast import literal_eval
from collections import OrderedDict
from collections.abc import Iterable
from PIL import Image, ImageFile
from contextlib import closing, ExitStack
from itertools import islice
from urllib.parse import urlparse
import lxml.html
from PIL import Image, ImageFile
from lxml import etree
from markupsafe import Markup
from odoo import api, fields, models, modules, tools, _
from odoo.exceptions import UserError, AccessError, RedirectWarning
from odoo.fields import Domain
from odoo.service import security
from odoo.http import request, root
from odoo.tools import config, is_html_empty, parse_version, split_every
from odoo.tools.barcode import check_barcode_encoding, createBarcodeDrawing, get_barcode_font
from odoo.tools.misc import find_in_path
from odoo.tools.pdf import PdfFileReader, PdfFileWriter, PdfReadError
from odoo.tools.safe_eval import safe_eval, time
# Allow truncated images
ImageFile.LOAD_TRUNCATED_IMAGES = True
_logger = logging.getLogger(__name__)
# A lock occurs when the user wants to print a report having multiple barcode while the server is
# started in threaded-mode. The reason is that reportlab has to build a cache of the T1 fonts
# before rendering a barcode (done in a C extension) and this part is not thread safe. We attempt
# here to init the T1 fonts cache at the start-up of Odoo so that rendering of barcode in multiple
# thread does not lock the server.
_DEFAULT_BARCODE_FONT = 'Courier'
try:
available = TypeFace(_DEFAULT_BARCODE_FONT).findT1File()
if not available:
substitution_font = 'NimbusMonoPS-Regular'
fnt = getFont(substitution_font)
if fnt:
_DEFAULT_BARCODE_FONT = substitution_font
fnt.ascent = 629
fnt.descent = -157
createBarcodeDrawing('Code128', value='foo', format='png', width=100, height=100, humanReadable=1, fontName=_DEFAULT_BARCODE_FONT).asString('png')
except Exception:
pass
def _run_wkhtmltopdf(args):
"""
Runs the given arguments against the wkhtmltopdf binary.
def _get_wkhtmltopdf_bin():
return find_in_path('wkhtmltopdf')
def _get_wkhtmltoimage_bin():
return find_in_path('wkhtmltoimage')
Returns:
The process
"""
bin_path = _wkhtml().bin
return subprocess.run(
[bin_path, *args],
capture_output=True,
encoding='utf-8',
check=False,
)
def _split_table(tree, max_rows):
@ -87,58 +74,85 @@ def _split_table(tree, max_rows):
prev.addnext(sibling)
prev = sibling
# Check the presence of Wkhtmltopdf and return its version at Odoo start-up
wkhtmltopdf_state = 'install'
wkhtmltopdf_dpi_zoom_ratio = False
try:
process = subprocess.Popen(
[_get_wkhtmltopdf_bin(), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
except (OSError, IOError):
_logger.info('You need Wkhtmltopdf to print a pdf version of the reports.')
else:
_logger.info('Will use the Wkhtmltopdf binary at %s' % _get_wkhtmltopdf_bin())
out, err = process.communicate()
match = re.search(b'([0-9.]+)', out)
if match:
version = match.group(0).decode('ascii')
if parse_version(version) < parse_version('0.12.0'):
_logger.info('Upgrade Wkhtmltopdf to (at least) 0.12.0')
wkhtmltopdf_state = 'upgrade'
class WkhtmlInfo(typing.NamedTuple):
state: typing.Literal['install', 'ok']
dpi_zoom_ratio: bool
bin: str
version: str
wkhtmltoimage_bin: str
wkhtmltoimage_version: tuple[str, ...] | None
@functools.lru_cache(1)
def _wkhtml() -> WkhtmlInfo:
state = 'install'
bin_path = 'wkhtmltopdf'
version = ''
dpi_zoom_ratio = False
try:
bin_path = find_in_path('wkhtmltopdf')
process = subprocess.Popen(
[bin_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
except OSError:
_logger.info('You need Wkhtmltopdf to print a pdf version of the reports.')
else:
_logger.info('Will use the Wkhtmltopdf binary at %s', bin_path)
out, _err = process.communicate()
version = out.decode('ascii')
match = re.search(r'([0-9.]+)', version)
if match:
version = match.group(0)
if parse_version(version) < parse_version('0.12.0'):
_logger.info('Upgrade Wkhtmltopdf to (at least) 0.12.0')
state = 'upgrade'
else:
state = 'ok'
if parse_version(version) >= parse_version('0.12.2'):
dpi_zoom_ratio = True
if config['workers'] == 1:
_logger.info('You need to start Odoo with at least two workers to print a pdf version of the reports.')
state = 'workers'
else:
wkhtmltopdf_state = 'ok'
if parse_version(version) >= parse_version('0.12.2'):
wkhtmltopdf_dpi_zoom_ratio = True
_logger.info('Wkhtmltopdf seems to be broken.')
state = 'broken'
if config['workers'] == 1:
_logger.info('You need to start Odoo with at least two workers to print a pdf version of the reports.')
wkhtmltopdf_state = 'workers'
wkhtmltoimage_version = None
image_bin_path = 'wkhtmltoimage'
try:
image_bin_path = find_in_path('wkhtmltoimage')
process = subprocess.Popen(
[image_bin_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
except OSError:
_logger.info('You need Wkhtmltoimage to generate images from html.')
else:
_logger.info('Wkhtmltopdf seems to be broken.')
wkhtmltopdf_state = 'broken'
_logger.info('Will use the Wkhtmltoimage binary at %s', image_bin_path)
out, _err = process.communicate()
match = re.search(rb'([0-9.]+)', out)
if match:
wkhtmltoimage_version = parse_version(match.group(0).decode('ascii'))
if config['workers'] == 1:
_logger.info('You need to start Odoo with at least two workers to convert images to html.')
else:
_logger.info('Wkhtmltoimage seems to be broken.')
wkhtmltoimage_version = None
try:
process = subprocess.Popen(
[_get_wkhtmltoimage_bin(), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
return WkhtmlInfo(
state=state,
dpi_zoom_ratio=dpi_zoom_ratio,
bin=bin_path,
version=version,
wkhtmltoimage_bin=image_bin_path,
wkhtmltoimage_version=wkhtmltoimage_version,
)
except OSError:
_logger.info('You need Wkhtmltoimage to generate images from html.')
else:
_logger.info('Will use the Wkhtmltoimage binary at %s', _get_wkhtmltoimage_bin())
out, err = process.communicate()
match = re.search(b'([0-9.]+)', out)
if match:
wkhtmltoimage_version = parse_version(match.group(0).decode('ascii'))
if config['workers'] == 1:
_logger.info('You need to start Odoo with at least two workers to convert images to html.')
else:
_logger.info('Wkhtmltoimage seems to be broken.')
class IrActionsReport(models.Model):
_name = 'ir.actions.report'
_description = 'Report Action'
_inherit = 'ir.actions.actions'
_inherit = ['ir.actions.actions']
_table = 'ir_act_report_xml'
_order = 'name, id'
_allow_sudo_commands = False
@ -160,10 +174,10 @@ class IrActionsReport(models.Model):
report_name = fields.Char(string='Template Name', required=True)
report_file = fields.Char(string='Report File', required=False, readonly=False, store=True,
help="The path to the main report file (depending on Report Type) or empty if the content is in another field")
groups_id = fields.Many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', string='Groups')
group_ids = fields.Many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', string='Groups')
multi = fields.Boolean(string='On Multiple Doc.', help="If set to true, the action will not be displayed on the right toolbar of a form view.")
paperformat_id = fields.Many2one('report.paperformat', 'Paper Format')
paperformat_id = fields.Many2one('report.paperformat', 'Paper Format', index='btree_not_null')
print_report_name = fields.Char('Printed Report Name', translate=True,
help="This is the filename of the report going to download. Keep empty to not change the report filename. You can use a python expression with the 'object' and 'time' variables.")
attachment_use = fields.Boolean(string='Reload from Attachment',
@ -178,28 +192,24 @@ class IrActionsReport(models.Model):
action.model_id = self.env['ir.model']._get(action.model).id
def _search_model_id(self, operator, value):
ir_model_ids = None
if operator in Domain.NEGATIVE_OPERATORS:
return NotImplemented
models = self.env['ir.model']
if isinstance(value, str):
names = self.env['ir.model'].name_search(value, operator=operator)
ir_model_ids = [n[0] for n in names]
elif operator in ('any', 'not any'):
ir_model_ids = self.env['ir.model']._search(value)
elif isinstance(value, Iterable):
ir_model_ids = value
elif isinstance(value, int) and not isinstance(value, bool):
ir_model_ids = [value]
if ir_model_ids:
operator = 'not in' if operator in NEGATIVE_TERM_OPERATORS else 'in'
ir_model = self.env['ir.model'].browse(ir_model_ids)
return [('model', operator, ir_model.mapped('model'))]
elif isinstance(value, bool) or value is None:
return [('model', operator, value)]
else:
return FALSE_DOMAIN
models = models.search(Domain('display_name', operator, value))
elif isinstance(value, Domain):
models = models.search(value)
elif operator == 'any!':
models = models.sudo().search(Domain('id', operator, value))
elif operator == 'any' or isinstance(value, int):
models = models.search(Domain('id', operator, value))
elif operator == 'in':
models = models.search(Domain.OR(
Domain('id' if isinstance(v, int) else 'display_name', operator, v)
for v in value
if v
))
return Domain('model', 'in', models.mapped('model'))
def _get_readable_fields(self):
return super()._get_readable_fields() | {
@ -268,7 +278,7 @@ class IrActionsReport(models.Model):
:return: wkhtmltopdf_state
'''
return wkhtmltopdf_state
return _wkhtml().state
def get_paperformat(self):
return self.paperformat_id or self.env.company.paperformat_id
@ -333,7 +343,7 @@ class IrActionsReport(models.Model):
dpi = paperformat_id.dpi
if dpi:
command_args.extend(['--dpi', str(dpi)])
if wkhtmltopdf_dpi_zoom_ratio:
if _wkhtml().dpi_zoom_ratio:
command_args.extend(['--zoom', str(96.0 / dpi)])
if specific_paperformat_args and specific_paperformat_args.get('data-report-header-spacing'):
@ -371,13 +381,6 @@ class IrActionsReport(models.Model):
The idea is to put all headers/footers together. Then, we will use a javascript trick
(see minimal_layout template) to set the right header/footer during the processing of wkhtmltopdf.
This allows the computation of multiple reports in a single call to wkhtmltopdf.
:param html: The html rendered by render_qweb_html.
:type: bodies: list of string representing each one a html body.
:type header: string representing the html header.
:type footer: string representing the html footer.
:type specific_paperformat_args: dictionary of prioritized paperformat values.
:return: bodies, header, footer, specific_paperformat_args
'''
# Return empty dictionary if 'web.minimal_layout' not found.
@ -462,8 +465,9 @@ class IrActionsReport(models.Model):
:param image_format union['jpg', 'png']: format of the image
:return list[bytes|None]:
"""
if (tools.config['test_enable'] or tools.config['test_file']) and not self.env.context.get('force_image_rendering'):
if (modules.module.current_test or tools.config['test_enable']) and not self.env.context.get('force_image_rendering'):
return [None] * len(bodies)
wkhtmltoimage_version = _wkhtml().wkhtmltoimage_version
if not wkhtmltoimage_version or wkhtmltoimage_version < parse_version('0.12.0'):
raise UserError(_('wkhtmltoimage 0.12.0^ is required in order to render images from html'))
command_args = [
@ -475,17 +479,19 @@ class IrActionsReport(models.Model):
with ExitStack() as stack:
files = []
for body in bodies:
input_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix='.html', prefix='report_image_html_input.tmp.'))
output_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix=f'.{image_format}', prefix='report_image_output.tmp.'))
input_file.write(body.encode())
files.append((input_file, output_file))
(input_fd, input_path) = tempfile.mkstemp(suffix='.html', prefix='report_image_html_input.tmp.')
(output_fd, output_path) = tempfile.mkstemp(suffix=f'.{image_format}', prefix='report_image_output.tmp.')
stack.callback(os.remove, input_path)
stack.callback(os.remove, output_path)
os.close(output_fd)
with closing(os.fdopen(input_fd, 'wb')) as input_file:
input_file.write(body.encode())
files.append((input_path, output_path))
output_images = []
for input_file, output_file in files:
# smaller bodies may be held in a python buffer until close, force flush
input_file.flush()
wkhtmltoimage = [_get_wkhtmltoimage_bin()] + command_args + [input_file.name, output_file.name]
for (input_path, output_path) in files:
wkhtmltoimage = [_wkhtml().wkhtmltoimage_bin, *command_args, input_path, output_path]
# start and block, no need for parallelism for now
completed_process = subprocess.run(wkhtmltoimage, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, check=False)
completed_process = subprocess.run(wkhtmltoimage, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, check=False, encoding='utf-8')
if completed_process.returncode:
message = _(
'Wkhtmltoimage failed (error code: %(error_code)s). Message: %(error_message_end)s',
@ -495,7 +501,8 @@ class IrActionsReport(models.Model):
_logger.warning(message)
output_images.append(None)
else:
output_images.append(output_file.read())
with open(output_path, 'rb') as output_file:
output_images.append(output_file.read())
return output_images
@api.model
@ -511,7 +518,7 @@ class IrActionsReport(models.Model):
'''Execute wkhtmltopdf as a subprocess in order to convert html given in input into a pdf
document.
:param list[str] bodies: The html bodies of the report, one per page.
:param Iterable[str] bodies: The html bodies of the report, one per page.
:param report_ref: report reference that is needed to get report paperformat.
:param str header: The html header of the report containing all headers.
:param str footer: The html footer of the report containing all footers.
@ -531,107 +538,107 @@ class IrActionsReport(models.Model):
set_viewport_size=set_viewport_size)
files_command_args = []
temporary_files = []
temp_session = None
# Passing the cookie to wkhtmltopdf in order to resolve internal links.
if request and request.db:
# Create a temporary session which will not create device logs
temp_session = root.session_store.new()
temp_session.update({
**request.session,
'debug': '',
'_trace_disable': True,
})
if temp_session.uid:
temp_session.session_token = security.compute_session_token(temp_session, self.env)
root.session_store.save(temp_session)
def delete_file(file_path):
try:
os.unlink(file_path)
except OSError:
_logger.error('Error when trying to remove file %s', file_path)
base_url = self._get_report_url()
domain = urlparse(base_url).hostname
cookie = f'session_id={temp_session.sid}; HttpOnly; domain={domain}; path=/;'
cookie_jar_file_fd, cookie_jar_file_path = tempfile.mkstemp(suffix='.txt', prefix='report.cookie_jar.tmp.')
temporary_files.append(cookie_jar_file_path)
with closing(os.fdopen(cookie_jar_file_fd, 'wb')) as cookie_jar_file:
cookie_jar_file.write(cookie.encode())
command_args.extend(['--cookie-jar', cookie_jar_file_path])
with ExitStack() as stack:
if header:
head_file_fd, head_file_path = tempfile.mkstemp(suffix='.html', prefix='report.header.tmp.')
with closing(os.fdopen(head_file_fd, 'wb')) as head_file:
head_file.write(header.encode())
temporary_files.append(head_file_path)
files_command_args.extend(['--header-html', head_file_path])
if footer:
foot_file_fd, foot_file_path = tempfile.mkstemp(suffix='.html', prefix='report.footer.tmp.')
with closing(os.fdopen(foot_file_fd, 'wb')) as foot_file:
foot_file.write(footer.encode())
temporary_files.append(foot_file_path)
files_command_args.extend(['--footer-html', foot_file_path])
# Passing the cookie to wkhtmltopdf in order to resolve internal links.
if request and request.db:
# Create a temporary session which will not create device logs
temp_session = root.session_store.new()
temp_session.update({
**request.session,
'debug': '',
'_trace_disable': True,
})
if temp_session.uid:
temp_session.session_token = security.compute_session_token(temp_session, self.env)
root.session_store.save(temp_session)
stack.callback(root.session_store.delete, temp_session)
paths = []
for i, body in enumerate(bodies):
prefix = '%s%d.' % ('report.body.tmp.', i)
body_file_fd, body_file_path = tempfile.mkstemp(suffix='.html', prefix=prefix)
with closing(os.fdopen(body_file_fd, 'wb')) as body_file:
# HACK: wkhtmltopdf doesn't like big table at all and the
# processing time become exponential with the number
# of rows (like 1H for 250k rows).
#
# So we split the table into multiple tables containing
# 500 rows each. This reduce the processing time to 1min
# for 250k rows. The number 500 was taken from opw-1689673
if len(body) < 4 * 1024 * 1024: # 4Mib
body_file.write(body.encode())
else:
tree = lxml.html.fromstring(body)
_split_table(tree, 500)
body_file.write(lxml.html.tostring(tree))
paths.append(body_file_path)
temporary_files.append(body_file_path)
base_url = self._get_report_url()
domain = urlparse(base_url).hostname
cookie = f'session_id={temp_session.sid}; HttpOnly; domain={domain}; path=/;'
cookie_jar_file_fd, cookie_jar_file_path = tempfile.mkstemp(suffix='.txt', prefix='report.cookie_jar.tmp.')
stack.callback(delete_file, cookie_jar_file_path)
with closing(os.fdopen(cookie_jar_file_fd, 'wb')) as cookie_jar_file:
cookie_jar_file.write(cookie.encode())
command_args.extend(['--cookie-jar', cookie_jar_file_path])
pdf_report_fd, pdf_report_path = tempfile.mkstemp(suffix='.pdf', prefix='report.tmp.')
os.close(pdf_report_fd)
temporary_files.append(pdf_report_path)
if header:
head_file_fd, head_file_path = tempfile.mkstemp(suffix='.html', prefix='report.header.tmp.')
with closing(os.fdopen(head_file_fd, 'wb')) as head_file:
head_file.write(header.encode())
stack.callback(delete_file, head_file_path)
files_command_args.extend(['--header-html', head_file_path])
if footer:
foot_file_fd, foot_file_path = tempfile.mkstemp(suffix='.html', prefix='report.footer.tmp.')
with closing(os.fdopen(foot_file_fd, 'wb')) as foot_file:
foot_file.write(footer.encode())
stack.callback(delete_file, foot_file_path)
files_command_args.extend(['--footer-html', foot_file_path])
try:
wkhtmltopdf = [_get_wkhtmltopdf_bin()] + command_args + files_command_args + paths + [pdf_report_path]
process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8")
_out, err = process.communicate()
paths = []
body_idx = 0
for body_idx, body in enumerate(bodies):
prefix = f'report.body.tmp.{body_idx}.'
body_file_fd, body_file_path = tempfile.mkstemp(suffix='.html', prefix=prefix)
with closing(os.fdopen(body_file_fd, 'wb')) as body_file:
# HACK: wkhtmltopdf doesn't like big table at all and the
# processing time become exponential with the number
# of rows (like 1H for 250k rows).
#
# So we split the table into multiple tables containing
# 500 rows each. This reduce the processing time to 1min
# for 250k rows. The number 500 was taken from opw-1689673
if len(body) < 4 * 1024 * 1024: # 4Mib
body_file.write(body.encode())
else:
tree = lxml.html.fromstring(body)
_split_table(tree, 500)
body_file.write(lxml.html.tostring(tree))
paths.append(body_file_path)
stack.callback(delete_file, body_file_path)
if process.returncode not in [0, 1]:
if process.returncode == -11:
pdf_report_fd, pdf_report_path = tempfile.mkstemp(suffix='.pdf', prefix='report.tmp.')
os.close(pdf_report_fd)
stack.callback(delete_file, pdf_report_path)
process = _run_wkhtmltopdf(command_args + files_command_args + paths + [pdf_report_path])
err = process.stderr
match process.returncode:
case 0:
pass
case 1:
if body_idx:
wk_version = _wkhtml().version
if '(with patched qt)' not in wk_version:
if modules.module.current_test:
raise unittest.SkipTest("Unable to convert multiple documents via wkhtmltopdf using unpatched QT")
raise UserError(_("Tried to convert multiple documents in wkhtmltopdf using unpatched QT"))
_logger.warning("wkhtmltopdf: %s", err)
case c:
message = _(
'Wkhtmltopdf failed (error code: %(error_code)s). Memory limit too low or maximum file number of subprocess reached. Message : %(message)s',
error_code=process.returncode,
error_code=c,
message=err[-1000:],
)
else:
message = _(
) if c == -11 else _(
'Wkhtmltopdf failed (error code: %(error_code)s). Message: %(message)s',
error_code=process.returncode,
error_code=c,
message=err[-1000:],
)
_logger.warning(message)
raise UserError(message)
else:
if err:
_logger.warning('wkhtmltopdf: %s' % err)
except:
raise
finally:
if temp_session:
root.session_store.delete(temp_session)
_logger.warning(message)
raise UserError(message)
with open(pdf_report_path, 'rb') as pdf_document:
pdf_content = pdf_document.read()
# Manual cleanup of the temporary files
for temporary_file in temporary_files:
try:
os.unlink(temporary_file)
except (OSError, IOError):
_logger.error('Error when trying to remove file %s' % temporary_file)
with open(pdf_report_path, 'rb') as pdf_document:
pdf_content = pdf_document.read()
return pdf_content
@ -648,7 +655,9 @@ class IrActionsReport(models.Model):
@api.model
def _get_report(self, report_ref):
"""Get the report (with sudo) from a reference
report_ref: can be one of
:param report_ref: can be one of
- ir.actions.report id
- ir.actions.report record
- ir.model.data reference to ir.actions.report
@ -691,7 +700,7 @@ class IrActionsReport(models.Model):
kwargs = {k: validator(kwargs.get(k, v)) for k, (v, validator) in defaults.items()}
kwargs['humanReadable'] = kwargs.pop('humanreadable')
if kwargs['humanReadable']:
kwargs['fontName'] = _DEFAULT_BARCODE_FONT
kwargs['fontName'] = get_barcode_font()
if kwargs['width'] * kwargs['height'] > 1200000 or max(kwargs['width'], kwargs['height']) > 10000:
raise ValueError("Barcode too large")
@ -706,7 +715,8 @@ class IrActionsReport(models.Model):
elif barcode_type == 'QR':
# for `QR` type, `quiet` is not supported. And is simply ignored.
# But we can use `barBorder` to get a similar behaviour.
if kwargs['quiet']:
# quiet=True & barBorder=4 by default cf above, remove border only if quiet=False
if not kwargs['quiet']:
kwargs['barBorder'] = 0
if barcode_type in ('EAN8', 'EAN13') and not check_barcode_encoding(value, barcode_type):
@ -740,10 +750,12 @@ class IrActionsReport(models.Model):
@api.model
def get_available_barcode_masks(self):
""" Hook for extension.
This function returns the available QR-code masks, in the form of a
list of (code, mask_function) elements, where code is a string identifying
the mask uniquely, and mask_function is a function returning a reportlab
Drawing object with the result of the mask, and taking as parameters:
- width of the QR-code, in pixels
- height of the QR-code, in pixels
- reportlab Drawing object containing the barcode to apply the mask on
@ -811,7 +823,7 @@ class IrActionsReport(models.Model):
stream = None
attachment = None
if not has_duplicated_ids and report_sudo.attachment and not self._context.get("report_pdf_no_attachment"):
if not has_duplicated_ids and report_sudo.attachment and not self.env.context.get("report_pdf_no_attachment"):
attachment = report_sudo.retrieve_attachment(record)
# Extract the stream from the attachment.
@ -871,9 +883,9 @@ class IrActionsReport(models.Model):
report_ref=report_ref,
header=header,
footer=footer,
landscape=self._context.get('landscape'),
landscape=self.env.context.get('landscape'),
specific_paperformat_args=specific_paperformat_args,
set_viewport_size=self._context.get('set_viewport_size'),
set_viewport_size=self.env.context.get('set_viewport_size'),
)
pdf_content_stream = io.BytesIO(pdf_content)
@ -1002,7 +1014,7 @@ class IrActionsReport(models.Model):
data.setdefault('report_type', 'pdf')
# In case of test environment without enough workers to perform calls to wkhtmltopdf,
# fallback to render_html.
if (tools.config['test_enable'] or tools.config['test_file']) and not self.env.context.get('force_report_rendering'):
if (modules.module.current_test or tools.config['test_enable']) and not self.env.context.get('force_report_rendering'):
return self._render_qweb_html(report_ref, res_ids, data=data)
self = self.with_context(webp_as_jpg=True)
@ -1025,7 +1037,7 @@ class IrActionsReport(models.Model):
report_sudo = self._get_report(report_ref)
# Generate the ir.attachment if needed.
if not has_duplicated_ids and report_sudo.attachment and not self._context.get("report_pdf_no_attachment"):
if not has_duplicated_ids and report_sudo.attachment and not self.env.context.get("report_pdf_no_attachment"):
attachment_vals_list = self._prepare_pdf_report_attachment_vals_list(report_sudo, collected_streams)
if attachment_vals_list:
attachment_names = ', '.join(x['name'] for x in attachment_vals_list)

View file

@ -4,9 +4,8 @@ from glob import glob
from logging import getLogger
from werkzeug import urls
import odoo
import odoo.modules.module # get_manifest, don't from-import it
from odoo import api, fields, models, tools
from odoo.modules import Manifest
from odoo.tools import misc
from odoo.tools.constants import ASSET_EXTENSIONS, EXTERNAL_ASSET
@ -68,10 +67,10 @@ class IrAsset(models.Model):
self.env.registry.clear_cache('assets')
return super().create(vals_list)
def write(self, values):
def write(self, vals):
if self:
self.env.registry.clear_cache('assets')
return super().write(values)
return super().write(vals)
def unlink(self):
self.env.registry.clear_cache('assets')
@ -105,11 +104,15 @@ class IrAsset(models.Model):
def _parse_bundle_name(self, bundle_name, debug_assets):
bundle_name, asset_type = bundle_name.rsplit('.', 1)
rtl = False
autoprefix = False
if not debug_assets:
bundle_name, min_ = bundle_name.rsplit('.', 1)
if min_ != 'min':
raise ValueError("'min' expected in extension in non debug mode")
if asset_type == 'css':
if bundle_name.endswith('.autoprefixed'):
bundle_name = bundle_name[:-13]
autoprefix = True
if bundle_name.endswith('.rtl'):
bundle_name = bundle_name[:-4]
rtl = True
@ -117,7 +120,7 @@ class IrAsset(models.Model):
raise ValueError('Only js and css assets bundle are supported for now')
if len(bundle_name.split('.')) != 2:
raise ValueError(f'{bundle_name} is not a valid bundle name, should have two parts')
return bundle_name, rtl, asset_type
return bundle_name, rtl, asset_type, autoprefix
@tools.conditional(
'xml' not in tools.config['dev_mode'],
@ -183,7 +186,7 @@ class IrAsset(models.Model):
# 2. Process all addons' manifests.
for addon in addons:
for command in odoo.modules.module._get_manifest_cached(addon)['assets'].get(bundle, ()):
for command in Manifest.for_addon(addon)['assets'].get(bundle, ()):
directive, target, path_def = self._process_command(command)
self._process_path(bundle, directive, target, path_def, asset_paths, seen, addons, installed, bundle_start_index, **assets_params)
@ -240,7 +243,7 @@ class IrAsset(models.Model):
# this should never happen
raise ValueError("Unexpected directive")
def _get_related_assets(self, domain):
def _get_related_assets(self, domain, **kwargs):
"""
Returns a set of assets matching the domain, regardless of their
active state. This method can be overridden to filter the results.
@ -258,8 +261,8 @@ class IrAsset(models.Model):
a specific asset and target the right bundle, i.e. the first one
defining the target path.
:param target_path_def: string: path to match.
:root_bundle: string: bundle from which to initiate the search.
:param str target_path_def: path to match.
:param str root_bundle: bundle from which to initiate the search.
:returns: the first matching bundle or None
"""
installed = self._get_installed_addons_list()
@ -273,7 +276,7 @@ class IrAsset(models.Model):
return root_bundle
def _get_active_addons_list(self):
def _get_active_addons_list(self, **kwargs):
"""Can be overridden to filter the returned list of active modules."""
return self._get_installed_addons_list()
@ -285,10 +288,10 @@ class IrAsset(models.Model):
IrModule = self.env['ir.module.module']
def mapper(addon):
manif = odoo.modules.module._get_manifest_cached(addon)
manif = Manifest.for_addon(addon) or {}
from_terp = IrModule.get_values_from_terp(manif)
from_terp['name'] = addon
from_terp['depends'] = manif.get('depends', ['base'])
from_terp['depends'] = manif.get('depends') or ['base']
return from_terp
manifs = map(mapper, addons_tuple)
@ -307,9 +310,7 @@ class IrAsset(models.Model):
Returns the list of all installed addons.
:returns: string[]: list of module names
"""
# Main source: the current registry list
# Second source of modules: server wide modules
return self.env.registry._init_modules.union(odoo.conf.server_wide_modules or [])
return self.env.registry._init_modules.union(tools.config['server_wide_modules'])
def _get_paths(self, path_def, installed):
"""
@ -330,35 +331,31 @@ class IrAsset(models.Model):
:param path_def: the definition (glob) of file paths to match
:param installed: the list of installed addons
:param extensions: a list of extensions that found files must match
:returns: a list of tuple: (path, full_path, modified)
"""
paths = None
path_def = fs2web(path_def) # we expect to have all path definition unix style or url style, this is a safety
path_parts = [part for part in path_def.split('/') if part]
addon = path_parts[0]
addon_manifest = odoo.modules.module._get_manifest_cached(addon)
addon_manifest = Manifest.for_addon(addon, display_warning=False)
safe_path = True
safe_path = False
if addon_manifest:
if addon not in installed:
# Assert that the path is in the installed addons
raise Exception(f"Unallowed to fetch files from addon {addon} for file {path_def}")
addons_path = addon_manifest['addons_path']
full_path = os.path.normpath(os.sep.join([addons_path, *path_parts]))
addons_path = addon_manifest.addons_path
full_path = os.path.normpath(os.path.join(addons_path, *path_parts))
# forbid escape from the current addon
# "/mymodule/../myothermodule" is forbidden
static_prefix = os.sep.join([addons_path, addon, 'static', ''])
static_prefix = os.path.join(addon_manifest.path, 'static', '')
if full_path.startswith(static_prefix):
paths_with_timestamps = _glob_static_file(full_path)
paths = [
(fs2web(absolute_path[len(addons_path):]), absolute_path, timestamp)
for absolute_path, timestamp in paths_with_timestamps
]
else:
safe_path = False
else:
safe_path = False
safe_path = True
if not paths and not can_aggregate(path_def): # http:// or /web/content
paths = [(path_def, EXTERNAL_ASSET, -1)]

View file

@ -10,18 +10,39 @@ import os
import psycopg2
import re
import uuid
import warnings
import werkzeug
from collections import defaultdict
from collections.abc import Collection
from odoo import api, fields, models, SUPERUSER_ID, tools, _
from odoo.exceptions import AccessError, ValidationError, UserError
from odoo import api, fields, models, _
from odoo.exceptions import AccessError, MissingError, ValidationError, UserError
from odoo.fields import Domain
from odoo.http import Stream, root, request
from odoo.tools import config, human_size, image, str2bool, consteq
from odoo.tools import config, consteq, human_size, image, split_every, str2bool, OrderedSet
from odoo.tools.constants import PREFETCH_MAX
from odoo.tools.mimetypes import guess_mimetype, fix_filename_extension, _olecf_mimetypes
from odoo.osv import expression
from odoo.tools.misc import limited_field_access_token
_logger = logging.getLogger(__name__)
SECURITY_FIELDS = ('res_model', 'res_id', 'create_uid', 'public', 'res_field')
def condition_values(model, field_name, domain):
"""Get the values in the domain for a specific field name.
Returns the values appearing in the `in` conditions that would be restricted
to by the domain.
"""
domain = domain.optimize(model)
for condition in domain.map_conditions(
lambda cond: cond
if cond.field_expr == field_name and cond.operator == 'in'
else Domain.TRUE
).optimize(model).iter_conditions():
return condition.value
return None
class IrAttachment(models.Model):
@ -56,7 +77,7 @@ class IrAttachment(models.Model):
@api.model
def _filestore(self):
return config.filestore(self._cr.dbname)
return config.filestore(self.env.cr.dbname)
@api.model
def _get_storage_domain(self):
@ -74,7 +95,7 @@ class IrAttachment(models.Model):
# Migrate only binary attachments and bypass the res_field automatic
# filter added in _search override
self.search(expression.AND([
self.search(Domain.AND([
self._get_storage_domain(),
['&', ('type', '=', 'binary'), '|', ('res_field', '=', False), ('res_field', '!=', False)]
]))._migrate()
@ -82,11 +103,6 @@ class IrAttachment(models.Model):
def _migrate(self):
record_count = len(self)
storage = self._storage().upper()
# When migrating to filestore verifying if the directory has write permission
if storage == 'FILE':
filestore = self._filestore()
if not os.access(filestore, os.W_OK):
raise PermissionError("Write permission denied for filestore directory.")
for index, attach in enumerate(self):
_logger.debug("Migrate attachment %s/%s to %s", index + 1, record_count, storage)
# pass mimetype, to avoid recomputation
@ -95,7 +111,7 @@ class IrAttachment(models.Model):
@api.model
def _full_path(self, path):
# sanitize path
path = re.sub('[.]', '', path)
path = re.sub('[.:]', '', path)
path = path.strip('/\\')
return os.path.join(self._filestore(), path)
@ -115,13 +131,13 @@ class IrAttachment(models.Model):
return fname, full_path
@api.model
def _file_read(self, fname):
def _file_read(self, fname, size=None):
assert isinstance(self, IrAttachment)
full_path = self._full_path(fname)
try:
with open(full_path, 'rb') as f:
return f.read()
except (IOError, OSError):
return f.read(size)
except OSError:
_logger.info("_read_file reading %s", full_path, exc_info=True)
return b''
@ -135,8 +151,9 @@ class IrAttachment(models.Model):
fp.write(bin_value)
# add fname to checklist, in case the transaction aborts
self._mark_for_gc(fname)
except IOError:
_logger.info("_file_write writing %s", full_path, exc_info=True)
except OSError:
_logger.info("_file_write writing %s", full_path)
raise
return fname
@api.model
@ -147,7 +164,7 @@ class IrAttachment(models.Model):
def _mark_for_gc(self, fname):
""" Add ``fname`` in a checklist for the filestore garbage collection. """
assert isinstance(self, IrAttachment)
fname = re.sub('[.]', '', fname).strip('/\\')
fname = re.sub('[.:]', '', fname).strip('/\\')
# we use a spooldir: add an empty file in the subdirectory 'checklist'
full_path = os.path.join(self._full_path('checklist'), fname)
if not os.path.exists(full_path):
@ -171,7 +188,7 @@ class IrAttachment(models.Model):
# the LOCK statement will wait until those concurrent transactions end.
# But this transaction will not see the new attachements if it has done
# other requests before the LOCK (like the method _storage() above).
cr = self._cr
cr = self.env.cr
cr.commit()
# prevent all concurrent updates on ir_attachment while collecting,
@ -201,7 +218,7 @@ class IrAttachment(models.Model):
# Clean up the checklist. The checklist is split in chunks and files are garbage-collected
# for each chunk.
removed = 0
for names in self.env.cr.split_for_in_conditions(checklist):
for names in split_every(self.env.cr.IN_MAX, checklist):
# determine which files to keep among the checklist
self.env.cr.execute("SELECT store_fname FROM ir_attachment WHERE store_fname IN %s", [names])
whitelist = set(row[0] for row in self.env.cr.fetchall())
@ -214,7 +231,7 @@ class IrAttachment(models.Model):
os.unlink(self._full_path(fname))
_logger.debug("_file_gc unlinked %s", self._full_path(fname))
removed += 1
except (OSError, IOError):
except OSError:
_logger.info("_file_gc could not unlink %s", self._full_path(fname), exc_info=True)
with contextlib.suppress(OSError):
os.unlink(filepath)
@ -224,7 +241,7 @@ class IrAttachment(models.Model):
@api.depends('store_fname', 'db_datas', 'file_size')
@api.depends_context('bin_size')
def _compute_datas(self):
if self._context.get('bin_size'):
if self.env.context.get('bin_size'):
for attach in self:
attach.datas = human_size(attach.file_size)
return
@ -247,17 +264,31 @@ class IrAttachment(models.Model):
self._set_attachment_data(lambda attach: base64.b64decode(attach.datas or b''))
def _set_attachment_data(self, asbytes):
old_fnames = []
checksum_raw_map = {}
for attach in self:
# compute the fields that depend on datas
bin_data = asbytes(attach)
vals = self._get_datas_related_values(bin_data, attach.mimetype)
if bin_data:
checksum_raw_map[vals['checksum']] = bin_data
# take current location in filestore to possibly garbage-collect it
fname = attach.store_fname
if attach.store_fname:
old_fnames.append(attach.store_fname)
# write as superuser, as user probably does not have write access
super(IrAttachment, attach.sudo()).write(vals)
if fname:
if self._storage() != 'db':
# before touching the filestore, flush to prevent the GC from
# running until the end of the transaction
self.flush_recordset(['checksum', 'store_fname'])
for fname in old_fnames:
self._file_delete(fname)
for checksum, raw in checksum_raw_map.items():
self._file_write(raw, checksum)
def _get_datas_related_values(self, data, mimetype):
checksum = self._compute_checksum(data)
@ -273,7 +304,7 @@ class IrAttachment(models.Model):
'db_datas': data,
}
if data and self._storage() != 'db':
values['store_fname'] = self._file_write(data, values['checksum'])
values['store_fname'], _full_path = self._get_path(data, checksum)
values['db_datas'] = False
return values
@ -426,20 +457,14 @@ class IrAttachment(models.Model):
mimetype = fields.Char('Mime Type', readonly=True)
index_content = fields.Text('Indexed Content', readonly=True, prefetch=False)
def _auto_init(self):
res = super(IrAttachment, self)._auto_init()
tools.create_index(self._cr, 'ir_attachment_res_idx',
self._table, ['res_model', 'res_id'])
return res
_res_idx = models.Index("(res_model, res_id)")
@api.constrains('type', 'url')
def _check_serving_attachments(self):
if self.env.is_admin():
return
for attachment in self:
# restrict writing on attachments that could be served by the
# ir.http's dispatch exception handling
# XDO note: this should be done in check(write), constraints for access rights?
# XDO note: if read on sudo, read twice, one for constraints, one for _inverse_datas as user
if attachment.type == 'binary' and attachment.url:
has_group = self.env.user.has_group
@ -449,155 +474,230 @@ class IrAttachment(models.Model):
@api.model
def check(self, mode, values=None):
""" Restricts the access to an ir.attachment, according to referred mode """
if self.env.is_superuser():
return True
warnings.warn("Since 19.0, use check_access", DeprecationWarning, stacklevel=2)
# Always require an internal user (aka, employee) to access to a attachment
if not (self.env.is_admin() or self.env.user._is_internal()):
raise AccessError(_("Sorry, you are not allowed to access this document."))
self.check_access(mode)
if values and any(self._inaccessible_comodel_records({values.get('res_model'): [values.get('res_id')]}, mode)):
raise AccessError(_("Sorry, you are not allowed to access this document."))
def _check_access(self, operation):
"""Check access for attachments.
Rules:
- `public` is always accessible for reading.
- If we have `res_model and res_id`, the attachment is accessible if the
referenced model is accessible. Also, when `res_field != False` and
the user is not an administrator, we check the access on the field.
- If we don't have a referenced record, the attachment is accessible to
the administrator and the creator of the attachment.
"""
res = super()._check_access(operation)
remaining = self
error_func = None
forbidden_ids = OrderedSet()
if res:
forbidden, error_func = res
if forbidden == self:
return res
remaining -= forbidden
forbidden_ids.update(forbidden._ids)
elif not self:
return None
if operation in ('create', 'unlink'):
# check write operation instead of unlinking and creating for
# related models and field access
operation = 'write'
# collect the records to check (by model)
model_ids = defaultdict(set) # {model_name: set(ids)}
if self:
# DLE P173: `test_01_portal_attachment`
self.env['ir.attachment'].flush_model(['res_model', 'res_id', 'create_uid', 'public', 'res_field'])
self._cr.execute('SELECT res_model, res_id, create_uid, public, res_field FROM ir_attachment WHERE id IN %s', [tuple(self.ids)])
for res_model, res_id, create_uid, public, res_field in self._cr.fetchall():
if public and mode == 'read':
continue
if not self.env.is_system():
if not res_id and create_uid != self.env.uid:
raise AccessError(_("Sorry, you are not allowed to access this document."))
if res_field:
field = self.env[res_model]._fields[res_field]
if not field.is_accessible(self.env):
raise AccessError(_("Sorry, you are not allowed to access this document."))
if not (res_model and res_id):
continue
model_ids[res_model].add(res_id)
if values and values.get('res_model') and values.get('res_id'):
model_ids[values['res_model']].add(values['res_id'])
# check access rights on the records
for res_model, res_ids in model_ids.items():
# ignore attachments that are not attached to a resource anymore
# when checking access rights (resource was deleted but attachment
# was not)
if res_model not in self.env:
att_model_ids = [] # [(att_id, (res_model, res_id))]
# DLE P173: `test_01_portal_attachment`
remaining = remaining.sudo()
remaining.fetch(SECURITY_FIELDS) # fetch only these fields
for attachment in remaining:
if attachment.public and operation == 'read':
continue
if res_model == 'res.users' and len(res_ids) == 1 and self.env.uid == list(res_ids)[0]:
att_id = attachment.id
res_model, res_id = attachment.res_model, attachment.res_id
if not self.env.is_system():
if not res_id and attachment.create_uid.id != self.env.uid:
forbidden_ids.add(att_id)
continue
if res_field := attachment.res_field:
try:
field = self.env[res_model]._fields[res_field]
except KeyError:
# field does not exist
field = None
if field is None or not self._has_field_access(field, operation):
forbidden_ids.add(att_id)
continue
if res_model and res_id:
model_ids[res_model].add(res_id)
att_model_ids.append((att_id, (res_model, res_id)))
forbidden_res_model_id = set(self._inaccessible_comodel_records(model_ids, operation))
forbidden_ids.update(att_id for att_id, res in att_model_ids if res in forbidden_res_model_id)
if forbidden_ids:
forbidden = self.browse(forbidden_ids)
forbidden.invalidate_recordset(SECURITY_FIELDS) # avoid cache pollution
if error_func is None:
def error_func():
return AccessError(self.env._(
"Sorry, you are not allowed to access this document. "
"Please contact your system administrator.\n\n"
"(Operation: %(operation)s)\n\n"
"Records: %(records)s, User: %(user)s",
operation=operation,
records=forbidden[:6],
user=self.env.uid,
))
return forbidden, error_func
return None
def _inaccessible_comodel_records(self, model_and_ids: dict[str, Collection[int]], operation: str):
# check access rights on the records
if self.env.su:
return
for res_model, res_ids in model_and_ids.items():
res_ids = OrderedSet(filter(None, res_ids))
if not res_model or not res_ids:
# nothing to check
continue
# forbid access to attachments linked to removed models as we do not
# know what persmissions should be checked
if res_model not in self.env:
for res_id in res_ids:
yield res_model, res_id
continue
records = self.env[res_model].browse(res_ids)
if res_model == 'res.users' and len(records) == 1 and self.env.uid == records.id:
# by default a user cannot write on itself, despite the list of writeable fields
# e.g. in the case of a user inserting an image into his image signature
# we need to bypass this check which would needlessly throw us away
continue
records = self.env[res_model].browse(res_ids).exists()
# For related models, check if we can write to the model, as unlinking
# and creating attachments can be seen as an update to the model
access_mode = 'write' if mode in ('create', 'unlink') else mode
records.check_access(access_mode)
@api.model
def _filter_attachment_access(self, attachment_ids):
"""Filter the given attachment to return only the records the current user have access to.
:param attachment_ids: List of attachment ids we want to filter
:return: <ir.attachment> the current user have access to
"""
ret_attachments = self.env['ir.attachment']
attachments = self.browse(attachment_ids)
if not attachments.has_access('read'):
return ret_attachments
for attachment in attachments.sudo():
# Use SUDO here to not raise an error during the prefetch
# And then drop SUDO right to check if we can access it
try:
attachment.sudo(False).check('read')
ret_attachments |= attachment
except AccessError:
continue
return ret_attachments
records = records._filtered_access(operation)
except MissingError:
records = records.exists()._filtered_access(operation)
res_ids.difference_update(records._ids)
for res_id in res_ids:
yield res_model, res_id
@api.model
def _search(self, domain, offset=0, limit=None, order=None):
# add res_field=False in domain if not present; the arg[0] trick below
# works for domain items and '&'/'|'/'!' operators too
def _search(self, domain, offset=0, limit=None, order=None, *, active_test=True, bypass_access=False):
assert not self._active_name, "active name not supported on ir.attachment"
disable_binary_fields_attachments = False
if not self.env.context.get('skip_res_field_check') and not any(arg[0] in ('id', 'res_field') for arg in domain):
domain = Domain(domain)
if (
not self.env.context.get('skip_res_field_check')
and not any(d.field_expr in ('id', 'res_field') for d in domain.iter_conditions())
):
disable_binary_fields_attachments = True
domain = [('res_field', '=', False)] + domain
domain &= Domain('res_field', '=', False)
if self.env.is_superuser():
# rules do not apply for the superuser
return super()._search(domain, offset, limit, order)
domain = domain.optimize(self)
if self.env.su or bypass_access or domain.is_false():
return super()._search(domain, offset, limit, order, active_test=active_test, bypass_access=bypass_access)
# For attachments, the permissions of the document they are attached to
# apply, so we must remove attachments for which the user cannot access
# the linked document. For the sake of performance, fetch the fields to
# determine those permissions within the same SQL query.
fnames_to_read = ['id', 'res_model', 'res_id', 'res_field', 'public', 'create_uid']
query = super()._search(domain, offset, limit, order)
rows = self.env.execute_query(query.select(
*[self._field_to_sql(self._table, fname) for fname in fnames_to_read],
))
# General access rules
# - public == True are always accessible
sec_domain = Domain('public', '=', True)
# - res_id == False needs to be system user or creator
res_ids = condition_values(self, 'res_id', domain)
if not res_ids or False in res_ids:
if self.env.is_system():
sec_domain |= Domain('res_id', '=', False)
else:
sec_domain |= Domain('res_id', '=', False) & Domain('create_uid', '=', self.env.uid)
# determine permissions based on linked records
all_ids = []
allowed_ids = set()
model_attachments = defaultdict(lambda: defaultdict(set)) # {res_model: {res_id: set(ids)}}
for id_, res_model, res_id, res_field, public, create_uid in rows:
all_ids.append(id_)
if public:
allowed_ids.add(id_)
continue
if res_field and not self.env.is_system():
field = self.env[res_model]._fields[res_field]
if field.groups and not self.env.user.has_groups(field.groups):
# Search by res_model and res_id, filter using permissions from res_model
# - res_id != False needs then check access on the linked res_model record
# - res_field != False needs to check field access on the res_model
res_model_names = condition_values(self, 'res_model', domain)
if 0 < len(res_model_names or ()) <= 5:
env = self.with_context(active_test=False).env
for res_model_name in res_model_names:
comodel = env.get(res_model_name)
if comodel is None:
continue
codomain = Domain('res_model', '=', comodel._name)
comodel_res_ids = condition_values(self, 'res_id', domain.map_conditions(
lambda cond: codomain & cond if cond.field_expr == 'res_model' else cond
))
query = comodel._search(Domain('id', 'in', comodel_res_ids) if comodel_res_ids else Domain.TRUE)
if query.is_empty():
continue
if query.where_clause:
codomain &= Domain('res_id', 'in', query)
if not disable_binary_fields_attachments and not self.env.is_system():
accessible_fields = [
field.name
for field in comodel._fields.values()
if field.type == 'binary' or (field.relational and field.comodel_name == self._name)
if comodel._has_field_access(field, 'read')
]
accessible_fields.append(False)
codomain &= Domain('res_field', 'in', accessible_fields)
sec_domain |= codomain
if not res_id and (self.env.is_system() or create_uid == self.env.uid):
allowed_ids.add(id_)
continue
if not (res_field and disable_binary_fields_attachments) and res_model and res_id:
model_attachments[res_model][res_id].add(id_)
return super()._search(domain & sec_domain, offset, limit, order, active_test=active_test)
# check permissions on records model by model
for res_model, targets in model_attachments.items():
if res_model not in self.env:
allowed_ids.update(id_ for ids in targets.values() for id_ in ids)
continue
if not self.env[res_model].has_access('read'):
continue
# filter ids according to what access rules permit
ResModel = self.env[res_model].with_context(active_test=False)
for res_id in ResModel.search([('id', 'in', list(targets))])._ids:
allowed_ids.update(targets[res_id])
# filter out all_ids by keeping allowed_ids only
result = [id_ for id_ in all_ids if id_ in allowed_ids]
# If the original search reached the limit, it is important the
# filtered record set does so too. When a JS view receive a
# record set whose length is below the limit, it thinks it
# reached the last page. To avoid an infinite recursion due to the
# permission checks the sub-call need to be aware of the number of
# expected records to retrieve
if len(all_ids) == limit and len(result) < self._context.get('need', limit):
need = self._context.get('need', limit) - len(result)
more_ids = self.with_context(need=need)._search(
domain, offset + len(all_ids), limit, order,
)
result.extend(list(more_ids)[:limit - len(result)])
return self.browse(result)._as_query(order)
# We do not have a small restriction on res_model. We still need to
# support other queries such as: `('id', 'in' ...)`.
# Restrict with domain and add all attachments linked to a model.
domain &= sec_domain | Domain('res_model', '!=', False)
domain = domain.optimize_full(self)
ordered = bool(order)
if limit is None:
records = self.sudo().with_context(active_test=False).search_fetch(
domain, SECURITY_FIELDS, order=order).sudo(False)
return records._filtered_access('read')[offset:]._as_query(ordered)
# Fetch by small batches
sub_offset = 0
limit += offset
result = []
if not ordered:
# By default, order by model to batch access checks.
order = 'res_model nulls first, id'
while len(result) < limit:
records = self.sudo().with_context(active_test=False).search_fetch(
domain,
SECURITY_FIELDS,
offset=sub_offset,
limit=PREFETCH_MAX,
order=order,
).sudo(False)
result.extend(records._filtered_access('read')._ids)
if len(records) < PREFETCH_MAX:
# There are no more records
break
sub_offset += PREFETCH_MAX
return self.browse(result[offset:limit])._as_query(ordered)
def write(self, vals):
self.check('write', values=vals)
self.check_access('write')
if vals.get('res_model') or vals.get('res_id'):
model_and_ids = defaultdict(OrderedSet)
if 'res_model' in vals and 'res_id' in vals:
model_and_ids[vals['res_model']].add(vals['res_id'])
else:
for record in self:
model_and_ids[vals.get('res_model', record.res_model)].add(vals.get('res_id', record.res_id))
if any(self._inaccessible_comodel_records(model_and_ids, 'write')):
raise AccessError(_("Sorry, you are not allowed to access this document."))
# remove computed field depending of datas
for field in ('file_size', 'checksum', 'store_fname'):
vals.pop(field, False)
if 'mimetype' in vals or 'datas' in vals or 'raw' in vals:
vals = self._check_contents(vals)
return super(IrAttachment, self).write(vals)
res = super().write(vals)
if 'url' in vals or 'type' in vals:
self._check_serving_attachments()
return res
def copy_data(self, default=None):
default = dict(default or {})
@ -609,16 +709,12 @@ class IrAttachment(models.Model):
return vals_list
def unlink(self):
if not self:
return True
self.check('unlink')
# First delete in the database, *then* in the filesystem if the
# database allowed it. Helps avoid errors when concurrent transactions
# are deleting the same file, and some of the transactions are
# rolled back by PostgreSQL (due to concurrent updates detection).
to_delete = set(attach.store_fname for attach in self if attach.store_fname)
res = super(IrAttachment, self).unlink()
to_delete = OrderedSet(attach.store_fname for attach in self if attach.store_fname)
res = super().unlink()
for file_path in to_delete:
self._file_delete(file_path)
@ -635,6 +731,7 @@ class IrAttachment(models.Model):
in vals.items()
if key not in ('file_size', 'checksum', 'store_fname')
} for vals in vals_list]
checksum_raw_map = {}
for values in vals_list:
values = self._check_contents(values)
@ -643,10 +740,11 @@ class IrAttachment(models.Model):
if isinstance(raw, str):
# b64decode handles str input but raw needs explicit encoding
raw = raw.encode()
values.update(self._get_datas_related_values(
raw or base64.b64decode(datas or b''),
values['mimetype']
))
elif not raw:
raw = base64.b64decode(datas or b'')
values.update(self._get_datas_related_values(raw, values['mimetype']))
if raw:
checksum_raw_map[values['checksum']] = raw
# 'check()' only uses res_model and res_id from values, and make an exists.
# We can group the values by model, res_id to make only one query when
@ -655,10 +753,17 @@ class IrAttachment(models.Model):
record_tuple_set.add(record_tuple)
# don't use possible contextual recordset for check, see commit for details
Attachments = self.browse()
model_and_ids = defaultdict(set)
for res_model, res_id in record_tuple_set:
Attachments.check('create', values={'res_model':res_model, 'res_id':res_id})
return super().create(vals_list)
model_and_ids[res_model].add(res_id)
if any(self._inaccessible_comodel_records(model_and_ids, 'write')):
raise AccessError(_("Sorry, you are not allowed to access this document."))
records = super().create(vals_list)
if self._storage() != 'db':
for checksum, raw in checksum_raw_map.items():
self._file_write(raw, checksum)
records._check_serving_attachments()
return records
def _post_add_create(self, **kwargs):
# TODO master: rename to _post_upload, better indicating its usage
@ -675,6 +780,15 @@ class IrAttachment(models.Model):
tokens.append(access_token)
return tokens
def _get_raw_access_token(self):
"""Return a scoped access token for the `raw` field. The token can be
used with `ir_binary._find_record` to bypass access rights.
:rtype: str
"""
self.ensure_one()
return limited_field_access_token(self, "raw", scope="binary")
@api.model
def create_unique(self, values_list):
ids = []
@ -703,28 +817,6 @@ class IrAttachment(models.Model):
def _generate_access_token(self):
return str(uuid.uuid4())
def validate_access(self, access_token):
self.ensure_one()
record_sudo = self.sudo()
if access_token:
tok = record_sudo.with_context(prefetch_fields=False).access_token
valid_token = consteq(tok or '', access_token)
if not valid_token:
raise AccessError("Invalid access token")
return record_sudo
if record_sudo.with_context(prefetch_fields=False).public:
return record_sudo
if self.env.user._is_portal():
# Check the read access on the record linked to the attachment
# eg: Allow to download an attachment on a task from /my/tasks/task_id
self.check('read')
return record_sudo
return self
@api.model
def action_get(self):
return self.env['ir.actions.act_window']._for_xml_id('base.action_attachment')
@ -741,7 +833,7 @@ class IrAttachment(models.Model):
("url", "=like", "/web/assets/%"),
('res_model', '=', 'ir.ui.view'),
('res_id', '=', 0),
('create_uid', '=', SUPERUSER_ID),
('create_uid', '=', api.SUPERUSER_ID),
]).unlink()
self.env.registry.clear_cache('assets')
@ -836,3 +928,18 @@ class IrAttachment(models.Model):
def _is_remote_source(self):
self.ensure_one()
return self.url and not self.file_size and self.url.startswith(('http://', 'https://', 'ftp://'))
def _can_return_content(self, field_name=None, access_token=None):
attachment_sudo = self.sudo().with_context(prefetch_fields=False)
if access_token:
if not consteq(attachment_sudo.access_token or "", access_token):
raise AccessError("Invalid access token") # pylint: disable=missing-gettext
return True
if attachment_sudo.public:
return True
if self.env.user._is_portal():
# Check the read access on the record linked to the attachment
# eg: Allow to download an attachment on a task from /my/tasks/task_id
self.check_access('read')
return True
return super()._can_return_content(field_name, access_token)

View file

@ -1,13 +1,15 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import collections
import inspect
import logging
import warnings
import traceback
import random
import time
from odoo import api, models
from odoo.exceptions import AccessDenied
from odoo.modules.registry import _CACHES_BY_KEY
from odoo.tools import SQL
_logger = logging.getLogger(__name__)
@ -17,7 +19,7 @@ def is_autovacuum(func):
return callable(func) and getattr(func, '_autovacuum', False)
class AutoVacuum(models.AbstractModel):
class IrAutovacuum(models.AbstractModel):
""" Helper model to the ``@api.autovacuum`` method decorator. """
_name = 'ir.autovacuum'
_description = 'Automatic Vacuum'
@ -27,16 +29,47 @@ class AutoVacuum(models.AbstractModel):
Perform a complete database cleanup by safely calling every
``@api.autovacuum`` decorated method.
"""
if not self.env.is_admin():
if not self.env.is_admin() or not self.env.context.get('cron_id'):
raise AccessDenied()
for model in self.env.values():
cls = self.env.registry[model._name]
for attr, func in inspect.getmembers(cls, is_autovacuum):
_logger.debug('Calling %s.%s()', model, attr)
try:
func(model)
self.env.cr.commit()
except Exception:
_logger.exception("Failed %s.%s()", model, attr)
self.env.cr.rollback()
all_methods = [
(model, attr, func)
for model in self.env.values()
for attr, func in inspect.getmembers(model.__class__, is_autovacuum)
]
# shuffle methods at each run, prevents one blocking method from always
# starving the following ones
random.shuffle(all_methods)
queue = collections.deque(all_methods)
while queue and self.env['ir.cron']._commit_progress(remaining=len(queue)):
model, attr, func = queue.pop()
_logger.debug('Calling %s.%s()', model, attr)
try:
start_time = time.monotonic()
result = func(model)
self.env['ir.cron']._commit_progress(1)
if isinstance(result, tuple) and len(result) == 2:
func_done, func_remaining = result
_logger.debug(
'%s.%s vacuumed %r records, remaining %r',
model, attr, func_done, func_remaining,
)
if func_remaining:
queue.appendleft((model, attr, func))
_logger.debug("%s.%s took %.2fs", model, attr, time.monotonic() - start_time)
except Exception:
_logger.exception("Failed %s.%s()", model, attr)
self.env.cr.rollback()
@api.autovacuum
def _gc_orm_signaling(self):
for signal in ['registry', *_CACHES_BY_KEY]:
table = f'orm_signaling_{signal}'
# keep the last 10 entries for each signal, and all entries from the last
# hour. This keeps the signaling tables small enough for performance, but
# also gives a useful glimpse into the recent signaling history, including
# the timestamps of the increments.
self.env.cr.execute(SQL(
"DELETE FROM %s WHERE id < (SELECT max(id)-9 FROM %s) AND date < NOW() - interval '1 hours'",
SQL.identifier(table), SQL.identifier(table)
))

View file

@ -4,7 +4,7 @@ from datetime import datetime
from mimetypes import guess_extension
from odoo import models
from odoo.exceptions import MissingError, UserError
from odoo.exceptions import AccessError, MissingError, UserError
from odoo.http import Stream, request
from odoo.tools import file_open, replace_exceptions
from odoo.tools.image import image_process, image_guess_size_from_field_name
@ -36,6 +36,7 @@ class IrBinary(models.AbstractModel):
:param Optional[id] res_id: id of the record
:param Optional[str] access_token: access token to use instead
of the access rights and access rules.
:param Optional[str] field: image field name to check the access to
:returns: single record
:raises MissingError: when no record was found.
"""
@ -45,17 +46,12 @@ class IrBinary(models.AbstractModel):
elif res_id is not None and res_model in self.env:
record = self.env[res_model].browse(res_id).exists()
if not record:
raise MissingError(f"No record found for xmlid={xmlid}, res_model={res_model}, id={res_id}")
if access_token and verify_limited_field_access_token(record, field, access_token):
raise MissingError(f"No record found for xmlid={xmlid}, res_model={res_model}, id={res_id}") # pylint: disable=missing-gettext
if access_token and verify_limited_field_access_token(record, field, access_token, scope="binary"):
return record.sudo()
record = self._find_record_check_access(record, access_token, field)
return record
def _find_record_check_access(self, record, access_token, field):
if record._name == 'ir.attachment':
return record.validate_access(access_token)
record.check_access('read')
if record._can_return_content(field, access_token):
return record.sudo()
record.check_access("read")
return record
def _record_to_stream(self, record, field_name):
@ -73,16 +69,17 @@ class IrBinary(models.AbstractModel):
if record._name == 'ir.attachment' and field_name in ('raw', 'datas', 'db_datas'):
return record._to_http_stream()
record.check_field_access_rights('read', [field_name])
field = record._fields[field_name]
record._check_field_access(field, 'read')
if record._fields[field_name].attachment:
if field.attachment:
field_attachment = self.env['ir.attachment'].sudo().search(
domain=[('res_model', '=', record._name),
('res_id', '=', record.id),
('res_field', '=', field_name)],
limit=1)
if not field_attachment:
raise MissingError("The related attachment does not exist.")
raise MissingError(self.env._("The related attachment does not exist."))
return field_attachment._to_http_stream()
return Stream.from_binary_field(record, field_name)
@ -111,15 +108,15 @@ class IrBinary(models.AbstractModel):
``application/octet-stream``.
:rtype: odoo.http.Stream
"""
with replace_exceptions(ValueError, by=UserError(f'Expected singleton: {record}')):
with replace_exceptions(ValueError, by=UserError(f'Expected singleton: {record}')): # pylint: disable=missing-gettext
record.ensure_one()
try:
field_def = record._fields[field_name]
except KeyError:
raise UserError(f"Record has no field {field_name!r}.")
raise UserError(f"Record has no field {field_name!r}.") # pylint: disable=missing-gettext
if field_def.type != 'binary':
raise UserError(
raise UserError( # pylint: disable=missing-gettext
f"Field {field_def!r} is type {field_def.type!r} but "
f"it is only possible to stream Binary or Image fields."
)

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
"""
Store database-specific configuration parameters
@ -7,7 +6,7 @@ Store database-specific configuration parameters
import uuid
import logging
from odoo import api, fields, models, _
from odoo import api, fields, models
from odoo.exceptions import ValidationError
from odoo.tools import config, ormcache, mute_logger
@ -26,7 +25,7 @@ _default_parameters = {
}
class IrConfigParameter(models.Model):
class IrConfig_Parameter(models.Model):
"""Per-database storage of configuration key-value pairs."""
_name = 'ir.config_parameter'
_description = 'System Parameter'
@ -37,9 +36,10 @@ class IrConfigParameter(models.Model):
key = fields.Char(required=True)
value = fields.Text(required=True)
_sql_constraints = [
('key_uniq', 'unique (key)', 'Key must be unique.')
]
_key_uniq = models.Constraint(
'unique (key)',
"Key must be unique.",
)
@mute_logger('odoo.addons.base.models.ir_config_parameter')
def init(self, force=False):
@ -69,7 +69,7 @@ class IrConfigParameter(models.Model):
return self._get_param(key) or default
@api.model
@ormcache('key')
@ormcache('key', cache='stable')
def _get_param(self, key):
# we bypass the ORM because get_param() is used in some field's depends,
# and must therefore work even when the ORM is not ready to work
@ -104,22 +104,22 @@ class IrConfigParameter(models.Model):
@api.model_create_multi
def create(self, vals_list):
self.env.registry.clear_cache()
return super(IrConfigParameter, self).create(vals_list)
self.env.registry.clear_cache('stable')
return super().create(vals_list)
def write(self, vals):
if 'key' in vals:
illegal = _default_parameters.keys() & self.mapped('key')
if illegal:
raise ValidationError(_("You cannot rename config parameters with keys %s", ', '.join(illegal)))
self.env.registry.clear_cache()
return super(IrConfigParameter, self).write(vals)
raise ValidationError(self.env._("You cannot rename config parameters with keys %s", ', '.join(illegal)))
self.env.registry.clear_cache('stable')
return super().write(vals)
def unlink(self):
self.env.registry.clear_cache()
return super(IrConfigParameter, self).unlink()
self.env.registry.clear_cache('stable')
return super().unlink()
@api.ondelete(at_uninstall=False)
def unlink_default_parameters(self):
for record in self.filtered(lambda p: p.key in _default_parameters.keys()):
raise ValidationError(_("You cannot delete the %s record.", record.key))
raise ValidationError(self.env._("You cannot delete the %s record.", record.key))

View file

@ -1,25 +1,32 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from __future__ import annotations
import logging
import threading
import time
import os
import psycopg2
import psycopg2.errors
import pytz
from datetime import datetime, timedelta
import typing
from datetime import datetime, timedelta, timezone
from dateutil.relativedelta import relativedelta
import odoo
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo import api, fields, models, sql_db
from odoo.exceptions import LockError, UserError
from odoo.modules import Manifest
from odoo.modules.registry import Registry
from odoo.tools import SQL
from odoo.tools.constants import GC_UNLINK_LIMIT
if typing.TYPE_CHECKING:
from collections.abc import Iterable
from odoo.sql_db import BaseCursor
_logger = logging.getLogger(__name__)
BASE_VERSION = odoo.modules.get_manifest('base')['version']
BASE_VERSION = Manifest.for_addon('base')['version']
MAX_FAIL_TIME = timedelta(hours=5) # chosen with a fair roll of the dice
MAX_BATCH_PER_CRON_JOB = 10
MIN_RUNS_PER_JOB = 10
MIN_TIME_PER_JOB = 10 # seconds
CONSECUTIVE_TIMEOUT_FOR_FAILURE = 3
MIN_FAILURE_COUNT_BEFORE_DEACTIVATION = 5
MIN_DELTA_BEFORE_DEACTIVATION = timedelta(days=7)
@ -32,6 +39,7 @@ ODOO_NOTIFY_FUNCTION = os.getenv('ODOO_NOTIFY_FUNCTION', 'pg_notify')
class BadVersion(Exception):
pass
class BadModuleState(Exception):
pass
@ -39,7 +47,7 @@ class BadModuleState(Exception):
_intervalTypes = {
'days': lambda interval: relativedelta(days=interval),
'hours': lambda interval: relativedelta(hours=interval),
'weeks': lambda interval: relativedelta(days=7*interval),
'weeks': lambda interval: relativedelta(days=7 * interval),
'months': lambda interval: relativedelta(months=interval),
'minutes': lambda interval: relativedelta(minutes=interval),
}
@ -51,7 +59,7 @@ class CompletionStatus: # inherit from enum.StrEnum in 3.11
FAILED = 'failed'
class ir_cron(models.Model):
class IrCron(models.Model):
""" Model describing cron jobs (also called actions or tasks).
"""
@ -59,19 +67,20 @@ class ir_cron(models.Model):
# that would cause database wake-up even if the database has not been
# loaded yet or was already unloaded (e.g. 'force_db_wakeup' or something)
# See also odoo.cron
_name = "ir.cron"
_order = 'cron_name'
_name = 'ir.cron'
_order = 'cron_name, id'
_description = 'Scheduled Actions'
_allow_sudo_commands = False
_inherits = {'ir.actions.server': 'ir_actions_server_id'}
ir_actions_server_id = fields.Many2one(
'ir.actions.server', 'Server action',
'ir.actions.server', 'Server action', index=True,
delegate=True, ondelete='restrict', required=True)
cron_name = fields.Char('Name', compute='_compute_cron_name', store=True)
user_id = fields.Many2one('res.users', string='Scheduler User', default=lambda self: self.env.user, required=True)
active = fields.Boolean(default=True)
interval_number = fields.Integer(default=1, aggregator=None, help="Repeat every x.", required=True)
interval_number = fields.Integer(default=1, help="Repeat every x.", required=True, aggregator='avg')
interval_type = fields.Selection([('minutes', 'Minutes'),
('hours', 'Hours'),
('days', 'Days'),
@ -83,13 +92,10 @@ class ir_cron(models.Model):
failure_count = fields.Integer(default=0, help="The number of consecutive failures of this job. It is automatically reset on success.")
first_failure_date = fields.Datetime(string='First Failure Date', help="The first time the cron failed. It is automatically reset on success.")
# Database-level guard: a job's repetition interval must be >= 1.
_check_strictly_positive_interval = models.Constraint(
    'CHECK(interval_number > 0)',
    "The interval number must be a strictly positive number.",
)
@api.depends('ir_actions_server_id.name')
def _compute_cron_name(self):
@ -101,57 +107,51 @@ class ir_cron(models.Model):
for vals in vals_list:
vals['usage'] = 'ir_cron'
if os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
self._cr.postcommit.add(self._notifydb)
self.env.cr.postcommit.add(self._notifydb)
return super().create(vals_list)
@api.model
def default_get(self, fields):
    """Provide defaults for new crons; forces the 'code' action state."""
    # only 'code' state is supported for cron job so set it as default
    model = self
    if not model.env.context.get('default_state'):
        model = model.with_context(default_state='code')
    return super(IrCron, model).default_get(fields)
def method_direct_trigger(self):
    """Run the CRON job in the current (HTTP) thread.

    The job is still ran as it would be by the scheduler: a new cursor
    is used for the execution of the job.

    :raises UserError: when the job is already running
    """
    self.ensure_one()
    self.browse().check_access('write')
    # cron will be run in a separate transaction, flush before and
    # invalidate because data will be changed by that transaction
    self.env.invalidate_all(flush=True)
    cron_cr = self.env.cr
    # include_not_ready: a manual run must not be blocked by nextcall/triggers,
    # only by another worker currently holding the job's row lock
    job = self._acquire_one_job(cron_cr, self.id, include_not_ready=True)
    if not job:
        raise UserError(self.env._("Job '%s' already executing", self.name))
    self._process_job(cron_cr, job)
    return True
@classmethod
def _process_jobs(cls, db_name):
@staticmethod
def _process_jobs(db_name: str) -> None:
""" Execute every job ready to be run on this database. """
try:
db = odoo.sql_db.db_connect(db_name)
db = sql_db.db_connect(db_name)
threading.current_thread().dbname = db_name
with db.cursor() as cron_cr:
cls = IrCron
cls._check_version(cron_cr)
jobs = cls._get_all_ready_jobs(cron_cr)
if not jobs:
return
cls._check_modules_state(cron_cr, jobs)
for job_id in (job['id'] for job in jobs):
try:
job = cls._acquire_one_job(cron_cr, job_id)
except psycopg2.extensions.TransactionRollbackError:
cron_cr.rollback()
_logger.debug("job %s has been processed by another worker, skip", job_id)
continue
if not job:
_logger.debug("another worker is processing job %s, skip", job_id)
continue
_logger.debug("job %s acquired", job_id)
# take into account overridings of _process_job() on that database
registry = Registry(db_name).check_signaling()
registry[cls._name]._process_job(db, cron_cr, job)
_logger.debug("job %s updated and released", job_id)
cls._process_jobs_loop(cron_cr, job_ids=[job['id'] for job in jobs])
except BadVersion:
_logger.warning('Skipping database %s as its base version is not %s.', db_name, BASE_VERSION)
except BadModuleState:
@ -159,7 +159,7 @@ class ir_cron(models.Model):
except psycopg2.errors.UndefinedTable:
# The table ir_cron does not exist; this is probably not an OpenERP database.
_logger.warning('Tried to poll an undefined table on database %s.', db_name)
except psycopg2.ProgrammingError as e:
except psycopg2.ProgrammingError:
raise
except Exception:
_logger.warning('Exception in cron:', exc_info=True)
@ -167,8 +167,33 @@ class ir_cron(models.Model):
if hasattr(threading.current_thread(), 'dbname'):
del threading.current_thread().dbname
@classmethod
def _check_version(cls, cron_cr):
@staticmethod
def _process_jobs_loop(cron_cr: BaseCursor, *, job_ids: Iterable[int] = ()):
    """ Process ready jobs to run on this database.

    The `cron_cr` is used to lock the currently processed job and released
    by committing after each job.

    :param cron_cr: cursor on this database, holds the per-job row lock
    :param job_ids: ids of the jobs to try to acquire and run, in order
    """
    db_name = cron_cr.dbname
    for job_id in job_ids:
        try:
            # row-lock the job on cron_cr (FOR NO KEY UPDATE SKIP LOCKED,
            # see _acquire_one_job); returns None if another worker has it
            job = IrCron._acquire_one_job(cron_cr, job_id)
        except psycopg2.extensions.TransactionRollbackError:
            # serialization failure: a concurrent worker committed a change
            # to this row; drop the transaction and move on
            cron_cr.rollback()
            _logger.debug("job %s has been processed by another worker, skip", job_id)
            continue
        if not job:
            _logger.debug("job %s is being processed by another worker, skip", job_id)
            continue
        _logger.debug("job %s acquired", job_id)
        # take into account overridings of _process_job() on that database
        registry = Registry(db_name)
        registry[IrCron._name]._process_job(cron_cr, job)
        # committing releases the row lock taken by _acquire_one_job
        cron_cr.commit()
        _logger.debug("job %s updated and released", job_id)
@staticmethod
def _check_version(cron_cr):
""" Ensure the code version matches the database version """
cron_cr.execute("""
SELECT latest_version
@ -181,8 +206,8 @@ class ir_cron(models.Model):
if version != BASE_VERSION:
raise BadVersion()
@classmethod
def _check_modules_state(cls, cr, jobs):
@staticmethod
def _check_modules_state(cr, jobs):
""" Ensure no module is installing or upgrading """
cr.execute("""
SELECT COUNT(*)
@ -196,10 +221,10 @@ class ir_cron(models.Model):
if not jobs:
raise BadModuleState()
oldest = min([
fields.Datetime.from_string(job['nextcall'])
for job in jobs
])
# use the max(job['nextcall'], job['write_date']) to avoid the cron
# reset_module_state for an ongoing module installation process
# right after installing a module with an old 'nextcall' cron in data
oldest = min(max(job['nextcall'], job['write_date'] or job['nextcall']) for job in jobs)
if datetime.now() - oldest < MAX_FAIL_TIME:
raise BadModuleState()
@ -207,28 +232,35 @@ class ir_cron(models.Model):
# per minute for 5h) in which case we assume that the crons are stuck
# because the db has zombie states and we force a call to
# reset_module_states.
odoo.modules.reset_modules_state(cr.dbname)
from odoo.modules.loading import reset_modules_state # noqa: PLC0415
reset_modules_state(cr.dbname)
@staticmethod
def _get_ready_sql_condition(cr: BaseCursor) -> SQL:
    """Return the SQL condition selecting jobs ready to run.

    A job is ready when it is active and either its ``nextcall`` is due
    or one of its ``ir_cron_trigger`` rows is due.
    """
    return SQL("""
        active IS TRUE
        AND (nextcall <= %(now)s
            OR id IN (
                SELECT cron_id
                FROM ir_cron_trigger
                WHERE call_at <= %(now)s
            )
        )
    """, now=cr.now())

@staticmethod
def _get_all_ready_jobs(cr: BaseCursor) -> list[dict]:
    """ Return a list of all jobs that are ready to be executed """
    cr.execute(SQL("""
        SELECT *
        FROM ir_cron
        WHERE %s
        ORDER BY failure_count, priority, id
    """, IrCron._get_ready_sql_condition(cr)))
    return cr.dictfetchall()
@classmethod
def _acquire_one_job(cls, cr, job_id):
@staticmethod
def _acquire_one_job(cr: BaseCursor, job_id: int, *, include_not_ready: bool = False) -> dict | None:
"""
Acquire for update the job with id ``job_id``.
@ -270,32 +302,25 @@ class ir_cron(models.Model):
#
# Learn more: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS
query = """
where_clause = SQL("id = %s", job_id)
if not include_not_ready:
where_clause = SQL("%s AND %s", where_clause, IrCron._get_ready_sql_condition(cr))
query = SQL("""
WITH last_cron_progress AS (
SELECT id as progress_id, cron_id, timed_out_counter, done, remaining
FROM ir_cron_progress
WHERE cron_id = %s
WHERE cron_id = %(cron_id)s
ORDER BY id DESC
LIMIT 1
)
SELECT *
FROM ir_cron
LEFT JOIN last_cron_progress lcp ON lcp.cron_id = ir_cron.id
WHERE ir_cron.active = true
AND (nextcall <= (now() at time zone 'UTC')
OR EXISTS (
SELECT cron_id
FROM ir_cron_trigger
WHERE call_at <= (now() at time zone 'UTC')
AND cron_id = ir_cron.id
)
)
AND id = %s
ORDER BY priority
WHERE %(where)s
FOR NO KEY UPDATE SKIP LOCKED
"""
""", cron_id=job_id, where=where_clause)
try:
cr.execute(query, [job_id, job_id], log_exceptions=False)
cr.execute(query, log_exceptions=False)
except psycopg2.extensions.TransactionRollbackError:
# A serialization error can occur when another cron worker
# commits the new `nextcall` value of a cron it just ran and
@ -326,7 +351,7 @@ class ir_cron(models.Model):
_logger.warning(message)
@classmethod
def _process_job(cls, db, cron_cr, job):
def _process_job(cls, cron_cr: BaseCursor, job) -> None:
"""
Execute the cron's server action in a dedicated transaction.
@ -335,10 +360,8 @@ class ir_cron(models.Model):
``'failed'``.
The server action can use the progress API via the method
:meth:`_notify_progress` to report processing progress, i.e. how
many records are done and how many records are remaining to
process.
:meth:`_commit_progress` to report how many records are done
in each batch.
Those progress notifications are used to determine the job's
``CompletionStatus`` and to determine the next time the cron
will be executed:
@ -358,6 +381,7 @@ class ir_cron(models.Model):
env = api.Environment(cron_cr, job['user_id'], {})
ir_cron = env[cls._name]
ir_cron._clear_schedule(job)
failed_by_timeout = (
job['timed_out_counter'] >= CONSECUTIVE_TIMEOUT_FOR_FAILURE
and not job['done']
@ -383,12 +407,10 @@ class ir_cron(models.Model):
if os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
cron_cr.postcommit.add(ir_cron._notifydb) # See: `_notifydb`
else:
raise RuntimeError("unreachable")
cron_cr.commit()
raise RuntimeError(f"unreachable {status=}")
@classmethod
def _run_job(cls, job):
def _run_job(cls, job) -> CompletionStatus:
"""
Execute the job's server action multiple times until it
completes. The completion status is returned.
@ -411,20 +433,33 @@ class ir_cron(models.Model):
timed_out_counter = job['timed_out_counter']
with cls.pool.cursor() as job_cr:
start_time = time.monotonic()
env = api.Environment(job_cr, job['user_id'], {
'lastcall': job['lastcall'],
'cron_id': job['id'],
'cron_end_time': start_time + MIN_TIME_PER_JOB,
})
cron = env[cls._name].browse(job['id'])
status = None
for i in range(MAX_BATCH_PER_CRON_JOB):
loop_count = 0
_logger.info('Job %r (%s) starting', job['cron_name'], job['id'])
# stop after MIN_RUNS_PER_JOB runs and MIN_TIME_PER_JOB seconds, or
# upon full completion or failure
while (
loop_count < MIN_RUNS_PER_JOB
or time.monotonic() < env.context['cron_end_time']
):
cron, progress = cron._add_progress(timed_out_counter=timed_out_counter)
job_cr.commit()
try:
# signaling check and commit is done inside `_callback`
cron._callback(job['cron_name'], job['ir_actions_server_id'])
except Exception: # noqa: BLE001
_logger.exception('Job %r (%s) server action #%s failed',
job['cron_name'], job['id'], job['ir_actions_server_id'])
if progress.done and progress.remaining:
# we do not consider it a failure if some progress has
# been committed
@ -433,28 +468,38 @@ class ir_cron(models.Model):
status = CompletionStatus.FAILED
else:
if not progress.remaining:
status = CompletionStatus.FULLY_DONE
elif not progress.done:
# assume the server action doesn't use the progress API
# and that there is nothing left to process
status = CompletionStatus.FULLY_DONE
else:
status = CompletionStatus.PARTIALLY_DONE
if not progress.done:
break
if status == CompletionStatus.FULLY_DONE and progress.deactivate:
job['active'] = False
finally:
done, remaining = progress.done, progress.remaining
loop_count += 1
progress.timed_out_counter = 0
timed_out_counter = 0
job_cr.commit()
_logger.info('Job %r (%s) processed %s records, %s records remaining',
job['cron_name'], job['id'], progress.done, progress.remaining)
job_cr.commit() # ensure we have no leftovers
_logger.debug('Job %r (%s) processed %s records, %s records remaining',
job['cron_name'], job['id'], done, remaining)
if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED):
break
_logger.info(
'Job %r (%s) %s (#loop %s; done %s; remaining %s; duration %.2fs)',
job['cron_name'], job['id'], status,
loop_count, done, remaining, time.monotonic() - start_time)
return status
def _update_failure_count(self, job, status):
@api.model
def _update_failure_count(self, job: dict, status: CompletionStatus) -> None:
"""
Update cron ``failure_count`` and ``first_failure_date`` given
the job's completion status. Deactivate the cron when BOTH the
@ -469,26 +514,25 @@ class ir_cron(models.Model):
reached, ``active`` is set to ``False`` and both values are
reset.
"""
now = fields.Datetime.context_timestamp(self, datetime.utcnow())
if status == CompletionStatus.FAILED:
now = self.env.cr.now().replace(microsecond=0)
failure_count = job['failure_count'] + 1
first_failure_date = job['first_failure_date'] or now
active = job['active']
if (
failure_count >= MIN_FAILURE_COUNT_BEFORE_DEACTIVATION
and fields.Datetime.context_timestamp(self, first_failure_date) + MIN_DELTA_BEFORE_DEACTIVATION < now
and first_failure_date + MIN_DELTA_BEFORE_DEACTIVATION < now
):
failure_count = 0
first_failure_date = None
active = False
self._notify_admin(_(
self._notify_admin(self.env._(
"Cron job %(name)s (%(id)s) has been deactivated after failing %(count)s times. "
"More information can be found in the server logs around %(time)s.",
name=repr(job['cron_name']),
id=job['id'],
count=MIN_FAILURE_COUNT_BEFORE_DEACTIVATION,
time=datetime.replace(datetime.utcnow(), microsecond=0),
time=now,
))
else:
failure_count = 0
@ -508,44 +552,52 @@ class ir_cron(models.Model):
job['id'],
])
def _reschedule_later(self, job):
@api.model
def _clear_schedule(self, job):
    """Remove triggers for the given job.

    Deletes the job's ``ir_cron_trigger`` rows whose ``call_at`` is not in
    the future, i.e. the triggers that made (or could have made) the job
    ready for the current run.

    :param dict job: row of ``ir_cron`` as returned by ``dictfetchall``
    """
    # truncated to whole seconds; presumably to match the precision of
    # stored call_at values — TODO confirm
    now = self.env.cr.now().replace(microsecond=0)
    self.env.cr.execute("""
        DELETE FROM ir_cron_trigger
        WHERE cron_id = %s
        AND call_at <= %s
    """, [job['id'], now])
@api.model
def _reschedule_later(self, job: dict) -> None:
"""
Reschedule the job to be executed later, after its regular
interval or upon a trigger.
"""
# Use the user's timezone to compare and compute datetimes, otherwise unexpected results may appear.
# For instance, adding 1 month in UTC to July 1st at midnight in GMT+2 gives July 30 instead of August 1st!
now = fields.Datetime.context_timestamp(self, datetime.utcnow())
nextcall = fields.Datetime.context_timestamp(self, job['nextcall'])
now = self.env.cr.now().replace(microsecond=0)
nextcall = job['nextcall']
# Use the timezone of the user when adding the interval. When adding a
# day or more, the user may want to keep the same hour each day.
# The interval won't be fixed, but the hour will stay the same,
# even when changing DST.
interval = _intervalTypes[job['interval_type']](job['interval_number'])
while nextcall <= now:
nextcall = fields.Datetime.context_timestamp(self, nextcall)
nextcall += interval
nextcall = nextcall.astimezone(timezone.utc).replace(tzinfo=None)
_logger.info('Job %r (%s) completed', job['cron_name'], job['id'])
self.env.cr.execute("""
UPDATE ir_cron
SET nextcall = %s,
lastcall = %s
WHERE id = %s
""", [
fields.Datetime.to_string(nextcall.astimezone(pytz.UTC)),
fields.Datetime.to_string(now.astimezone(pytz.UTC)),
job['id'],
])
""", [nextcall, now, job['id']])
self.env.cr.execute("""
DELETE FROM ir_cron_trigger
WHERE cron_id = %s
AND call_at < (now() at time zone 'UTC')
""", [job['id']])
@api.model
def _reschedule_asap(self, job: dict) -> None:
    """
    Reschedule the job to be executed ASAP, after the other cron
    jobs had a chance to run.

    Inserting a trigger dated now makes the job "ready" again without
    touching its regular ``nextcall`` schedule.

    :param job: row of ``ir_cron`` as returned by ``dictfetchall``
    """
    now = self.env.cr.now().replace(microsecond=0)
    self.env.cr.execute("""
        INSERT INTO ir_cron_trigger(call_at, cron_id)
        VALUES (%s, %s)
    """, [now, job['id']])
def _callback(self, cron_name, server_action_id):
""" Run the method associated to a given job. It takes care of logging
@ -555,81 +607,47 @@ class ir_cron(models.Model):
try:
if self.pool != self.pool.check_signaling():
# the registry has changed, reload self in the new registry
self.env.reset()
self = self.env()[self._name]
self.env.transaction.reset()
_logger.debug(
"cron.object.execute(%r, %d, '*', %r, %d)",
self.env.cr.dbname,
self._uid,
self.env.uid,
cron_name,
server_action_id,
)
_logger.info('Job %r (%s) starting', cron_name, self.id)
start_time = time.time()
self.env['ir.actions.server'].browse(server_action_id).run()
self.env.flush_all()
end_time = time.time()
_logger.info('Job %r (%s) done in %.3fs', cron_name, self.id, end_time - start_time)
if start_time and _logger.isEnabledFor(logging.DEBUG):
_logger.debug('Job %r (%s) server action #%s with uid %s executed in %.3fs',
cron_name, self.id, server_action_id, self.env.uid, end_time - start_time)
self.pool.signal_changes()
self.env.cr.commit()
except Exception:
self.pool.reset_changes()
_logger.exception('Job %r (%s) server action #%s failed', cron_name, self.id, server_action_id)
self.env.cr.rollback()
raise
def _try_lock(self, lockfk=False):
"""Try to grab a dummy exclusive write-lock to the rows with the given ids,
to make sure a following write() or unlink() will not block due
to a process currently executing those cron tasks.
:param lockfk: acquire a strong row lock which conflicts with
the lock acquired by foreign keys when they
reference this row.
"""
if not self:
return
row_level_lock = "UPDATE" if lockfk else "NO KEY UPDATE"
try:
self._cr.execute(f"""
SELECT id
FROM "{self._table}"
WHERE id IN %s
FOR {row_level_lock} NOWAIT
""", [tuple(self.ids)], log_exceptions=False)
except psycopg2.OperationalError:
self._cr.rollback() # early rollback to allow translations to work for the user feedback
raise UserError(_("Record cannot be modified right now: "
"This cron task is currently being executed and may not be modified "
"Please try again in a few minutes"))
def write(self, vals):
    """Write ``vals``, refusing to modify a job that is currently running.

    A worker executing the job holds a row-level lock on its ``ir_cron``
    row; trying to take the same lock here detects that case so a
    :class:`UserError` is raised instead of blocking the transaction.

    :raises UserError: when the job is locked by a running execution
    """
    try:
        self.lock_for_update(allow_referencing=True)
    except LockError:
        raise UserError(self.env._(
            "Record cannot be modified right now: "
            "This cron task is currently being executed and may not be modified "
            "Please try again in a few minutes"
        )) from None
    if ('nextcall' in vals or vals.get('active')) and os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
        # wake up the cron workers so the new schedule is noticed immediately
        self.env.cr.postcommit.add(self._notifydb)
    return super().write(vals)
@api.ondelete(at_uninstall=False)
def _unlink_unless_running(self):
    """Block deletion of a cron job while it is being executed.

    A running job holds a row-level lock on its ``ir_cron`` row, so
    failing to acquire the lock here means an execution is in progress.

    :raises UserError: when the job is locked by a running execution
    """
    try:
        self.lock_for_update()
    except LockError:
        raise UserError(self.env._(
            "Record cannot be modified right now: "
            "This cron task is currently being executed and may not be modified "
            "Please try again in a few minutes"
        )) from None
@api.model
def toggle(self, model, domain):
@ -639,9 +657,13 @@ class ir_cron(models.Model):
return True
active = bool(self.env[model].search_count(domain))
return self.try_write({'active': active})
try:
self.lock_for_update(allow_referencing=True)
except LockError:
return True
return self.write({'active': active})
def _trigger(self, at=None):
def _trigger(self, at: datetime | Iterable[datetime] | None = None):
"""
Schedule a cron job to be executed soon independently of its
``nextcall`` field value.
@ -655,11 +677,10 @@ class ir_cron(models.Model):
datetime. The actual implementation is in :meth:`~._trigger_list`,
which is the recommended method for overrides.
:param Optional[Union[datetime.datetime, list[datetime.datetime]]] at:
:param at:
When to execute the cron, at one or several moments in time
instead of as soon as possible.
:return: the created triggers records
:rtype: recordset
"""
if at is None:
at_list = [fields.Datetime.now()]
@ -671,14 +692,12 @@ class ir_cron(models.Model):
return self._trigger_list(at_list)
def _trigger_list(self, at_list):
def _trigger_list(self, at_list: list[datetime]):
"""
Implementation of :meth:`~._trigger`.
:param list[datetime.datetime] at_list:
Execute the cron later, at precise moments in time.
:param at_list: Execute the cron later, at precise moments in time.
:return: the created triggers records
:rtype: recordset
"""
self.ensure_one()
now = fields.Datetime.now()
@ -699,15 +718,16 @@ class ir_cron(models.Model):
_logger.debug('Job %r (%s) will execute at %s', self.sudo().name, self.id, ats)
if min(at_list) <= now or os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
self._cr.postcommit.add(self._notifydb)
self.env.cr.postcommit.add(self._notifydb)
return triggers
@api.model
def _notifydb(self):
    """ Wake up the cron workers

    The ODOO_NOTIFY_CRON_CHANGES environment variable allows to force the notifydb on both
    IrCron modification and on trigger creation (regardless of call_at)
    """
    # notify on the 'postgres' maintenance database so every worker,
    # whatever database it serves, receives the 'cron_trigger' payload
    with sql_db.db_connect('postgres').cursor() as cr:
        cr.execute(SQL("SELECT %s('cron_trigger', %s)", SQL.identifier(ODOO_NOTIFY_FUNCTION), self.env.cr.dbname))
    _logger.debug("cron workers notified")
@ -731,9 +751,11 @@ class ir_cron(models.Model):
}])
return self.with_context(ir_cron_progress_id=progress.id), progress
def _notify_progress(self, *, done, remaining, deactivate=False):
@api.deprecated("Since 19.0, use _commit_progress")
def _notify_progress(self, *, done: int, remaining: int, deactivate: bool = False):
"""
Log the progress of the cron job.
Use ``_commit_progress()`` instead.
:param int done: the number of tasks already processed
:param int remaining: the number of tasks left to process
@ -743,32 +765,88 @@ class ir_cron(models.Model):
return
if done < 0 or remaining < 0:
raise ValueError("`done` and `remaining` must be positive integers.")
self.env['ir.cron.progress'].sudo().browse(progress_id).write({
progress = self.env['ir.cron.progress'].sudo().browse(progress_id)
assert progress.cron_id.id == self.env.context.get('cron_id'), "Progress on the wrong cron_id"
progress.write({
'remaining': remaining,
'done': done,
'deactivate': deactivate,
})
@api.model
def _commit_progress(
    self,
    processed: int = 0,
    *,
    remaining: int | None = None,
    deactivate: bool = False,
) -> float:
    """
    Commit and log progress for the batch from a cron function.

    The number of items processed is added to the current done count.
    If you don't specify a remaining count, the number of items processed
    is subtracted from the existing remaining count.

    If called from outside the cron job, the progress function call will
    just commit.

    :param processed: number of processed items in this step
    :param remaining: set the remaining count to the given count
    :param deactivate: deactivate the cron after running it
    :return: remaining time (seconds) for the cron run
    """
    ctx = self.env.context
    progress = self.env['ir.cron.progress'].sudo().browse(ctx.get('ir_cron_progress_id'))
    if not progress:
        # not called during a cron, just commit
        self.env.cr.commit()
        return float('inf')
    assert processed >= 0, 'processed must be positive'
    assert (remaining or 0) >= 0, "remaining must be positive"
    assert progress.cron_id.id == ctx.get('cron_id'), "Progress on the wrong cron_id"
    if remaining is None:
        remaining = max(progress.remaining - processed, 0)
    done = progress.done + processed
    vals = {
        'remaining': remaining,
        'done': done,
    }
    if deactivate:
        vals['deactivate'] = True
    progress.write(vals)
    self.env.cr.commit()
    # time budget left before the worker stops looping (see _run_job)
    return max(ctx.get('cron_end_time', float('inf')) - time.monotonic(), 0)
def action_open_parent_action(self):
    """Delegate to the linked server action's equivalent method."""
    server_action = self.ir_actions_server_id
    return server_action.action_open_parent_action()
def action_open_scheduled_action(self):
    """Delegate to the linked server action's equivalent method."""
    server_action = self.ir_actions_server_id
    return server_action.action_open_scheduled_action()
class IrCronTrigger(models.Model):
    """One-shot wake-up requests making a cron job ready ahead of ``nextcall``."""
    _name = 'ir.cron.trigger'
    _description = 'Triggered actions'
    _rec_name = 'cron_id'
    _allow_sudo_commands = False

    # job to wake up; a trigger cannot outlive its job
    cron_id = fields.Many2one("ir.cron", index=True, required=True, ondelete="cascade")
    # moment at which the job becomes ready because of this trigger
    call_at = fields.Datetime(index=True, required=True)

    @api.autovacuum
    def _gc_cron_triggers(self):
        """Garbage-collect week-old triggers of inactive jobs, in batches."""
        # active cron jobs are cleared by `_clear_schedule` when the job starts
        domain = [
            ('call_at', '<', datetime.now() + relativedelta(weeks=-1)),
            ('cron_id.active', '=', False),
        ]
        records = self.search(domain, limit=GC_UNLINK_LIMIT)
        records.unlink()
        return len(records), len(records) == GC_UNLINK_LIMIT  # done, remaining
class ir_cron_progress(models.Model):
class IrCronProgress(models.Model):
_name = 'ir.cron.progress'
_description = 'Progress of Scheduled Actions'
_rec_name = 'cron_id'
@ -781,4 +859,6 @@ class ir_cron_progress(models.Model):
@api.autovacuum
def _gc_cron_progress(self):
    """Garbage-collect progress rows older than one week, in batches."""
    records = self.search([('create_date', '<', datetime.now() - relativedelta(weeks=1))], limit=GC_UNLINK_LIMIT)
    records.unlink()
    return len(records), len(records) == GC_UNLINK_LIMIT  # done, remaining

View file

@ -1,11 +1,12 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
from datetime import date
from odoo import api, fields, models, tools, _, SUPERUSER_ID
from odoo import api, fields, models, tools
from odoo.api import SUPERUSER_ID
from odoo.exceptions import ValidationError
from odoo.fields import Domain
from odoo.tools import SQL
@ -31,7 +32,7 @@ class IrDefault(models.Model):
try:
json.loads(record.json_value)
except json.JSONDecodeError:
raise ValidationError(_('Invalid JSON format in Default Value field.'))
raise ValidationError(self.env._('Invalid JSON format in Default Value field.'))
@api.model_create_multi
def create(self, vals_list):
@ -88,11 +89,11 @@ class IrDefault(models.Model):
value = field.to_string(value)
json_value = json.dumps(value, ensure_ascii=False)
except KeyError:
raise ValidationError(_("Invalid field %(model)s.%(field)s", model=model_name, field=field_name))
raise ValidationError(self.env._("Invalid field %(model)s.%(field)s", model=model_name, field=field_name))
except Exception:
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s", model=model_name, field=field_name, value=value))
raise ValidationError(self.env._("Invalid value for %(model)s.%(field)s: %(value)s", model=model_name, field=field_name, value=value))
if field.type == 'integer' and not (-2**31 < parsed < 2**31-1):
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model=model_name, field=field_name, value=value))
raise ValidationError(self.env._("Invalid value for %(model)s.%(field)s: %(value)s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model=model_name, field=field_name, value=value))
# update existing default for the same scope, or create one
field = self.env['ir.model.fields']._get(model_name, field_name)
@ -213,16 +214,19 @@ class IrDefault(models.Model):
for id_ in company_ids
})
def _evaluate_condition_with_fallback(self, model_name, field_expr, operator, value):
    """Evaluate a condition leaf against a company-dependent field's fallback.

    When the field named by ``field_expr`` is company-dependent and has no
    customization, check whether a record carrying only the field's
    fallback value would satisfy ``(field_expr, operator, value)``.

    :return: True/False, or None when the condition cannot be evaluated
    """
    field_name, _property_name = fields.parse_field_expr(field_expr)
    model = self.env[model_name]
    field = model._fields[field_name]
    fallback = field.get_company_dependent_fallback(model)
    try:
        record = model.new({field_name: field.convert_to_write(fallback, model)})
        return bool(record.filtered_domain(Domain(field_expr, operator, value)))
    except ValueError:
        # unknown: the leaf cannot be evaluated on an in-memory record
        return None

View file

@ -1,19 +1,17 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
from odoo.modules.loading import force_demo
from odoo.addons.base.models.ir_module import assert_log_admin_access
class IrDemo(models.TransientModel):
_name = 'ir.demo'
_description = 'Demo'
@assert_log_admin_access
def install_demo(self):
force_demo(self.env)
import odoo.modules.loading # noqa: PLC0415
odoo.modules.loading.force_demo(self.env)
return {
'type': 'ir.actions.act_url',
'target': 'self',

View file

@ -1,7 +1,7 @@
from odoo import api, fields, models
class DemoFailure(models.TransientModel):
class IrDemo_Failure(models.TransientModel):
""" Stores modules for which we could not install demo data
"""
_name = 'ir.demo_failure'
@ -11,7 +11,8 @@ class DemoFailure(models.TransientModel):
error = fields.Char(string="Error")
wizard_id = fields.Many2one('ir.demo_failure.wizard')
class DemoFailureWizard(models.TransientModel):
class IrDemo_FailureWizard(models.TransientModel):
_name = 'ir.demo_failure.wizard'
_description = 'Demo Failure wizard'

View file

@ -1,6 +1,6 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo import api, fields, models
from odoo.exceptions import UserError
from ast import literal_eval
@ -28,22 +28,17 @@ class IrEmbeddedActions(models.Model):
context = fields.Char(default="{}", help="Context dictionary as Python expression, empty by default (Default: {})")
groups_ids = fields.Many2many('res.groups', help='Groups that can execute the embedded action. Leave empty to allow everybody.')
_sql_constraints = [
(
'check_only_one_action_defined',
"""CHECK(
(action_id IS NOT NULL AND python_method IS NULL) OR
(action_id IS NULL AND python_method IS NOT NULL)
)""",
'Constraint to ensure that either an XML action or a python_method is defined, but not both.'
), (
'check_python_method_requires_name',
"""CHECK(
NOT (python_method IS NOT NULL AND name IS NULL)
)""",
'Constraint to ensure that if a python_method is defined, then the name must also be defined.'
)
]
_check_only_one_action_defined = models.Constraint(
'''CHECK(
(action_id IS NOT NULL AND python_method IS NULL)
OR (action_id IS NULL AND python_method IS NOT NULL)
)''',
"Constraint to ensure that either an XML action or a python_method is defined, but not both.",
)
_check_python_method_requires_name = models.Constraint(
'CHECK(NOT (python_method IS NOT NULL AND name IS NULL))',
"Constraint to ensure that if a python_method is defined, then the name must also be defined.",
)
@api.model_create_multi
def create(self, vals_list):
@ -80,7 +75,8 @@ class IrEmbeddedActions(models.Model):
active_model_record = self.env[parent_res_model].search(domain_id, order='id')
for record in records:
action_groups = record.groups_ids
if not action_groups or (action_groups & self.env.user.groups_id):
is_valid_method = not record.python_method or hasattr(self.env[parent_res_model], record.python_method)
if is_valid_method and (not action_groups or (action_groups & self.env.user.all_group_ids)):
domain_model = literal_eval(record.domain or '[]')
record.is_visible = (
record.parent_res_id in (False, self.env.context.get('active_id', False))
@ -95,7 +91,7 @@ class IrEmbeddedActions(models.Model):
def _unlink_if_action_deletable(self):
for record in self:
if not record.is_deletable:
raise UserError(_('You cannot delete a default embedded action'))
raise UserError(self.env._('You cannot delete a default embedded action'))
def _get_readable_fields(self):
""" return the list of fields that are safe to read

View file

@ -5,9 +5,9 @@ from odoo import fields, models
class IrExports(models.Model):
_name = "ir.exports"
_name = 'ir.exports'
_description = 'Exports'
_order = 'name'
_order = 'name, id'
name = fields.Char(string='Export Name')
resource = fields.Char(index=True)

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
@ -6,7 +5,6 @@ import functools
import itertools
from typing import NamedTuple
import psycopg2
import pytz
from odoo import api, Command, fields, models
@ -42,6 +40,7 @@ class ImportWarning(Warning):
class ConversionNotFound(ValueError):
pass
class IrFieldsConverter(models.AbstractModel):
_name = 'ir.fields.converter'
_description = 'Fields Converter'
@ -75,6 +74,7 @@ class IrFieldsConverter(models.AbstractModel):
The field_path value is computed based on the last field in the chain.
for example,
- path_field for 'Private address' at childA_1 is ['partner_id', 'type']
- path_field for 'childA_1' is ['partner_id']
@ -82,7 +82,7 @@ class IrFieldsConverter(models.AbstractModel):
we can the link the errors to the correct header-field couple in the import UI.
"""
field_path = [field]
parent_fields_hierarchy = self._context.get('parent_fields_hierarchy')
parent_fields_hierarchy = self.env.context.get('parent_fields_hierarchy')
if parent_fields_hierarchy:
field_path = parent_fields_hierarchy + field_path
@ -95,14 +95,15 @@ class IrFieldsConverter(models.AbstractModel):
return field_path
@api.model
def for_model(self, model, fromtype=str):
def for_model(self, model, fromtype=str, *, savepoint):
""" Returns a converter object for the model. A converter is a
callable taking a record-ish (a dictionary representing an odoo
record with values of typetag ``fromtype``) and returning a converted
records matching what :meth:`odoo.osv.orm.Model.write` expects.
records matching what :meth:`odoo.models.Model.write` expects.
:param model: :class:`odoo.osv.orm.Model` for the conversion base
:param model: :class:`odoo.models.Model` for the conversion base
:param fromtype:
:param savepoint: savepoint to rollback to on error
:returns: a converter callable
:rtype: (record: dict, logger: (field, error) -> None) -> dict
"""
@ -110,7 +111,7 @@ class IrFieldsConverter(models.AbstractModel):
model = self.env[model._name]
converters = {
name: self.to_field(model, field, fromtype)
name: self.to_field(model, field, fromtype, savepoint=savepoint)
for name, field in model._fields.items()
}
@ -155,7 +156,7 @@ class IrFieldsConverter(models.AbstractModel):
return fn
@api.model
def to_field(self, model, field, fromtype=str):
def to_field(self, model, field, fromtype=str, *, savepoint):
""" Fetches a converter for the provided field object, from the
specified type.
@ -191,6 +192,7 @@ class IrFieldsConverter(models.AbstractModel):
:type field: :class:`odoo.fields.Field`
:param fromtype: type to convert to something fitting for ``field``
:type fromtype: type | str
:param savepoint: savepoint to rollback to on errors
:return: a function (fromtype -> field.write_type), if a converter is found
:rtype: Callable | None
"""
@ -200,38 +202,37 @@ class IrFieldsConverter(models.AbstractModel):
converter = getattr(self, '_%s_to_%s' % (typename, field.type), None)
if not converter:
return None
return functools.partial(converter, model, field)
return functools.partial(converter, model, field, savepoint=savepoint)
def _str_to_json(self, model, field, value):
def _str_to_json(self, model, field, value, savepoint):
try:
return json.loads(value), []
except ValueError:
msg = _("'%s' does not seem to be a valid JSON for field '%%(field)s'")
msg = self.env._("'%s' does not seem to be a valid JSON for field '%%(field)s'")
raise self._format_import_error(ValueError, msg, value)
def _str_to_properties(self, model, field, value):
def _str_to_properties(self, model, field, value, savepoint):
# If we want to import the all properties at once (with the technical value)
if isinstance(value, str):
try:
value = json.loads(value)
except ValueError:
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
msg = self.env._("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg)
if not isinstance(value, list):
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
msg = self.env._("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg, {'value': value})
warnings = []
for property_dict in value:
if not (property_dict.keys() >= {'name', 'type', 'string'}):
msg = _("'%(value)s' does not seem to be a valid Property value for field '%%(field)s'. Each property need at least 'name', 'type' and 'string' attribute.")
msg = self.env._("'%(value)s' does not seem to be a valid Property value for field '%%(field)s'. Each property need at least 'name', 'type' and 'string' attribute.")
raise self._format_import_error(ValueError, msg, {'value': property_dict})
val = property_dict.get('value')
if not val:
property_dict.pop('value', None)
continue
property_type = property_dict['type']
@ -243,7 +244,7 @@ class IrFieldsConverter(models.AbstractModel):
if val in (sel_val, sel_label)
), None)
if not new_val:
msg = _("'%(value)s' does not seem to be a valid Selection value for '%(label_property)s' (subfield of '%%(field)s' field).")
msg = self.env._("'%(value)s' does not seem to be a valid Selection value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
property_dict['value'] = new_val
@ -256,17 +257,17 @@ class IrFieldsConverter(models.AbstractModel):
if tag in (tag_val, tag_label)
), None)
if not val_tag:
msg = _("'%(value)s' does not seem to be a valid Tag value for '%(label_property)s' (subfield of '%%(field)s' field).")
msg = self.env._("'%(value)s' does not seem to be a valid Tag value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': tag, 'label_property': property_dict['string']})
new_val.append(val_tag)
property_dict['value'] = new_val
elif property_type == 'boolean':
new_val, warnings = self._str_to_boolean(model, field, val)
new_val, warnings = self._str_to_boolean(model, field, val, savepoint=savepoint)
if not warnings:
property_dict['value'] = new_val
else:
msg = _("Unknown value '%(value)s' for boolean '%(label_property)s' property (subfield of '%%(field)s' field).")
msg = self.env._("Unknown value '%(value)s' for boolean '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type in ('many2one', 'many2many'):
@ -282,7 +283,7 @@ class IrFieldsConverter(models.AbstractModel):
ids = []
fake_field = FakeField(comodel_name=property_dict['comodel'], name=property_dict['string'])
for reference in references:
id_, __, ws = self.db_id_for(model, fake_field, subfield, reference)
id_, ws = self.db_id_for(model, fake_field, subfield, reference, savepoint)
ids.append(id_)
warnings.extend(ws)
@ -292,20 +293,20 @@ class IrFieldsConverter(models.AbstractModel):
try:
property_dict['value'] = int(val)
except ValueError:
msg = _("'%(value)s' does not seem to be an integer for field '%(label_property)s' property (subfield of '%%(field)s' field).")
msg = self.env._("'%(value)s' does not seem to be an integer for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type == 'float':
try:
property_dict['value'] = float(val)
except ValueError:
msg = _("'%(value)s' does not seem to be an float for field '%(label_property)s' property (subfield of '%%(field)s' field).")
msg = self.env._("'%(value)s' does not seem to be an float for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
return value, warnings
@api.model
def _str_to_boolean(self, model, field, value):
def _str_to_boolean(self, model, field, value, savepoint):
# all translatables used for booleans
# potentially broken casefolding? What about locales?
trues = set(word.lower() for word in itertools.chain(
@ -325,89 +326,73 @@ class IrFieldsConverter(models.AbstractModel):
if value.lower() in falses:
return False, []
if field.name in self._context.get('import_skip_records', []):
if field.name in self.env.context.get('import_skip_records', []):
return None, []
return True, [self._format_import_error(
ValueError,
_(u"Unknown value '%s' for boolean field '%%(field)s'"),
self.env._("Unknown value '%s' for boolean field '%%(field)s'"),
value,
{'moreinfo': _(u"Use '1' for yes and '0' for no")}
{'moreinfo': self.env._("Use '1' for yes and '0' for no")}
)]
@api.model
def _str_to_integer(self, model, field, value):
def _str_to_integer(self, model, field, value, savepoint):
try:
return int(value), []
except ValueError:
raise self._format_import_error(
ValueError,
_(u"'%s' does not seem to be an integer for field '%%(field)s'"),
self.env._("'%s' does not seem to be an integer for field '%%(field)s'"),
value
)
@api.model
def _str_to_float(self, model, field, value):
def _str_to_float(self, model, field, value, savepoint):
try:
return float(value), []
except ValueError:
raise self._format_import_error(
ValueError,
_(u"'%s' does not seem to be a number for field '%%(field)s'"),
self.env._("'%s' does not seem to be a number for field '%%(field)s'"),
value
)
_str_to_monetary = _str_to_float
@api.model
def _str_id(self, model, field, value):
def _str_id(self, model, field, value, savepoint):
return value, []
_str_to_reference = _str_to_char = _str_to_text = _str_to_binary = _str_to_html = _str_id
@api.model
def _str_to_date(self, model, field, value):
def _str_to_date(self, model, field, value, savepoint):
try:
parsed_value = fields.Date.from_string(value)
return fields.Date.to_string(parsed_value), []
except ValueError:
raise self._format_import_error(
ValueError,
_(u"'%s' does not seem to be a valid date for field '%%(field)s'"),
self.env._("'%s' does not seem to be a valid date for field '%%(field)s'"),
value,
{'moreinfo': _(u"Use the format '%s'", u"2012-12-31")}
{'moreinfo': self.env._("Use the format '%s'", u"2012-12-31")}
)
@api.model
def _input_tz(self):
# if there's a tz in context, try to use that
if self._context.get('tz'):
try:
return pytz.timezone(self._context['tz'])
except pytz.UnknownTimeZoneError:
pass
# if the current user has a tz set, try to use that
user = self.env.user
if user.tz:
try:
return pytz.timezone(user.tz)
except pytz.UnknownTimeZoneError:
pass
# fallback if no tz in context or on user: UTC
return pytz.UTC
return self.env.tz
@api.model
def _str_to_datetime(self, model, field, value):
def _str_to_datetime(self, model, field, value, savepoint):
try:
parsed_value = fields.Datetime.from_string(value)
except ValueError:
raise self._format_import_error(
ValueError,
_(u"'%s' does not seem to be a valid datetime for field '%%(field)s'"),
self.env._("'%s' does not seem to be a valid datetime for field '%%(field)s'"),
value,
{'moreinfo': _(u"Use the format '%s'", u"2012-12-31 23:59:59")}
{'moreinfo': self.env._("Use the format '%s'", u"2012-12-31 23:59:59")}
)
input_tz = self._input_tz()# Apply input tz to the parsed naive datetime
@ -419,7 +404,7 @@ class IrFieldsConverter(models.AbstractModel):
def _get_boolean_translations(self, src):
# Cache translations so they don't have to be reloaded from scratch on
# every row of the file
tnx_cache = self._cr.cache.setdefault(self._name, {})
tnx_cache = self.env.cr.cache.setdefault(self._name, {})
if src in tnx_cache:
return tnx_cache[src]
@ -438,7 +423,7 @@ class IrFieldsConverter(models.AbstractModel):
return []
# Cache translations so they don't have to be reloaded from scratch on
# every row of the file
tnx_cache = self._cr.cache.setdefault(self._name, {})
tnx_cache = self.env.cr.cache.setdefault(self._name, {})
if src in tnx_cache:
return tnx_cache[src]
@ -459,7 +444,7 @@ class IrFieldsConverter(models.AbstractModel):
return result
@api.model
def _str_to_selection(self, model, field, value):
def _str_to_selection(self, model, field, value, savepoint):
# get untranslated values
env = self.with_context(lang=None).env
selection = field.get_description(env)['selection']
@ -478,19 +463,19 @@ class IrFieldsConverter(models.AbstractModel):
if value.lower() == str(item).lower() or any(value.lower() == label.lower() for label in labels):
return item, []
if field.name in self._context.get('import_skip_records', []):
if field.name in self.env.context.get('import_skip_records', []):
return None, []
elif field.name in self._context.get('import_set_empty_fields', []):
elif field.name in self.env.context.get('import_set_empty_fields', []):
return False, []
raise self._format_import_error(
ValueError,
_(u"Value '%s' not found in selection field '%%(field)s'"),
self.env._("Value '%s' not found in selection field '%%(field)s'"),
value,
{'moreinfo': [_label or str(item) for item, _label in selection if _label or item]}
)
@api.model
def db_id_for(self, model, field, subfield, value):
def db_id_for(self, model, field, subfield, value, savepoint):
""" Finds a database id for the reference ``value`` in the referencing
subfield ``subfield`` of the provided field of the provided model.
@ -501,6 +486,7 @@ class IrFieldsConverter(models.AbstractModel):
``id`` for an external id and ``.id`` for a database
id
:param value: value of the reference to match to an actual record
:param savepoint: savepoint for rollback on errors
:return: a pair of the matched database identifier (if any), the
translated user-readable name for the field and the list of
warnings
@ -508,7 +494,7 @@ class IrFieldsConverter(models.AbstractModel):
"""
# the function 'flush' comes from BaseModel.load(), and forces the
# creation/update of former records (batch creation)
flush = self._context.get('import_flush', lambda **kw: None)
flush = self.env.context.get('import_flush', lambda **kw: None)
id = None
warnings = []
@ -519,7 +505,7 @@ class IrFieldsConverter(models.AbstractModel):
'view_mode': 'list,form',
'views': [(False, 'list'), (False, 'form')],
'context': {'create': False},
'help': _(u"See all possible values")}
'help': self.env._("See all possible values")}
if subfield is None:
action['res_model'] = field.comodel_name
elif subfield in ('id', '.id'):
@ -528,33 +514,33 @@ class IrFieldsConverter(models.AbstractModel):
RelatedModel = self.env[field.comodel_name]
if subfield == '.id':
field_type = _(u"database id")
if isinstance(value, str) and not self._str_to_boolean(model, field, value)[0]:
return False, field_type, warnings
field_type = self.env._("database id")
if isinstance(value, str) and not self._str_to_boolean(model, field, value, savepoint=savepoint)[0]:
return False, warnings
try:
tentative_id = int(value)
except ValueError:
raise self._format_import_error(
ValueError,
_(u"Invalid database id '%s' for the field '%%(field)s'"),
self.env._("Invalid database id '%s' for the field '%%(field)s'"),
value,
{'moreinfo': action})
if RelatedModel.browse(tentative_id).exists():
id = tentative_id
elif subfield == 'id':
field_type = _(u"external id")
if not self._str_to_boolean(model, field, value)[0]:
return False, field_type, warnings
field_type = self.env._("external id")
if not self._str_to_boolean(model, field, value, savepoint=savepoint)[0]:
return False, warnings
if '.' in value:
xmlid = value
else:
xmlid = "%s.%s" % (self._context.get('_import_current_module', ''), value)
xmlid = "%s.%s" % (self.env.context.get('_import_current_module', ''), value)
flush(xml_id=xmlid)
id = self._xmlid_to_record_id(xmlid, RelatedModel)
elif subfield is None:
field_type = _(u"name")
field_type = self.env._("name")
if value == '':
return False, field_type, warnings
return False, warnings
flush(model=field.comodel_name)
ids = RelatedModel.name_search(name=value, operator='=')
if ids:
@ -569,14 +555,15 @@ class IrFieldsConverter(models.AbstractModel):
name_create_enabled_fields = self.env.context.get('name_create_enabled_fields') or {}
if name_create_enabled_fields.get(field.name):
try:
with self.env.cr.savepoint():
id, _name = RelatedModel.name_create(name=value)
except (Exception, psycopg2.IntegrityError):
error_msg = _("Cannot create new '%s' records from their name alone. Please create those records manually and try importing again.", RelatedModel._description)
id, _name = RelatedModel.name_create(name=value)
RelatedModel.env.flush_all()
except Exception: # noqa: BLE001
savepoint.rollback()
error_msg = self.env._("Cannot create new '%s' records from their name alone. Please create those records manually and try importing again.", RelatedModel._description)
else:
raise self._format_import_error(
Exception,
_("Unknown sub-field “%s", subfield),
self.env._("Unknown sub-field “%s", subfield),
)
set_empty = False
@ -588,9 +575,9 @@ class IrFieldsConverter(models.AbstractModel):
skip_record = field_path in self.env.context.get('import_skip_records', [])
if id is None and not set_empty and not skip_record:
if error_msg:
message = _("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s' and the following error was encountered when we attempted to create one: %(error_message)s")
message = self.env._("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s' and the following error was encountered when we attempted to create one: %(error_message)s")
else:
message = _("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s'")
message = self.env._("No matching record found for %(field_type)s '%(value)s' in field '%%(field)s'")
error_info_dict = {'moreinfo': action}
if self.env.context.get('import_file'):
@ -604,7 +591,7 @@ class IrFieldsConverter(models.AbstractModel):
message,
{'field_type': field_type, 'value': value, 'error_message': error_msg},
error_info_dict)
return id, field_type, warnings
return id, warnings
def _xmlid_to_record_id(self, xmlid, model):
""" Return the record id corresponding to the given external id,
@ -645,54 +632,54 @@ class IrFieldsConverter(models.AbstractModel):
fieldset = set(record)
if fieldset - REFERENCING_FIELDS:
raise ValueError(
_(u"Can not create Many-To-One records indirectly, import the field separately"))
self.env._("Can not create Many-To-One records indirectly, import the field separately"))
if len(fieldset) > 1:
raise ValueError(
_(u"Ambiguous specification for field '%(field)s', only provide one of name, external id or database id"))
self.env._("Ambiguous specification for field '%(field)s', only provide one of name, external id or database id"))
# only one field left possible, unpack
[subfield] = fieldset
return subfield, []
@api.model
def _str_to_many2one(self, model, field, values):
def _str_to_many2one(self, model, field, values, savepoint):
# Should only be one record, unpack
[record] = values
subfield, w1 = self._referencing_subfield(record)
id, _, w2 = self.db_id_for(model, field, subfield, record[subfield])
id, w2 = self.db_id_for(model, field, subfield, record[subfield], savepoint)
return id, w1 + w2
@api.model
def _str_to_many2one_reference(self, model, field, value):
return self._str_to_integer(model, field, value)
def _str_to_many2one_reference(self, model, field, value, savepoint):
return self._str_to_integer(model, field, value, savepoint)
@api.model
def _str_to_many2many(self, model, field, value):
def _str_to_many2many(self, model, field, value, savepoint):
[record] = value
subfield, warnings = self._referencing_subfield(record)
ids = []
for reference in record[subfield].split(','):
id, _, ws = self.db_id_for(model, field, subfield, reference)
id, ws = self.db_id_for(model, field, subfield, reference, savepoint)
ids.append(id)
warnings.extend(ws)
if field.name in self._context.get('import_set_empty_fields', []) and any([id is None for id in ids]):
if field.name in self.env.context.get('import_set_empty_fields', []) and any(id is None for id in ids):
ids = [id for id in ids if id]
elif field.name in self._context.get('import_skip_records', []) and any([id is None for id in ids]):
elif field.name in self.env.context.get('import_skip_records', []) and any(id is None for id in ids):
return None, warnings
if self._context.get('update_many2many'):
if self.env.context.get('update_many2many'):
return [Command.link(id) for id in ids], warnings
else:
return [Command.set(ids)], warnings
@api.model
def _str_to_one2many(self, model, field, records):
name_create_enabled_fields = self._context.get('name_create_enabled_fields') or {}
def _str_to_one2many(self, model, field, records, savepoint):
name_create_enabled_fields = self.env.context.get('name_create_enabled_fields') or {}
prefix = field.name + '/'
relative_name_create_enabled_fields = {
k[len(prefix):]: v
@ -722,12 +709,12 @@ class IrFieldsConverter(models.AbstractModel):
# Complete the field hierarchy path
# E.g. For "parent/child/subchild", field hierarchy path for "subchild" is ['parent', 'child']
parent_fields_hierarchy = self._context.get('parent_fields_hierarchy', []) + [field.name]
parent_fields_hierarchy = self.env.context.get('parent_fields_hierarchy', []) + [field.name]
convert = self.with_context(
name_create_enabled_fields=relative_name_create_enabled_fields,
parent_fields_hierarchy=parent_fields_hierarchy
).for_model(self.env[field.comodel_name])
).for_model(self.env[field.comodel_name], savepoint=savepoint)
for record in records:
id = None
@ -737,7 +724,7 @@ class IrFieldsConverter(models.AbstractModel):
subfield, w1 = self._referencing_subfield(refs)
warnings.extend(w1)
try:
id, _, w2 = self.db_id_for(model, field, subfield, record[subfield])
id, w2 = self.db_id_for(model, field, subfield, record[subfield], savepoint)
warnings.extend(w2)
except ValueError:
if subfield != 'id':

View file

@ -1,9 +1,7 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
from odoo import api, fields, models, tools
from odoo.exceptions import UserError
from odoo.tools.safe_eval import safe_eval, datetime
from odoo import api, fields, models
class IrFilters(models.Model):
@ -12,9 +10,7 @@ class IrFilters(models.Model):
_order = 'model_id, name, id desc'
name = fields.Char(string='Filter Name', required=True)
user_id = fields.Many2one('res.users', string='User', ondelete='cascade',
help="The user this filter is private to. When left empty the filter is public "
"and available to all users.")
user_ids = fields.Many2many('res.users', string='Users', ondelete='cascade', help="The users the filter is shared with. If empty, the filter is shared with all users.")
domain = fields.Text(default='[]', required=True)
context = fields.Text(default='{}', required=True)
sort = fields.Char(default='[]', required=True)
@ -24,18 +20,33 @@ class IrFilters(models.Model):
help="The menu action this filter applies to. "
"When left empty the filter applies to all menus "
"for this model.")
embedded_action_id = fields.Many2one('ir.embedded.actions', help="The embedded action this filter is applied to", ondelete="cascade")
embedded_action_id = fields.Many2one('ir.embedded.actions', help="The embedded action this filter is applied to", ondelete="cascade", index='btree_not_null')
embedded_parent_res_id = fields.Integer(help="id of the record the filter should be applied to. Only used in combination with embedded actions")
active = fields.Boolean(default=True)
_get_filters_index = models.Index(
'(model_id, action_id, embedded_action_id, embedded_parent_res_id)',
)
# The embedded_parent_res_id can only be defined when the embedded_action_id field is set.
# As the embedded model is linked to only one res_model, It ensure the unicity of the filter regarding the
# embedded_parent_res_model and the embedded_parent_res_id
_check_res_id_only_when_embedded_action = models.Constraint(
'CHECK(NOT (embedded_parent_res_id IS NOT NULL AND embedded_action_id IS NULL))',
"Constraint to ensure that the embedded_parent_res_id is only defined when a top_action_id is defined.",
)
_check_sort_json = models.Constraint(
"CHECK(sort IS NULL OR jsonb_typeof(sort::jsonb) = 'array')",
"Invalid sort definition",
)
@api.model
def _list_all_models(self):
lang = self.env.lang or 'en_US'
self._cr.execute(
self.env.cr.execute(
"SELECT model, COALESCE(name->>%s, name->>'en_US') FROM ir_model ORDER BY 2",
[lang],
)
return self._cr.fetchall()
return self.env.cr.fetchall()
def copy_data(self, default=None):
vals_list = super().copy_data(default=default)
@ -52,11 +63,10 @@ class IrFilters(models.Model):
return new_filter
def _get_eval_domain(self):
self.ensure_one()
return safe_eval(self.domain, {
'datetime': datetime,
'context_today': datetime.datetime.now,
})
try:
return ast.literal_eval(self.domain)
except ValueError as e:
raise ValueError("Invalid domain: {self.domain}") from e
@api.model
def _get_action_domain(self, action_id=None, embedded_action_id=None, embedded_parent_res_id=None):
@ -78,114 +88,22 @@ class IrFilters(models.Model):
The action does not have to correspond to the model, it may only be
a contextual action.
:return: list of :meth:`~osv.read`-like dicts containing the
``name``, ``is_default``, ``domain``, ``user_id`` (m2o tuple),
``name``, ``is_default``, ``domain``, ``user_ids`` (m2m),
``action_id`` (m2o tuple), ``embedded_action_id`` (m2o tuple), ``embedded_parent_res_id``
and ``context`` of the matching ``ir.filters``.
"""
# available filters: private filters (user_id=uid) and public filters (uid=NULL),
# available filters: private filters (user_ids=uids) and public filters (uids=NULL),
# and filters for the action (action_id=action_id) or global (action_id=NULL)
user_context = self.env['res.users'].context_get()
action_domain = self._get_action_domain(action_id, embedded_action_id, embedded_parent_res_id)
return self.with_context(user_context).search_read(
action_domain + [('model_id', '=', model), ('user_id', 'in', [self._uid, False])],
['name', 'is_default', 'domain', 'context', 'user_id', 'sort', 'embedded_action_id', 'embedded_parent_res_id'],
action_domain + [('model_id', '=', model), ('user_ids', 'in', [self.env.uid, False])],
['name', 'is_default', 'domain', 'context', 'user_ids', 'sort', 'embedded_action_id', 'embedded_parent_res_id'],
)
@api.model
def _check_global_default(self, vals, matching_filters):
""" _check_global_default(dict, list(dict), dict) -> None
Checks if there is a global default for the model_id requested.
If there is, and the default is different than the record being written
(-> we're not updating the current global default), raise an error
to avoid users unknowingly overwriting existing global defaults (they
have to explicitly remove the current default before setting a new one)
This method should only be called if ``vals`` is trying to set
``is_default``
:raises odoo.exceptions.UserError: if there is an existing default and
we're not updating it
"""
domain = self._get_action_domain(vals.get('action_id'), vals.get('embedded_action_id'), vals.get('embedded_parent_res_id'))
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', False),
('is_default', '=', True),
])
if not defaults:
return
if matching_filters and (matching_filters[0]['id'] == defaults.id):
return
raise UserError(self.env._("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default", model=vals.get('model_id')))
@api.model
@api.returns('self', lambda value: value.id)
def create_or_replace(self, vals):
action_id = vals.get('action_id')
def create_filter(self, vals):
embedded_action_id = vals.get('embedded_action_id')
if not embedded_action_id and 'embedded_parent_res_id' in vals:
del vals['embedded_parent_res_id']
embedded_parent_res_id = vals.get('embedded_parent_res_id')
current_filters = self.get_filters(vals['model_id'], action_id, embedded_action_id, embedded_parent_res_id)
matching_filters = [f for f in current_filters
if f['name'].lower() == vals['name'].lower()
# next line looks for matching user_ids (specific or global), i.e.
# f.user_id is False and vals.user_id is False or missing,
# or f.user_id.id == vals.user_id
if (f['user_id'] and f['user_id'][0]) == vals.get('user_id')]
if vals.get('is_default'):
if vals.get('user_id'):
# Setting new default: any other default that belongs to the user
# should be turned off
domain = self._get_action_domain(action_id, embedded_action_id, embedded_parent_res_id)
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', vals['user_id']),
('is_default', '=', True),
])
if defaults:
defaults.write({'is_default': False})
else:
self._check_global_default(vals, matching_filters)
# When a filter exists for the same (name, model, user) triple, we simply
# replace its definition (considering action_id irrelevant here)
if matching_filters:
matching_filter = self.browse(matching_filters[0]['id'])
matching_filter.write(vals)
return matching_filter
return self.create(vals)
_sql_constraints = [
# Partial constraint, complemented by unique index (see below). Still
# useful to keep because it provides a proper error message when a
# violation occurs, as it shares the same prefix as the unique index.
('name_model_uid_unique', 'unique (model_id, user_id, action_id, embedded_action_id, embedded_parent_res_id, name)',
'Filter names must be unique'),
# The embedded_parent_res_id can only be defined when the embedded_action_id field is set.
# As the embedded model is linked to only one res_model, It ensure the unicity of the filter regarding the
# embedded_parent_res_model and the embedded_parent_res_id
(
'check_res_id_only_when_embedded_action',
"""CHECK(
NOT (embedded_parent_res_id IS NOT NULL AND embedded_action_id IS NULL)
)""",
'Constraint to ensure that the embedded_parent_res_id is only defined when a top_action_id is defined.'
),
('check_sort_json', "CHECK(sort IS NULL OR jsonb_typeof(sort::jsonb) = 'array')", 'Invalid sort definition'),
]
def _auto_init(self):
result = super(IrFilters, self)._auto_init()
# Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
tools.create_unique_index(self._cr, 'ir_filters_name_model_uid_unique_action_index',
self._table, ['model_id', 'COALESCE(user_id,-1)', 'COALESCE(action_id,-1)',
'lower(name)', 'embedded_parent_res_id', 'COALESCE(embedded_action_id,-1)'])
return result

View file

@ -14,6 +14,8 @@ import werkzeug
import werkzeug.exceptions
import werkzeug.routing
import werkzeug.utils
from werkzeug.datastructures import WWWAuthenticate
from werkzeug.exceptions import Unauthorized
try:
from werkzeug.routing import NumberConverter
@ -27,9 +29,10 @@ except ImportError:
slugify_lib = None
import odoo
from odoo import api, http, models, tools, SUPERUSER_ID
from odoo import api, http, models, tools
from odoo.api import SUPERUSER_ID
from odoo.exceptions import AccessDenied
from odoo.http import request, Response, ROUTING_KEYS
from odoo.http import ROUTING_KEYS, SAFE_HTTP_METHODS, Response, request
from odoo.modules.registry import Registry
from odoo.service import security
from odoo.tools.json import json_default
@ -68,7 +71,7 @@ class ModelConverter(werkzeug.routing.BaseConverter):
def to_python(self, value: str) -> models.BaseModel:
_uid = RequestUID(value=value, converter=self)
env = api.Environment(request.cr, _uid, request.context)
env = api.Environment(request.env.cr, _uid, request.env.context)
return env[self.model].browse(self.unslug(value)[1])
def to_url(self, value: models.BaseModel) -> str:
@ -84,7 +87,7 @@ class ModelsConverter(werkzeug.routing.BaseConverter):
def to_python(self, value: str) -> models.BaseModel:
_uid = RequestUID(value=value, converter=self)
env = api.Environment(request.cr, _uid, request.context)
env = api.Environment(request.env.cr, _uid, request.env.context)
return env[self.model].browse(int(v) for v in value.split(','))
def to_url(self, value: models.BaseModel) -> str:
@ -137,12 +140,14 @@ class IrHttp(models.AbstractModel):
_description = "HTTP Routing"
@classmethod
def _slugify_one(cls, value: str, max_length: int = 0) -> str:
def _slugify_one(cls, value: str, max_length: int = None) -> str:
""" Transform a string to a slug that can be used in a url path.
This method will first try to do the job with python-slugify if present.
Otherwise it will process string by stripping leading and ending spaces,
converting unicode chars to ascii, lowering all chars and replacing spaces
and underscore with hyphen "-".
Otherwise it will process string by replacing spaces and underscores with
dashes '-', removing every character that is not a word or a dash,
collapsing multiple dashes like --- into a single dash, removing leading
and trailing dashes and converting to lowercase.
Example: ^he$#!l(%l}o 你好& becomes hello-你好
"""
if slugify_lib:
# There are 2 different libraries only python-slugify is supported
@ -150,12 +155,19 @@ class IrHttp(models.AbstractModel):
return slugify_lib.slugify(value, max_length=max_length)
except TypeError:
pass
uni = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
slug_str = re.sub(r'[\W_]+', '-', uni).strip('-').lower()
return slug_str[:max_length] if max_length > 0 else slug_str
uni = unicodedata.normalize('NFKD', value)
slugified_segments = []
for slug in re.split('-|_| ', uni):
slug = re.sub(r'([^\w-])+', '', slug)
slug = re.sub(r'--+', '-', slug)
slug = slug.strip('-')
if slug:
slugified_segments.append(slug.lower())
slugified_str = '-'.join(slugified_segments)
return slugified_str[:max_length]
@classmethod
def _slugify(cls, value: str, max_length: int = 0, path: bool = False) -> str:
def _slugify(cls, value: str, max_length: int = None, path: bool = False) -> str:
if not path:
return cls._slugify_one(value, max_length=max_length)
else:
@ -228,18 +240,19 @@ class IrHttp(models.AbstractModel):
# 'rpc' scope does not really exist, we basically require a global key (scope NULL)
uid = request.env['res.users.apikeys']._check_credentials(scope='rpc', key=token)
if not uid:
raise werkzeug.exceptions.Unauthorized(
"Invalid apikey",
www_authenticate=werkzeug.datastructures.WWWAuthenticate('bearer'))
e = "Invalid apikey"
raise Unauthorized(e, www_authenticate=WWWAuthenticate('bearer'))
if request.env.uid and request.env.uid != uid:
raise AccessDenied("Session user does not match the used apikey")
e = "Session user does not match the used apikey."
raise AccessDenied(e)
request.update_env(user=uid)
request.session.can_save = False # stateless
elif not request.env.uid:
raise werkzeug.exceptions.Unauthorized(
'User not authenticated, use the "Authorization" header',
www_authenticate=werkzeug.datastructures.WWWAuthenticate('bearer'))
e = "User not authenticated, use an API Key with a Bearer Authorization header."
raise Unauthorized(e, www_authenticate=WWWAuthenticate('bearer'))
elif not check_sec_headers():
raise AccessDenied("Missing \"Authorization\" or Sec-headers for interactive usage")
e = 'Missing "Authorization" or Sec-headers for interactive usage.'
raise werkzeug.exceptions.Unauthorized(e, www_authenticate=WWWAuthenticate('bearer'))
cls._auth_method_user()
@classmethod
@ -250,6 +263,7 @@ class IrHttp(models.AbstractModel):
@classmethod
def _auth_method_none(cls):
request.env = api.Environment(request.env.cr, None, request.env.context)
request.env.transaction.default_env = request.env
@classmethod
def _auth_method_public(cls):
@ -309,20 +323,25 @@ class IrHttp(models.AbstractModel):
env = request.env if request.env.uid else request.env['base'].with_user(SUPERUSER_ID).env
request.update_context(lang=get_lang(env).code)
# Replace uid and lang placeholder by the current request.env.uid and request.env.lang
# before checking the access.
for key, val in list(args.items()):
if not isinstance(val, models.BaseModel):
continue
# Replace uid and lang placeholder by the current request.env.uid and request.env.lang
args[key] = val.with_env(request.env)
for key, val in list(args.items()):
if not isinstance(val, models.BaseModel):
continue
try:
# explicitly crash now, instead of crashing later
args[key].check_access('read')
except (odoo.exceptions.AccessError, odoo.exceptions.MissingError) as e:
# custom behavior in case a record is not accessible / has been removed
if handle_error := rule.endpoint.routing.get('handle_params_access_error'):
if response := handle_error(e):
if response := handle_error(e, **args):
werkzeug.exceptions.abort(response)
if request.env.user.is_public or isinstance(e, odoo.exceptions.MissingError):
raise werkzeug.exceptions.NotFound() from e
@ -330,6 +349,11 @@ class IrHttp(models.AbstractModel):
@classmethod
def _dispatch(cls, endpoint):
# Verify the captcha in case it was set on @http.route
# https://httpwg.org/specs/rfc9110.html#safe.methods
captcha = endpoint.routing.get('captcha')
if captcha and request.httprequest.method not in SAFE_HTTP_METHODS:
request.env['ir.http']._verify_request_recaptcha_token(captcha)
result = endpoint(**request.params)
if isinstance(result, Response) and result.is_qweb:
result.flatten()
@ -365,7 +389,7 @@ class IrHttp(models.AbstractModel):
def routing_map(self, key=None):
_logger.info("Generating routing map for key %s", str(key))
registry = Registry(threading.current_thread().dbname)
installed = registry._init_modules.union(odoo.conf.server_wide_modules)
installed = registry._init_modules.union(odoo.tools.config['server_wide_modules'])
mods = sorted(installed)
# Note : when routing map is generated, we put it on the class `cls`
# to make it available for all instance. Since `env` create an new instance
@ -388,11 +412,9 @@ class IrHttp(models.AbstractModel):
http.root.session_store.vacuum(max_lifetime=http.get_session_max_inactivity(self.env))
@api.model
def get_translations_for_webclient(self, modules, lang):
if not modules:
modules = self.pool._init_modules
def _get_translations_for_webclient(self, modules, lang):
if not lang:
lang = self._context.get("lang")
lang = self.env.context.get("lang")
lang_data = self.env['res.lang']._get_data(code=lang)
lang_params = {
"name": lang_data.name,
@ -400,7 +422,6 @@ class IrHttp(models.AbstractModel):
"direction": lang_data.direction,
"date_format": lang_data.date_format,
"time_format": lang_data.time_format,
"short_time_format": lang_data.short_time_format,
"grouping": lang_data.grouping,
"decimal_point": lang_data.decimal_point,
"thousands_sep": lang_data.thousands_sep,
@ -417,14 +438,17 @@ class IrHttp(models.AbstractModel):
@api.model
@tools.ormcache('frozenset(modules)', 'lang')
def get_web_translations_hash(self, modules, lang):
translations, lang_params = self.get_translations_for_webclient(modules, lang)
def _get_web_translations_hash(self, modules, lang):
translations, lang_params = self._get_translations_for_webclient(modules, lang)
translation_cache = {
'lang_parameters': lang_params,
'modules': translations,
'lang': lang,
'multi_lang': len(self.env['res.lang'].sudo().get_installed()) > 1,
}
if self.env.context.get('cache_translation_data'):
# put in the transactional cache
self.env.cr.cache['translation_data'] = translation_cache
return hashlib.sha1(json.dumps(translation_cache, sort_keys=True, default=json_default).encode()).hexdigest()
@classmethod
@ -432,5 +456,5 @@ class IrHttp(models.AbstractModel):
return True if cookie_type == 'required' else bool(request.env.user)
@api.model
def _verify_request_recaptcha_token(self, action):
return True
def _verify_request_recaptcha_token(self, action: str):
return

View file

@ -35,10 +35,10 @@ class IrLogging(models.Model):
def init(self):
super(IrLogging, self).init()
self._cr.execute("select 1 from information_schema.constraint_column_usage where table_name = 'ir_logging' and constraint_name = 'ir_logging_write_uid_fkey'")
if self._cr.rowcount:
self.env.cr.execute("select 1 from information_schema.constraint_column_usage where table_name = 'ir_logging' and constraint_name = 'ir_logging_write_uid_fkey'")
if self.env.cr.rowcount:
# DROP CONSTRAINT unconditionally takes an ACCESS EXCLUSIVE lock
# on the table, even "IF EXISTS" is set and not matching; disabling
# the relevant trigger instead acquires SHARE ROW EXCLUSIVE, which
# still conflicts with the ROW EXCLUSIVE needed for an insert
self._cr.execute("ALTER TABLE ir_logging DROP CONSTRAINT ir_logging_write_uid_fkey")
self.env.cr.execute("ALTER TABLE ir_logging DROP CONSTRAINT ir_logging_write_uid_fkey")

View file

@ -4,6 +4,7 @@ import base64
import datetime
import email
import email.policy
import functools
import idna
import logging
import re
@ -15,13 +16,20 @@ from socket import gaierror, timeout
from OpenSSL import crypto as SSLCrypto
from OpenSSL.crypto import Error as SSLCryptoError, FILETYPE_PEM
from OpenSSL.SSL import Error as SSLError
from urllib3.contrib.pyopenssl import PyOpenSSLContext
from OpenSSL.SSL import Error as SSLError, VERIFY_PEER, VERIFY_FAIL_IF_NO_PEER_CERT
from urllib3.contrib.pyopenssl import PyOpenSSLContext, get_subj_alt_name
from odoo import api, fields, models, tools, _, modules
from odoo.exceptions import UserError
from odoo.tools import formataddr, email_normalize, encapsulate_email, email_domain_extract, email_domain_normalize, human_size
try:
# urllib3 1.26 (ubuntu jammy and up, debian bullseye and up)
from urllib3.util.ssl_match_hostname import CertificateError, match_hostname
except ImportError:
# urllib3 1.25 and below
from urllib3.packages.ssl_match_hostname import CertificateError, match_hostname
_logger = logging.getLogger(__name__)
_test_logger = logging.getLogger('odoo.tests')
@ -32,31 +40,6 @@ class MailDeliveryException(Exception):
"""Specific exception subclass for mail delivery errors"""
def make_wrap_property(name):
return property(
lambda self: getattr(self.__obj__, name),
lambda self, value: setattr(self.__obj__, name, value),
)
class SMTPConnection:
"""Wrapper around smtplib.SMTP and smtplib.SMTP_SSL"""
def __init__(self, server, port, encryption, context=None):
if encryption == 'ssl':
self.__obj__ = smtplib.SMTP_SSL(server, port, timeout=SMTP_TIMEOUT, context=context)
else:
self.__obj__ = smtplib.SMTP(server, port, timeout=SMTP_TIMEOUT)
SMTP_ATTRIBUTES = [
'auth', 'auth_cram_md5', 'auth_login', 'auth_plain', 'close', 'data', 'docmd', 'ehlo', 'ehlo_or_helo_if_needed',
'expn', 'from_filter', 'getreply', 'has_extn', 'login', 'mail', 'noop', 'putcmd', 'quit', 'rcpt', 'rset',
'send_message', 'sendmail', 'set_debuglevel', 'smtp_from', 'starttls', 'user', 'verify', '_host', 'esmtp_features',
]
for name in SMTP_ATTRIBUTES:
setattr(SMTPConnection, name, make_wrap_property(name))
# Python 3: patch SMTP's internal printer/debugger
def _print_debug(self, *args):
_logger.debug(' '.join(str(a) for a in args))
@ -105,9 +88,27 @@ def extract_rfc2822_addresses(text):
return valid_addresses
class IrMailServer(models.Model):
def _verify_check_hostname_callback(cnx, x509, err_no, err_depth, return_code, *, hostname):
"""Callback used for pyOpenSSL.verify_mode, by default pyOpenSSL
only checks :param:`err_no`, we enrich it to also verify that
the SMTP server :param:`hostname` matches the :param:`x509`'s
Common Name (CN) or Subject Alternative Name (SAN)."""
if err_no:
return False
if err_depth == 0: # leaf certificate
peercert = {
"subject": ((("commonName", x509.get_subject().CN),),),
"subjectAltName": get_subj_alt_name(x509),
}
match_hostname(peercert, hostname) # it raises when it does not match
return True
class IrMail_Server(models.Model):
"""Represents an SMTP server, able to send outgoing emails, with SSL and TLS capabilities."""
_name = "ir.mail_server"
_name = 'ir.mail_server'
_description = 'Mail Server'
_order = 'sequence, id'
_allow_sudo_commands = False
@ -137,13 +138,19 @@ class IrMailServer(models.Model):
smtp_user = fields.Char(string='Username', help="Optional username for SMTP authentication", groups='base.group_system')
smtp_pass = fields.Char(string='Password', help="Optional password for SMTP authentication", groups='base.group_system')
smtp_encryption = fields.Selection([('none', 'None'),
('starttls', 'TLS (STARTTLS)'),
('ssl', 'SSL/TLS')],
('starttls_strict', 'TLS (STARTTLS), encryption and validation'),
('starttls', 'TLS (STARTTLS), encryption only'),
('ssl_strict', 'SSL/TLS, encryption and validation'),
('ssl', 'SSL/TLS, encryption only')],
string='Connection Encryption', required=True, default='none',
help="Choose the connection encryption scheme:\n"
"- None: SMTP sessions are done in cleartext.\n"
"- TLS (STARTTLS): TLS encryption is requested at start of SMTP session (Recommended)\n"
"- SSL/TLS: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)")
"- SSL/TLS: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)\n"
"\n"
"Choose an additional variant for SSL or TLS:\n"
"- encryption and validation: encrypt the data and authenticate the server using its SSL certificate (Recommended)\n"
"- encryption only: encrypt the data but skip server authentication")
smtp_ssl_certificate = fields.Binary(
'SSL Certificate', groups='base.group_system', attachment=False,
help='SSL certificate used for authentication')
@ -158,13 +165,10 @@ class IrMailServer(models.Model):
"is used. Default priority is 10 (smaller number = higher priority)")
active = fields.Boolean(default=True)
_sql_constraints = [
(
'certificate_requires_tls',
"CHECK(smtp_encryption != 'none' OR smtp_authentication != 'certificate')",
"Certificate-based authentication requires a TLS transport"
),
]
_certificate_requires_tls = models.Constraint(
"CHECK(smtp_encryption != 'none' OR smtp_authentication != 'certificate')",
"Certificate-based authentication requires a TLS transport",
)
@api.depends('smtp_authentication')
def _compute_smtp_authentication_info(self):
@ -245,7 +249,7 @@ class IrMailServer(models.Model):
def _get_test_email_from(self):
self.ensure_one()
email_from = False
if from_filter_parts := [part.strip() for part in (self.from_filter or '').split(",") if part.strip()]:
if from_filter_parts := self._parse_from_filter(self.from_filter):
# find first found complete email in filter parts
email_from = next((email for email in from_filter_parts if "@" in email), False)
# no complete email -> consider noreply
@ -266,19 +270,20 @@ class IrMailServer(models.Model):
"""Test the connection and if autodetect_max_email_size, set auto-detected max email size.
:param bool autodetect_max_email_size: whether to autodetect the max email size
:return (dict): client action to notify the user of the result of the operation (connection test or
auto-detection successful depending on the autodetect_max_email_size parameter)
:return: client action to notify the user of the result of the operation (connection test or
auto-detection successful depending on the ``autodetect_max_email_size`` parameter)
:rtype: dict
:raises UserError: if the connection fails and if autodetect_max_email_size and
:raises UserError: if the connection fails and if ``autodetect_max_email_size`` and
the server doesn't support the auto-detection of email max size
"""
for server in self:
smtp = False
try:
smtp = self.connect(mail_server_id=server.id, allow_archived=True)
# simulate sending an email from current user's address - without sending it!
email_from = server._get_test_email_from()
email_to = server._get_test_email_to()
smtp = self._connect__(mail_server_id=server.id, allow_archived=True, smtp_from=email_from)
# Testing the MAIL FROM step should detect sender filter problems
(code, repl) = smtp.mail(email_from)
if code != 250:
@ -311,6 +316,8 @@ class IrMailServer(models.Model):
raise UserError(_("An option is not supported by the server:\n %s", e)) from e
except smtplib.SMTPException as e:
raise UserError(_("An SMTP exception occurred. Check port number and connection security type.\n %s", e)) from e
except CertificateError as e:
raise UserError(_("An SSL exception occurred. Check connection security type.\n CertificateError: %s", e)) from e
except (ssl.SSLError, SSLError) as e:
raise UserError(_("An SSL exception occurred. Check connection security type.\n %s", e)) from e
except UserError:
@ -347,7 +354,13 @@ class IrMailServer(models.Model):
self.ensure_one()
return self.test_smtp_connection(autodetect_max_email_size=True)
def connect(self, host=None, port=None, user=None, password=None, encryption=None,
@classmethod
def _disable_send(cls):
"""Whether to disable sending e-mails"""
# no e-mails during testing or when registry is initializing
return modules.module.current_test or cls.pool._init
def _connect__(self, host=None, port=None, user=None, password=None, encryption=None, # noqa: PLW3201
smtp_from=None, ssl_certificate=None, ssl_private_key=None, smtp_debug=False, mail_server_id=None,
allow_archived=False):
"""Returns a new SMTP connection to the given SMTP server.
@ -357,7 +370,8 @@ class IrMailServer(models.Model):
:param int port: SMTP port to connect to
:param user: optional username to authenticate with
:param password: optional password to authenticate with
:param string encryption: optional, ``'ssl'`` | ``'starttls'``
:param str encryption: optional, ``'none'`` | ``'ssl'`` | ``'ssl_strict'`` | ``'starttls'`` | ``'starttls_strict'``.
The 'strict' variants verify the remote server's certificate against the operating system trust store.
:param smtp_from: FROM SMTP envelop, used to find the best mail server
:param ssl_certificate: filename of the SSL certificate used for authentication
Used when no mail server is given and overwrite the odoo-bin argument "smtp_ssl_certificate"
@ -367,17 +381,17 @@ class IrMailServer(models.Model):
will be output in logs)
:param mail_server_id: ID of specific mail server to use (overrides other parameters)
:param bool allow_archived: by default (False), an exception is raised when calling this method on an
archived record (using mail_server_id param). It can be set to True for testing so that the exception is no
longer raised.
archived record (using mail_server_id param). It can be set to True for testing so that the exception is
no longer raised.
"""
# Do not actually connect while running in test mode
if modules.module.current_test:
return
if self._disable_send():
return None
mail_server = smtp_encryption = None
if mail_server_id:
mail_server = self.sudo().browse(mail_server_id)
if not allow_archived and not mail_server.active:
raise UserError(_('The server "%s" cannot be used because it is archived.', mail_server.display_name))
self._check_forced_mail_server(mail_server, allow_archived, smtp_from)
elif not host:
mail_server, smtp_from = self.sudo()._find_mail_server(smtp_from)
@ -397,9 +411,18 @@ class IrMailServer(models.Model):
smtp_encryption = mail_server.smtp_encryption
smtp_debug = smtp_debug or mail_server.smtp_debug
from_filter = mail_server.from_filter
if mail_server.smtp_authentication == "certificate":
try:
ssl_context = PyOpenSSLContext(ssl.PROTOCOL_TLS)
if mail_server.smtp_encryption in ('ssl_strict', 'starttls_strict'):
ssl_context.set_default_verify_paths()
ssl_context._ctx.set_verify(
VERIFY_PEER | VERIFY_FAIL_IF_NO_PEER_CERT,
functools.partial(_verify_check_hostname_callback, hostname=smtp_server)
)
else: # ssl, starttls
ssl_context.verify_mode = ssl.CERT_NONE
smtp_ssl_certificate = base64.b64decode(mail_server.smtp_ssl_certificate)
certificate = SSLCrypto.load_certificate(FILETYPE_PEM, smtp_ssl_certificate)
smtp_ssl_private_key = base64.b64decode(mail_server.smtp_ssl_private_key)
@ -412,6 +435,15 @@ class IrMailServer(models.Model):
raise UserError(_('The private key or the certificate is not a valid file. \n%s', str(e)))
except SSLError as e:
raise UserError(_('Could not load your certificate / private key. \n%s', str(e)))
elif mail_server.smtp_encryption != 'none':
if mail_server.smtp_encryption in ('ssl_strict', 'starttls_strict'):
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = True
ssl_context.verify_mode = ssl.CERT_REQUIRED
else: # ssl, starttls
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
else:
# we were passed individual smtp parameters or nothing and there is no default server
@ -433,6 +465,7 @@ class IrMailServer(models.Model):
if smtp_ssl_certificate_filename and smtp_ssl_private_key_filename:
try:
ssl_context = PyOpenSSLContext(ssl.PROTOCOL_TLS)
ssl_context.verify_mode = ssl.CERT_NONE
ssl_context.load_cert_chain(smtp_ssl_certificate_filename, keyfile=smtp_ssl_private_key_filename)
# Check that the private key match the certificate
ssl_context._ctx.check_privatekey()
@ -448,16 +481,12 @@ class IrMailServer(models.Model):
"or provide the SMTP parameters explicitly.",
))
if smtp_encryption == 'ssl':
if 'SMTP_SSL' not in smtplib.__all__:
raise UserError(
_("Your Odoo Server does not support SMTP-over-SSL. "
"You could use STARTTLS instead. "
"If SSL is needed, an upgrade to Python 2.6 on the server-side "
"should do the trick."))
connection = SMTPConnection(smtp_server, smtp_port, smtp_encryption, context=ssl_context)
if smtp_encryption in ('ssl', 'ssl_strict'):
connection = smtplib.SMTP_SSL(smtp_server, smtp_port, timeout=SMTP_TIMEOUT, context=ssl_context)
else:
connection = smtplib.SMTP(smtp_server, smtp_port, timeout=SMTP_TIMEOUT)
connection.set_debuglevel(smtp_debug)
if smtp_encryption == 'starttls':
if smtp_encryption in ('starttls', 'starttls_strict'):
# starttls() will perform ehlo() if needed first
# and will discard the previous list of services
# after successfully performing STARTTLS command,
@ -471,7 +500,7 @@ class IrMailServer(models.Model):
local, at, domain = smtp_user.rpartition('@')
if at:
smtp_user = local + at + idna.encode(domain).decode('ascii')
mail_server._smtp_login(connection, smtp_user, smtp_password or '')
mail_server._smtp_login__(connection, smtp_user, smtp_password or '')
# Some methods of SMTP don't check whether EHLO/HELO was sent.
# Anyway, as it may have been sent by login(), all subsequent usages should consider this command as sent.
@ -484,7 +513,11 @@ class IrMailServer(models.Model):
return connection
def _smtp_login(self, connection, smtp_user, smtp_password):
def _check_forced_mail_server(self, mail_server, allow_archived, smtp_from):
if not allow_archived and not mail_server.active:
raise UserError(_('The server "%s" cannot be used because it is archived.', mail_server.display_name))
def _smtp_login__(self, connection, smtp_user, smtp_password): # noqa: PLW3201
"""Authenticate the SMTP connection.
Can be overridden in other modules for different authentication methods. Can be
@ -496,7 +529,7 @@ class IrMailServer(models.Model):
"""
connection.login(smtp_user, smtp_password)
def build_email(self, email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,
def _build_email__(self, email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False, # noqa: PLW3201
attachments=None, message_id=None, references=None, object_id=False, subtype='plain', headers=None,
body_alternative=None, subtype_alternative='plain'):
"""Constructs an RFC2822 email.message.Message object based on the keyword arguments passed, and returns it.
@ -585,7 +618,8 @@ class IrMailServer(models.Model):
""" Computes the default bounce address. It is used to set the envelop
address if no envelop address is provided in the message.
:return str/None: defaults to the ``--email-from`` CLI/config parameter.
:return: defaults to the ``--email-from`` CLI/config parameter.
:rtype: str | None
"""
return tools.config.get("email_from")
@ -594,7 +628,8 @@ class IrMailServer(models.Model):
""" Computes the default from address. It is used for the "header from"
address when no other has been received.
:return str/None: defaults to the ``--email-from`` CLI/config parameter.
:return: defaults to the ``--email-from`` CLI/config parameter.
:rtype: str | None
"""
return tools.config.get("email_from")
@ -604,19 +639,21 @@ class IrMailServer(models.Model):
ir.mail_server is used when sending emails, hence having no value for
from_filter.
:return str/None: defaults to 'mail.default.from_filter', then
:return: defaults to 'mail.default.from_filter', then
``--from-filter`` CLI/config parameter.
:rtype: str | None
"""
return self.env['ir.config_parameter'].sudo().get_param(
'mail.default.from_filter', tools.config.get('from_filter')
)
def _prepare_email_message(self, message, smtp_session):
def _prepare_email_message__(self, message, smtp_session): # noqa: PLW3201
"""Prepare the SMTP information (from, to, message) before sending.
:param message: the email.message.Message to send, information like the
Return-Path, the From, etc... will be used to find the smtp_from and to smtp_to
:param smtp_session: the opened SMTP session to use to authenticate the sender
:return: smtp_from, smtp_to_list, message
smtp_from: email to used during the authentication to the mail server
smtp_to_list: list of email address which will receive the email
@ -631,34 +668,9 @@ class IrMailServer(models.Model):
smtp_from = message['From'] or bounce_address
assert smtp_from, self.NO_FOUND_SMTP_FROM
email_to = message['To']
email_cc = message['Cc']
email_bcc = message['Bcc']
del message['Bcc']
# All recipient addresses must only contain ASCII characters; support
# optional pre-validated To list, used notably when formatted emails may
# create fake emails using extract_rfc2822_addresses, e.g.
# '"Bike@Home" <email@domain.com>' which can be considered as containing
# 2 emails by extract_rfc2822_addresses
validated_to = self.env.context.get('send_validated_to') or []
smtp_to_list = [
address
for base in [email_to, email_cc, email_bcc]
# be sure a given address does not return duplicates (but duplicates
# in final smtp to list is still ok)
for address in tools.misc.unique(extract_rfc2822_addresses(base))
if address and (not validated_to or address in validated_to)
]
smtp_to_list = self._prepare_smtp_to_list(message, smtp_session)
assert smtp_to_list, self.NO_VALID_RECIPIENT
x_forge_to = message['X-Forge-To']
if x_forge_to:
# `To:` header forged, e.g. for posting on discuss.channels, to avoid confusion
del message['X-Forge-To']
del message['To'] # avoid multiple To: headers!
message['To'] = x_forge_to
# Try to not spoof the mail from headers; fetch session-based or contextualized
# values for encapsulation computation
from_filter = getattr(smtp_session, 'from_filter', False)
@ -669,9 +681,8 @@ class IrMailServer(models.Model):
if notifications_email and email_normalize(smtp_from) == notifications_email and email_normalize(message['From']) != notifications_email:
smtp_from = encapsulate_email(message['From'], notifications_email)
if message['From'] != smtp_from:
del message['From']
message['From'] = smtp_from
# alter message
self._alter_message__(message, smtp_from, smtp_to_list)
# Check if it's still possible to put the bounce address as smtp_from
if self._match_from_filter(bounce_address, from_filter):
@ -691,6 +702,70 @@ class IrMailServer(models.Model):
return smtp_from, smtp_to_list, message
@api.model
def _alter_message__(self, message, smtp_from, smtp_to_list): # noqa: PLW3201
# `To:` header forged, e.g. for posting on discuss.channels, to avoid confusion
if x_forge_to := message['X-Forge-To']:
message.replace_header('To', x_forge_to)
# `To:` header extended, e.g. for adding "virtual" recipients, aka fake recipients
# that do not impact SMTP To
elif x_msg_add_to := message['X-Msg-To-Add']:
to = message['To'] or ''
to_normalized = tools.mail.email_normalize_all(to)
message.replace_header(
'To', ', '.join([
to,
', '.join(
address for address in tools.mail.email_split_and_format(x_msg_add_to)
if tools.mail.email_normalize(address, strict=False) not in to_normalized
),
]
))
if message['From'] != smtp_from:
message.replace_header('From', smtp_from)
# cleanup unwanted headers
del message['Bcc'] # see odoo/odoo@2445f9e3c22db810d61996afde883e4ca608f15b
del message['X-Forge-To']
del message['X-Msg-To-Add']
del message['X-Msg-To-Consolidate']
@api.model
def _prepare_smtp_to_list(self, message, smtp_session):
""" Prepare SMTP To address list, based on To / Cc / Bcc.
Optional 'send_validated_to' context key filter restricts addresses to
be part of that list.
Optional 'send_smtp_skip_to' context key holds a recipients block list
"""
email_to = message['To']
email_cc = message['Cc']
email_bcc = message['Bcc']
# Support optional pre-validated To list, used notably when formatted
# emails may create fake emails using extract_rfc2822_addresses, e.g.
# '"Bike@Home" <email@domain.com>' which can be considered as containing
# 2 emails by extract_rfc2822_addresses
validated_to = self.env.context.get('send_validated_to') or []
# Support optional skip To list
skip_to_lst = self.env.context.get('send_smtp_skip_to') or []
# All recipient addresses must only contain ASCII characters
return [
address
for base in [email_to, email_cc, email_bcc]
# be sure a given address does not return duplicates (but duplicates
# in final smtp to list is still ok)
for address in tools.misc.unique(extract_rfc2822_addresses(base))
if (
address and (not validated_to or address in validated_to)
and email_normalize(address, strict=False) not in skip_to_lst
)
]
@api.model
def send_email(self, message, mail_server_id=None, smtp_server=None, smtp_port=None,
smtp_user=None, smtp_password=None, smtp_encryption=None,
@ -717,7 +792,8 @@ class IrMailServer(models.Model):
messages. The caller is in charge of disconnecting the session.
:param mail_server_id: optional id of ir.mail_server to use for sending. overrides other smtp_* arguments.
:param smtp_server: optional hostname of SMTP server to use
:param smtp_encryption: optional TLS mode, one of 'none', 'starttls' or 'ssl' (see ir.mail_server fields for explanation)
:param smtp_encryption: optional TLS mode, one of 'none', 'starttls', 'starttls_strict', 'ssl', or 'ssl_strict'.
The 'strict' variants verify the remote server's certificate against the operating system trust store.
:param smtp_port: optional SMTP port, if mail_server_id is not passed
:param smtp_user: optional SMTP user, if mail_server_id is not passed
:param smtp_password: optional SMTP password to use, if mail_server_id is not passed
@ -729,15 +805,15 @@ class IrMailServer(models.Model):
"""
smtp = smtp_session
if not smtp:
smtp = self.connect(
smtp = self._connect__(
smtp_server, smtp_port, smtp_user, smtp_password, smtp_encryption,
smtp_from=message['From'], ssl_certificate=smtp_ssl_certificate, ssl_private_key=smtp_ssl_private_key,
smtp_debug=smtp_debug, mail_server_id=mail_server_id,)
smtp_from, smtp_to_list, message = self._prepare_email_message(message, smtp)
smtp_from, smtp_to_list, message = self._prepare_email_message__(message, smtp)
# Do not actually send emails in testing mode!
if modules.module.current_test:
if self._disable_send():
_test_logger.debug("skip sending email in test mode")
return message['Message-Id']
@ -762,13 +838,19 @@ class IrMailServer(models.Model):
raise MailDeliveryException(_("Mail Delivery Failed"), msg)
return message_id
def _find_mail_server_allowed_domain(self):
"""Overridable domain getter for all mail servers that may be used as default."""
return fields.Domain.TRUE
def _find_mail_server(self, email_from, mail_servers=None):
"""Find the appropriate mail server for the given email address.
Returns: Record<ir.mail_server>, email_from
- Mail server to use to send the email (None if we use the odoo-bin arguments)
- Email FROM to use to send the email (in some case, it might be impossible
to use the given email address directly if no mail server is configured for)
:rtype: tuple[IrMail_Server | None, str]
:returns: A two-elements tuple: ``(Record<ir.mail_server>, email_from)``
1. Mail server to use to send the email (``None`` if we use the odoo-bin arguments)
2. Email FROM to use to send the email (in some case, it might be impossible
to use the given email address directly if no mail server is configured for)
"""
email_from_normalized = email_normalize(email_from)
email_from_domain = email_domain_extract(email_from_normalized)
@ -776,7 +858,7 @@ class IrMailServer(models.Model):
notifications_domain = email_domain_extract(notifications_email)
if mail_servers is None:
mail_servers = self.sudo().search([], order='sequence')
mail_servers = self.sudo().search(self._find_mail_server_allowed_domain(), order='sequence')
# 0. Archived mail server should never be used
mail_servers = mail_servers.filtered('active')
@ -797,6 +879,8 @@ class IrMailServer(models.Model):
if mail_server := first_match(email_from_domain, email_domain_normalize):
return mail_server, email_from
mail_servers = self._filter_mail_servers_fallback(mail_servers)
# 2. Try to find a mail server for <notifications@domain.com>
if notifications_email:
if mail_server := first_match(notifications_email, email_normalize):
@ -834,6 +918,11 @@ class IrMailServer(models.Model):
notifications_email or email_from)
return None, notifications_email or email_from
@api.model
def _filter_mail_servers_fallback(self, servers):
"""Filter the mail servers that can be used as fallback, or for default email from."""
return servers
@api.model
def _match_from_filter(self, email_from, from_filter):
"""Return True is the given email address match the "from_filter" field.
@ -847,13 +936,17 @@ class IrMailServer(models.Model):
normalized_mail_from = email_normalize(email_from)
normalized_domain = email_domain_extract(normalized_mail_from)
for email_filter in [part.strip() for part in (from_filter or '').split(',') if part.strip()]:
for email_filter in self._parse_from_filter(from_filter):
if '@' in email_filter and email_normalize(email_filter) == normalized_mail_from:
return True
if '@' not in email_filter and email_domain_normalize(email_filter) == normalized_domain:
return True
return False
@api.model
def _parse_from_filter(self, from_filter):
return [part.strip() for part in (from_filter or '').split(',') if part.strip()]
@api.onchange('smtp_encryption')
def _onchange_encryption(self):
result = {}

View file

@ -1,20 +1,13 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import warnings
import functools
from collections import defaultdict, OrderedDict
from decorator import decorator
from operator import attrgetter
from textwrap import dedent
import io
import logging
import os
import platform
import shutil
import threading
import zipfile
import requests
import werkzeug.urls
import typing
from docutils import nodes
from docutils.core import publish_string
@ -27,13 +20,14 @@ import odoo
from odoo import api, fields, models, modules, tools, _
from odoo.addons.base.models.ir_model import MODULE_UNINSTALL_FLAG
from odoo.exceptions import AccessDenied, UserError, ValidationError
from odoo.osv import expression
from odoo.fields import Domain
from odoo.tools.parse_version import parse_version
from odoo.tools.misc import topological_sort, get_flag
from odoo.tools.translate import TranslationImporter, get_po_paths
from odoo.tools.translate import TranslationImporter, get_po_paths, get_datafile_translation_path
from odoo.http import request
from odoo.modules import get_module_path
from odoo.modules.module import Manifest, MissingDependency
T = typing.TypeVar('T')
_logger = logging.getLogger(__name__)
ACTION_DICT = {
@ -58,13 +52,14 @@ def backup(path, raise_exception=True):
cnt += 1
def assert_log_admin_access(method):
def assert_log_admin_access(method: T, /) -> T:
"""Decorator checking that the calling user is an administrator, and logging the call.
Raises an AccessDenied error if the user does not have administrator privileges, according
to `user._is_admin()`.
"""
def check_and_log(method, self, *args, **kwargs):
@functools.wraps(method)
def check_and_log(self, *args, **kwargs):
user = self.env.user
origin = request.httprequest.remote_addr if request else 'n/a'
log_data = (method.__name__, self.sudo().mapped('display_name'), user.login, user.id, origin)
@ -73,18 +68,20 @@ def assert_log_admin_access(method):
raise AccessDenied()
_logger.info('ALLOW access to module.%s on %s to user %s #%s via %s', *log_data)
return method(self, *args, **kwargs)
return decorator(check_and_log, method)
return check_and_log
class ModuleCategory(models.Model):
_name = "ir.module.category"
class IrModuleCategory(models.Model):
_name = 'ir.module.category'
_description = "Application"
_order = 'name'
_order = 'sequence, name, id'
_allow_sudo_commands = False
name = fields.Char(string='Name', required=True, translate=True, index=True)
name = fields.Char(string='Name', required=True, translate=True)
parent_id = fields.Many2one('ir.module.category', string='Parent Application', index=True)
child_ids = fields.One2many('ir.module.category', 'parent_id', string='Child Applications')
module_ids = fields.One2many('ir.module.module', 'category_id', string='Modules')
privilege_ids = fields.One2many('res.groups.privilege', 'category_id', string='Privileges')
description = fields.Text(string='Description', translate=True)
sequence = fields.Integer(string='Sequence')
visible = fields.Boolean(string='Visible', default=True)
@ -155,8 +152,8 @@ XML_DECLARATION = (
)
class Module(models.Model):
_name = "ir.module.module"
class IrModuleModule(models.Model):
_name = 'ir.module.module'
_rec_name = "shortdesc"
_rec_names_search = ['name', 'shortdesc', 'summary']
_description = "Module"
@ -165,11 +162,13 @@ class Module(models.Model):
@classmethod
def get_module_info(cls, name):
try:
return modules.get_manifest(name)
except Exception:
_logger.debug('Error when trying to fetch information for module %s', name, exc_info=True)
return {}
if isinstance(name, str):
# we have no info for studio_customization
# imported modules are not found using this method
return modules.Manifest.for_addon(name, display_warning=False) or {}
if isinstance(name, modules.Manifest):
return name
return {}
@api.depends('name', 'description')
def _get_desc(self):
@ -187,25 +186,7 @@ class Module(models.Model):
path = os.path.join(module.name, 'static/description/index.html')
try:
with tools.file_open(path, 'rb') as desc_file:
doc = desc_file.read()
if doc.startswith(XML_DECLARATION):
warnings.warn(
f"XML declarations in HTML module descriptions are "
f"deprecated since Odoo 17, {module.name} can just "
f"have a UTF8 description with not need for a "
f"declaration.",
category=DeprecationWarning,
)
else:
try:
doc = doc.decode()
except UnicodeDecodeError:
warnings.warn(
f"Non-UTF8 module descriptions are deprecated "
f"since Odoo 17 ({module.name}'s description "
f"is not utf-8)",
category=DeprecationWarning,
)
doc = desc_file.read().decode()
module.description_html = _apply_description_images(doc)
except FileNotFoundError:
overrides = {
@ -262,17 +243,21 @@ class Module(models.Model):
for module in self:
if not module.id:
continue
manifest = self.get_module_info(module.name)
if module.icon:
path = os.path.join(module.icon.lstrip("/"))
path = module.icon or ''
elif manifest:
path = manifest.get('icon', '')
else:
path = modules.module.get_module_icon_path(module)
path = Manifest.for_addon('base').icon
path = path.removeprefix("/")
if path:
try:
with tools.file_open(path, 'rb', filter_ext=('.png', '.svg', '.gif', '.jpeg', '.jpg')) as image_file:
module.icon_image = base64.b64encode(image_file.read())
except FileNotFoundError:
except OSError:
module.icon_image = ''
countries = self.get_module_info(module.name).get('countries', [])
countries = manifest.get('countries', [])
country_code = len(countries) == 1 and countries[0]
module.icon_flag = get_flag(country_code.upper()) if country_code else ''
@ -330,9 +315,10 @@ class Module(models.Model):
to_buy = fields.Boolean('Odoo Enterprise Module', default=False)
has_iap = fields.Boolean(compute='_compute_has_iap')
_sql_constraints = [
('name_uniq', 'UNIQUE (name)', 'The name of the module must be unique!'),
]
_name_uniq = models.Constraint(
'UNIQUE (name)',
"The name of the module must be unique!",
)
def _compute_has_iap(self):
for module in self:
@ -345,36 +331,46 @@ class Module(models.Model):
raise UserError(_('You are trying to remove a module that is installed or will be installed.'))
def unlink(self):
self.env.registry.clear_cache()
return super(Module, self).unlink()
self.env.registry.clear_cache('stable')
return super().unlink()
def _get_modules_to_load_domain(self):
""" Domain to retrieve the modules that should be loaded by the registry. """
return [('state', '=', 'installed')]
def check_external_dependencies(self, module_name, newstate='to install'):
terp = self.get_module_info(module_name)
manifest = modules.Manifest.for_addon(module_name)
if not manifest:
return # unavailable module, there is no point in checking dependencies
try:
modules.check_manifest_dependencies(terp)
except Exception as e:
manifest.check_manifest_dependencies()
except MissingDependency as e:
if newstate == 'to install':
msg = _('Unable to install module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
msg = _('Unable to install module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.dependency)
elif newstate == 'to upgrade':
msg = _('Unable to upgrade module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
msg = _('Unable to upgrade module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.dependency)
else:
msg = _('Unable to process module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
raise UserError(msg)
msg = _('Unable to process module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.dependency)
install_package = None
if platform.system() == 'Linux':
distro = platform.freedesktop_os_release()
id_likes = {distro['ID'], *distro.get('ID_LIKE').split()}
if 'debian' in id_likes or 'ubuntu' in id_likes:
if package := manifest['external_dependencies'].get('apt', {}).get(e.dependency):
install_package = f'apt install {package}'
if install_package:
msg += _("\nIt can be installed running: %s", install_package)
raise UserError(msg) from e
def _state_update(self, newstate, states_to_update, level=100):
if level < 1:
raise UserError(_('Recursion error in modules dependencies!'))
# whether some modules are installed with demo data
demo = False
for module in self:
if module.state not in states_to_update:
demo = demo or module.demo
continue
# determine dependency modules to update/others
@ -390,17 +386,13 @@ class Module(models.Model):
else:
update_mods += dep.depend_id
# update dependency modules that require it, and determine demo for module
update_demo = update_mods._state_update(newstate, states_to_update, level=level-1)
module_demo = module.demo or update_demo or any(mod.demo for mod in ready_mods)
demo = demo or module_demo
# update dependency modules that require it
update_mods._state_update(newstate, states_to_update, level=level-1)
if module.state in states_to_update:
# check dependencies and update module itself
self.check_external_dependencies(module.name, newstate)
module.write({'state': newstate, 'demo': module_demo})
return demo
module.write({'state': newstate})
@assert_log_admin_access
def button_install(self):
@ -487,10 +479,17 @@ class Module(models.Model):
return self._button_immediate_function(self.env.registry[self._name].button_install)
@assert_log_admin_access
def button_install_cancel(self):
self.write({'state': 'uninstalled', 'demo': False})
@api.model
def button_reset_state(self):
# reset the transient state for all modules in case the module operation is stopped in an unexpected way.
self.search([('state', '=', 'to install')]).state = 'uninstalled'
self.search([('state', 'in', ('to update', 'to remove'))]).state = 'installed'
return True
@api.model
def check_module_update(self):
return bool(self.sudo().search_count([('state', 'in', ('to install', 'to update', 'to remove'))], limit=1))
@assert_log_admin_access
def module_uninstall(self):
""" Perform the various steps required to uninstall a module completely
@ -512,11 +511,10 @@ class Module(models.Model):
It is important to remove these copies because using them will crash if
they rely on data that don't exist anymore if the module is removed.
"""
domain = expression.OR([[('key', '=like', m.name + '.%')] for m in self])
domain = Domain.OR(Domain('key', '=like', m.name + '.%') for m in self)
orphans = self.env['ir.ui.view'].with_context(**{'active_test': False, MODULE_UNINSTALL_FLAG: True}).search(domain)
orphans.unlink()
@api.returns('self')
def downstream_dependencies(self, known_deps=None,
exclude_states=('uninstalled', 'uninstallable', 'to remove')):
""" Return the modules that directly or indirectly depend on the modules
@ -534,15 +532,14 @@ class Module(models.Model):
d.name IN (SELECT name from ir_module_module where id in %s) AND
m.state NOT IN %s AND
m.id NOT IN %s """
self._cr.execute(query, (tuple(self.ids), tuple(exclude_states), tuple(known_deps.ids or self.ids)))
new_deps = self.browse([row[0] for row in self._cr.fetchall()])
self.env.cr.execute(query, (tuple(self.ids), tuple(exclude_states), tuple(known_deps.ids or self.ids)))
new_deps = self.browse([row[0] for row in self.env.cr.fetchall()])
missing_mods = new_deps - known_deps
known_deps |= new_deps
if missing_mods:
known_deps |= missing_mods.downstream_dependencies(known_deps, exclude_states)
return known_deps
@api.returns('self')
def upstream_dependencies(self, known_deps=None,
exclude_states=('installed', 'uninstallable', 'to remove')):
""" Return the dependency tree of modules of the modules in `self`, and
@ -560,8 +557,8 @@ class Module(models.Model):
m.name IN (SELECT name from ir_module_module_dependency where module_id in %s) AND
m.state NOT IN %s AND
m.id NOT IN %s """
self._cr.execute(query, (tuple(self.ids), tuple(exclude_states), tuple(known_deps.ids or self.ids)))
new_deps = self.browse([row[0] for row in self._cr.fetchall()])
self.env.cr.execute(query, (tuple(self.ids), tuple(exclude_states), tuple(known_deps.ids or self.ids)))
new_deps = self.browse([row[0] for row in self.env.cr.fetchall()])
missing_mods = new_deps - known_deps
known_deps |= new_deps
if missing_mods:
@ -589,32 +586,44 @@ class Module(models.Model):
if not self.env.registry.ready or self.env.registry._init:
raise UserError(_('The method _button_immediate_install cannot be called on init or non loaded registries. Please use button_install instead.'))
if getattr(threading.current_thread(), 'testing', False):
if modules.module.current_test:
raise RuntimeError(
"Module operations inside tests are not transactional and thus forbidden.\n"
"If you really need to perform module operations to test a specific behavior, it "
"is best to write it as a standalone script, and ask the runbot/metastorm team "
"for help."
)
# raise error if database is updating for module operations
if self.search_count([('state', 'in', ('to install', 'to upgrade', 'to remove'))], limit=1):
raise UserError(_("Odoo is currently processing another module operation.\n"
"Please try again later or contact your system administrator."))
try:
# raise error if another transaction is trying to schedule module operations concurrently
self.env.cr.execute("LOCK ir_module_module IN EXCLUSIVE MODE NOWAIT")
except psycopg2.OperationalError:
raise UserError(_("Odoo is currently processing another module operation.\n"
"Please try again later or contact your system administrator."))
try:
# This is done because the installation/uninstallation/upgrade can modify a currently
# running cron job and prevent it from finishing, and since the ir_cron table is locked
# during execution, the lock won't be released until timeout.
self._cr.execute("SELECT * FROM ir_cron FOR UPDATE NOWAIT")
self.env.cr.execute("SELECT FROM ir_cron FOR UPDATE NOWAIT")
except psycopg2.OperationalError:
raise UserError(_("Odoo is currently processing a scheduled action.\n"
"Module operations are not possible at this time, "
"please try again later or contact your system administrator."))
function(self)
self._cr.commit()
registry = modules.registry.Registry.new(self._cr.dbname, update_module=True)
self._cr.commit()
self.env.cr.commit()
registry = modules.registry.Registry.new(self.env.cr.dbname, update_module=True)
self.env.cr.commit()
if request and request.registry is self.env.registry:
request.env.cr.reset()
request.registry = request.env.registry
assert request.env.registry is registry
self._cr.reset()
self.env.cr.reset()
assert self.env.registry is registry
# pylint: disable=next-method-called
@ -641,7 +650,7 @@ class Module(models.Model):
@assert_log_admin_access
def button_uninstall(self):
un_installable_modules = set(odoo.conf.server_wide_modules) & set(self.mapped('name'))
un_installable_modules = set(odoo.tools.config['server_wide_modules']) & set(self.mapped('name'))
if un_installable_modules:
raise UserError(_("Those modules cannot be uninstalled: %s", ', '.join(un_installable_modules)))
if any(state not in ('installed', 'to upgrade') for state in self.mapped('state')):
@ -662,13 +671,9 @@ class Module(models.Model):
'name': _('Uninstall module'),
'view_mode': 'form',
'res_model': 'base.module.uninstall',
'context': {'default_module_id': self.id},
'context': {'default_module_ids': self.ids},
}
def button_uninstall_cancel(self):
self.write({'state': 'installed'})
return True
@assert_log_admin_access
def button_immediate_upgrade(self):
"""
@ -726,11 +731,6 @@ class Module(models.Model):
self.browse(to_install).button_install()
return dict(ACTION_DICT, name=_('Apply Schedule Upgrade'))
@assert_log_admin_access
def button_upgrade_cancel(self):
self.write({'state': 'installed'})
return True
@staticmethod
def get_values_from_terp(terp):
return {
@ -774,9 +774,9 @@ class Module(models.Model):
known_mods_names = {mod.name: mod for mod in known_mods}
# iterate through detected modules and update/create them in db
for mod_name in modules.get_modules():
mod = known_mods_names.get(mod_name)
terp = self.get_module_info(mod_name)
for manifest in modules.Manifest.all_addon_manifests():
mod = known_mods_names.get(manifest.name)
terp = self.get_module_info(manifest)
values = self.get_values_from_terp(terp)
if mod:
@ -791,12 +791,11 @@ class Module(models.Model):
res[0] += 1
if updated_values:
mod.write(updated_values)
elif not manifest or not terp:
continue
else:
mod_path = modules.get_module_path(mod_name)
if not mod_path or not terp:
continue
state = "uninstalled" if terp.get('installable', True) else "uninstallable"
mod = self.create(dict(name=mod_name, state=state, **values))
mod = self.create(dict(name=manifest.name, state=state, **values))
res[1] += 1
mod._update_from_terp(terp)
@ -811,13 +810,13 @@ class Module(models.Model):
def _update_dependencies(self, depends=None, auto_install_requirements=()):
self.env['ir.module.module.dependency'].flush_model()
existing = set(dep.name for dep in self.dependencies_id)
existing = {dep.name for dep in self.dependencies_id}
needed = set(depends or [])
for dep in (needed - existing):
self._cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (self.id, dep))
self.env.cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (self.id, dep))
for dep in (existing - needed):
self._cr.execute('DELETE FROM ir_module_module_dependency WHERE module_id = %s and name = %s', (self.id, dep))
self._cr.execute('UPDATE ir_module_module_dependency SET auto_install_required = (name = any(%s)) WHERE module_id = %s',
self.env.cr.execute('DELETE FROM ir_module_module_dependency WHERE module_id = %s and name = %s', (self.id, dep))
self.env.cr.execute('UPDATE ir_module_module_dependency SET auto_install_required = (name = any(%s)) WHERE module_id = %s',
(list(auto_install_requirements or ()), self.id))
self.env['ir.module.module.dependency'].invalidate_model(['auto_install_required'])
self.invalidate_recordset(['dependencies_id'])
@ -826,20 +825,20 @@ class Module(models.Model):
existing = set(self.country_ids.ids)
needed = set(self.env['res.country'].search([('code', 'in', [c.upper() for c in countries])]).ids)
for dep in (needed - existing):
self._cr.execute('INSERT INTO module_country (module_id, country_id) values (%s, %s)', (self.id, dep))
self.env.cr.execute('INSERT INTO module_country (module_id, country_id) values (%s, %s)', (self.id, dep))
for dep in (existing - needed):
self._cr.execute('DELETE FROM module_country WHERE module_id = %s and country_id = %s', (self.id, dep))
self.env.cr.execute('DELETE FROM module_country WHERE module_id = %s and country_id = %s', (self.id, dep))
self.invalidate_recordset(['country_ids'])
self.env['res.company'].invalidate_model(['uninstalled_l10n_module_ids'])
def _update_exclusions(self, excludes=None):
self.env['ir.module.module.exclusion'].flush_model()
existing = set(excl.name for excl in self.exclusion_ids)
existing = {excl.name for excl in self.exclusion_ids}
needed = set(excludes or [])
for name in (needed - existing):
self._cr.execute('INSERT INTO ir_module_module_exclusion (module_id, name) VALUES (%s, %s)', (self.id, name))
self.env.cr.execute('INSERT INTO ir_module_module_exclusion (module_id, name) VALUES (%s, %s)', (self.id, name))
for name in (existing - needed):
self._cr.execute('DELETE FROM ir_module_module_exclusion WHERE module_id=%s AND name=%s', (self.id, name))
self.env.cr.execute('DELETE FROM ir_module_module_exclusion WHERE module_id=%s AND name=%s', (self.id, name))
self.invalidate_recordset(['exclusion_ids'])
def _update_category(self, category='Uncategorized'):
@ -856,7 +855,7 @@ class Module(models.Model):
categs = category.split('/')
if categs != current_category_path:
cat_id = modules.db.create_categories(self._cr, categs)
cat_id = modules.db.create_categories(self.env.cr, categs)
self.write({'category_id': cat_id})
def _update_translations(self, filter_lang=None, overwrite=False):
@ -886,14 +885,14 @@ class Module(models.Model):
model_id = self._get_id(name) if name else False
return self.browse(model_id).sudo()
@tools.ormcache('name')
@tools.ormcache('name', cache='stable')
def _get_id(self, name):
self.flush_model(['name'])
self.env.cr.execute("SELECT id FROM ir_module_module WHERE name=%s", (name,))
return self.env.cr.fetchone()
@api.model
@tools.ormcache()
@tools.ormcache(cache='stable')
def _installed(self):
""" Return the set of installed modules as a dictionary {name: id} """
return {
@ -905,12 +904,12 @@ class Module(models.Model):
def search_panel_select_range(self, field_name, **kwargs):
if field_name == 'category_id':
enable_counters = kwargs.get('enable_counters', False)
domain = [
domain = Domain([
('parent_id', '=', False),
'|',
('module_ids.application', '!=', False),
('child_ids.module_ids', '!=', False),
]
])
excluded_xmlids = [
'base.module_category_website_theme',
@ -927,10 +926,7 @@ class Module(models.Model):
excluded_category_ids.append(categ.id)
if excluded_category_ids:
domain = expression.AND([
domain,
[('id', 'not in', excluded_category_ids)],
])
domain &= Domain('id', 'not in', excluded_category_ids)
records = self.env['ir.module.category'].search_read(domain, ['display_name'], order="sequence")
@ -938,7 +934,7 @@ class Module(models.Model):
for record in records:
record_id = record['id']
if enable_counters:
model_domain = expression.AND([
model_domain = Domain.AND([
kwargs.get('search_domain', []),
kwargs.get('category_domain', []),
kwargs.get('filter_domain', []),
@ -952,35 +948,38 @@ class Module(models.Model):
'values': list(values_range.values()),
}
return super(Module, self).search_panel_select_range(field_name, **kwargs)
return super().search_panel_select_range(field_name, **kwargs)
@api.model
def _load_module_terms(self, modules, langs, overwrite=False, imported_module=False):
def _load_module_terms(self, modules, langs, overwrite=False):
""" Load PO files of the given modules for the given languages. """
# load i18n files
translation_importer = TranslationImporter(self.env.cr, verbose=False)
for module_name in modules:
modpath = get_module_path(module_name, downloaded=imported_module)
if not modpath:
if not Manifest.for_addon(module_name, display_warning=False):
continue
for lang in langs:
is_lang_imported = False
env = self.env if imported_module else None
for po_path in get_po_paths(module_name, lang, env=env):
for po_path in get_po_paths(module_name, lang):
_logger.info('module %s: loading translation file %s for language %s', module_name, po_path, lang)
translation_importer.load_file(po_path, lang)
is_lang_imported = True
if lang != 'en_US' and not is_lang_imported:
for data_path in get_datafile_translation_path(module_name):
translation_importer.load_file(data_path, lang, module=module_name)
if lang != 'en_US' and lang not in translation_importer.imported_langs:
_logger.info('module %s: no translation for language %s', module_name, lang)
translation_importer.save(overwrite=overwrite)
@api.model
def _extract_resource_attachment_translations(self, module, lang):
yield from ()
DEP_STATES = STATES + [('unknown', 'Unknown')]
class ModuleDependency(models.Model):
_name = "ir.module.module.dependency"
class IrModuleModuleDependency(models.Model):
_name = 'ir.module.module.dependency'
_description = "Module dependency"
_log_access = False # inserts are done manually, create and write uid, dates are always null
_allow_sudo_commands = False
@ -1004,17 +1003,18 @@ class ModuleDependency(models.Model):
@api.depends('name')
def _compute_depend(self):
# retrieve all modules corresponding to the dependency names
names = list(set(dep.name for dep in self))
names = {dep.name for dep in self}
mods = self.env['ir.module.module'].search([('name', 'in', names)])
# index modules by name, and assign dependencies
name_mod = dict((mod.name, mod) for mod in mods)
name_mod = {mod.name: mod for mod in mods}
for dep in self:
dep.depend_id = name_mod.get(dep.name)
def _search_depend(self, operator, value):
assert operator == 'in'
modules = self.env['ir.module.module'].browse(set(value))
if operator not in ('in', 'any'):
return NotImplemented
modules = self.env['ir.module.module'].browse(value)
return [('name', 'in', modules.mapped('name'))]
@api.depends('depend_id.state')
@ -1027,7 +1027,7 @@ class ModuleDependency(models.Model):
to_search = {key: True for key in module_names}
res = {}
def search_direct_deps(to_search, res):
to_search_list = list(to_search.keys())
to_search_list = to_search.keys()
dependencies = self.web_search_read(domain=[("module_id.name", "in", to_search_list)], specification={"module_id":{"fields":{"name":{}}}, "name": {}, })["records"]
to_search.clear()
for dependency in dependencies:
@ -1036,7 +1036,7 @@ class ModuleDependency(models.Model):
if dep_name not in res and dep_name not in to_search and dep_name not in to_search_list:
to_search[dep_name] = True
if mod_name not in res:
res[mod_name] = list()
res[mod_name] = []
res[mod_name].append(dep_name)
search_direct_deps(to_search, res)
while to_search:
@ -1044,8 +1044,8 @@ class ModuleDependency(models.Model):
return res
class ModuleExclusion(models.Model):
_name = "ir.module.module.exclusion"
class IrModuleModuleExclusion(models.Model):
_name = 'ir.module.module.exclusion'
_description = "Module exclusion"
_allow_sudo_commands = False
@ -1063,7 +1063,7 @@ class ModuleExclusion(models.Model):
@api.depends('name')
def _compute_exclusion(self):
# retrieve all modules corresponding to the exclusion names
names = list(set(excl.name for excl in self))
names = {excl.name for excl in self}
mods = self.env['ir.module.module'].search([('name', 'in', names)])
# index modules by name, and assign dependencies
@ -1072,8 +1072,9 @@ class ModuleExclusion(models.Model):
excl.exclusion_id = name_mod.get(excl.name)
def _search_exclusion(self, operator, value):
assert operator == 'in'
modules = self.env['ir.module.module'].browse(set(value))
if operator not in ('in', 'any'):
return NotImplemented
modules = self.env['ir.module.module'].browse(value)
return [('name', 'in', modules.mapped('name'))]
@api.depends('exclusion_id.state')

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
@ -8,9 +7,11 @@ import logging
from dateutil.relativedelta import relativedelta
from odoo import fields, models, api, _
from odoo import fields, models, api
from odoo.exceptions import UserError
from odoo.http import request
from odoo.tools.misc import str2bool
from odoo.tools.constants import GC_UNLINK_LIMIT
from odoo.tools.profiler import make_session
from odoo.tools.speedscope import Speedscope
@ -28,7 +29,10 @@ class IrProfile(models.Model):
session = fields.Char('Session', index=True)
name = fields.Char('Description')
duration = fields.Float('Duration')
duration = fields.Float('Duration', digits=(9, 3),
help="Real elapsed time")
cpu_duration = fields.Float('CPU Duration', digits=(9, 3),
help="CPU clock (not including other processes or SQL)")
init_stack_trace = fields.Text('Initial stack trace', prefetch=False)
@ -36,31 +40,113 @@ class IrProfile(models.Model):
sql_count = fields.Integer('Queries Count')
traces_async = fields.Text('Traces Async', prefetch=False)
traces_sync = fields.Text('Traces Sync', prefetch=False)
others = fields.Text('others', prefetch=False)
qweb = fields.Text('Qweb', prefetch=False)
entry_count = fields.Integer('Entry count')
speedscope = fields.Binary('Speedscope', compute='_compute_speedscope')
speedscope_url = fields.Text('Open', compute='_compute_speedscope_url')
config_url = fields.Text('Open profiles config', compute='_compute_config_url')
@api.autovacuum
def _gc_profile(self):
# remove profiles older than 30 days
domain = [('create_date', '<', fields.Datetime.now() - datetime.timedelta(days=30))]
return self.sudo().search(domain).unlink()
records = self.sudo().search(domain, limit=GC_UNLINK_LIMIT)
records.unlink()
return len(records), len(records) == GC_UNLINK_LIMIT # done, remaining
def _compute_has_memory(self):
for profile in self:
if not bool(profile.others and json.loads(profile.others).get("memory")):
return False
return True
def _generate_memory_profile(self, params):
memory_graph = []
memory_limit = params.get('memory_limit', 0)
for profile in self:
if profile.others:
memory = json.loads(profile.others).get("memory", "[{}]")
memory_tracebacks = json.loads(memory)[:-1]
for entry in memory_tracebacks:
memory_graph.append({
"samples": [
sample for sample in entry["memory_tracebacks"]
if sample.get("size", False) >= memory_limit
]
, "start": entry["start"]})
return memory_graph
def _compute_config_url(self):
for profile in self:
profile.config_url = f'/web/profile_config/{profile.id}'
@api.depends('init_stack_trace')
def _compute_speedscope(self):
# The params variable is done to control input from the user
# When expanding this, it should be select from an enum to input only the correct values
params = self._parse_params(self.env.context)
for execution in self:
sp = Speedscope(init_stack_trace=json.loads(execution.init_stack_trace))
if execution.sql:
sp.add('sql', json.loads(execution.sql))
if execution.traces_async:
sp.add('frames', json.loads(execution.traces_async))
if execution.traces_sync:
sp.add('settrace', json.loads(execution.traces_sync))
execution.speedscope = base64.b64encode(execution._generate_speedscope(params))
result = json.dumps(sp.add_default().make())
execution.speedscope = base64.b64encode(result.encode('utf-8'))
def _default_profile_params(self):
has_sql = any(profile.sql for profile in self)
has_traces = any(profile.traces_async for profile in self)
return {
'combined_profile': has_sql and has_traces,
'sql_no_gap_profile': has_sql and not has_traces,
'sql_density_profile': False,
'frames_profile': has_traces and not has_sql,
}
def _parse_params(self, params):
return {
'constant_time': str2bool(params.get('constant_time', False)),
'aggregate_sql': str2bool(params.get('aggregate_sql', False)),
'use_context': str2bool(params.get('use_execution_context', True)),
'combined_profile': str2bool(params.get('combined_profile', False)),
'sql_no_gap_profile': str2bool(params.get('sql_no_gap_profile', False)),
'sql_density_profile': str2bool(params.get('sql_density_profile', False)),
'frames_profile': str2bool(params.get('frames_profile', False)),
'profile_aggregation_mode': params.get('profile_aggregation_mode', 'tabs'),
'memory_limit': int(params.get('memory_limit', 0)),
}
def _generate_speedscope(self, params):
init_stack_trace = self[0].init_stack_trace
for record in self:
if record.init_stack_trace != init_stack_trace:
raise UserError(self.env._('All profiles must have the same initial stack trace to be displayed together.'))
sp = Speedscope(init_stack_trace=json.loads(init_stack_trace))
for profile in self:
if (params['sql_no_gap_profile'] or params['sql_density_profile'] or params['combined_profile']) and profile.sql:
sp.add(f'sql {profile.id}', json.loads(profile.sql))
if (params['frames_profile'] or params['combined_profile']) and profile.traces_async:
sp.add(f'frames {profile.id}', json.loads(profile.traces_async))
if params['profile_aggregation_mode'] == 'tabs':
profile._add_outputs(sp, f'{profile.id} {profile.name}' if len(self) > 1 else '', params)
if params['profile_aggregation_mode'] == 'temporal':
self._add_outputs(sp, 'all', params)
result = json.dumps(sp.make(**params))
return result.encode('utf-8')
def _add_outputs(self, sp, suffix, params):
sql = [f'sql {profile.id}' for profile in self]
frames = [f'frames {profile.id}' for profile in self]
if params['combined_profile']:
sp.add_output(sql + frames, display_name=f'Combined {suffix}', **params)
if params['sql_no_gap_profile']:
sp.add_output(sql, hide_gaps=True, display_name=f'Sql (no gap) {suffix}', **params)
if params['sql_density_profile']:
sp.add_output(sql , continuous=False, complete=False, display_name=f'Sql (density) {suffix}',**params)
if params['frames_profile']:
sp.add_output(frames, display_name=f'Frames {suffix}',**params)
@api.depends('speedscope')
def _compute_speedscope_url(self):
for profile in self:
profile.speedscope_url = f'/web/speedscope/{profile.id}'
@ -90,7 +176,7 @@ class IrProfile(models.Model):
limit = self._enabled_until()
_logger.info("User %s started profiling", self.env.user.name)
if not limit:
request.session.profile_session = None
request.session['profile_session'] = None
if self.env.user._is_system():
return {
'type': 'ir.actions.act_window',
@ -99,31 +185,38 @@ class IrProfile(models.Model):
'target': 'new',
'views': [[False, 'form']],
}
raise UserError(_('Profiling is not enabled on this database. Please contact an administrator.'))
if not request.session.profile_session:
request.session.profile_session = make_session(self.env.user.name)
request.session.profile_expiration = limit
if request.session.profile_collectors is None:
request.session.profile_collectors = []
if request.session.profile_params is None:
request.session.profile_params = {}
raise UserError(self.env._('Profiling is not enabled on this database. Please contact an administrator.'))
if not request.session.get('profile_session'):
request.session['profile_session'] = make_session(self.env.user.name)
request.session['profile_expiration'] = limit
if request.session.get('profile_collectors') is None:
request.session['profile_collectors'] = []
if request.session.get('profile_params') is None:
request.session['profile_params'] = {}
elif profile is not None:
request.session.profile_session = None
request.session['profile_session'] = None
if collectors is not None:
request.session.profile_collectors = collectors
request.session['profile_collectors'] = collectors
if params is not None:
request.session.profile_params = params
request.session['profile_params'] = params
return {
'session': request.session.profile_session,
'collectors': request.session.profile_collectors,
'params': request.session.profile_params,
'session': request.session.get('profile_session'),
'collectors': request.session.get('profile_collectors'),
'params': request.session.get('profile_params'),
}
def action_view_speedscope(self):
ids = ",".join(str(p.id) for p in self)
return {
'type': 'ir.actions.act_url',
'url': f'/web/profile_config/{ids}',
'target': 'new',
}
class EnableProfilingWizard(models.TransientModel):
class BaseEnableProfilingWizard(models.TransientModel):
_name = 'base.enable.profiling.wizard'
_description = "Enable profiling for some time"

File diff suppressed because it is too large Load diff

View file

@ -43,7 +43,8 @@ def nl2br_enclose(string: str, enclosure_tag: str = 'div') -> Markup:
# QWeb Fields converters
#--------------------------------------------------------------------
class FieldConverter(models.AbstractModel):
class IrQwebField(models.AbstractModel):
""" Used to convert a t-field specification into an output HTML field.
:meth:`~.to_html` is the entry point of this conversion from QWeb, it:
@ -58,19 +59,53 @@ class FieldConverter(models.AbstractModel):
@api.model
def get_available_options(self):
"""
Get the available option informations.
""" Get the available option informations.
Returns a dict of dict with:
* key equal to the option key.
* dict: type, params, name, description, default_value
* type:
'string'
'integer'
'float'
'model' (e.g. 'res.partner')
'array'
'selection' (e.g. [key1, key2...])
:rtype: dict[str, dict[str, Any]]
:return: A dictionnary that maps option names' to their settings.
The settings are dict themselves and have the following keys:
type
Guaranteed, one of ``'string'``, ``'integer'``, ``'float'``,
``'model'``, ``'array'``, or ``'selection'``.
string
Guaranteed
description
Optional
required
Optional, is assumed ``False`` when absent, otherwise
is either ``True`` or a string.
params
Optional
default_value
Optional, the default value, as a json-friendly type.
Example::
{
<option>: {
# guaranteed
'type': ...,
'string': ...,
# optional
'default_value': ...,
'description': ...,
'params': ...,
'required': ...,
}
}
"""
return {}
@ -146,14 +181,14 @@ class FieldConverter(models.AbstractModel):
return self.env['res.lang'].browse(get_lang(self.env).id)
class IntegerConverter(models.AbstractModel):
class IrQwebFieldInteger(models.AbstractModel):
_name = 'ir.qweb.field.integer'
_description = 'Qweb Field Integer'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(IntegerConverter, self).get_available_options()
options = super().get_available_options()
options.update(
format_decimalized_number=dict(type='boolean', string=_('Decimalized number')),
precision_digits=dict(type='integer', string=_('Precision Digits')),
@ -167,14 +202,14 @@ class IntegerConverter(models.AbstractModel):
return self.user_lang().format('%d', value, grouping=True).replace(r'-', '-\N{ZERO WIDTH NO-BREAK SPACE}')
class FloatConverter(models.AbstractModel):
class IrQwebFieldFloat(models.AbstractModel):
_name = 'ir.qweb.field.float'
_description = 'Qweb Field Float'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(FloatConverter, self).get_available_options()
options = super().get_available_options()
options.update(
precision=dict(type='integer', string=_('Rounding precision')),
)
@ -209,17 +244,17 @@ class FloatConverter(models.AbstractModel):
if 'precision' not in options and 'decimal_precision' not in options:
_, precision = record._fields[field_name].get_digits(record.env) or (None, None)
options = dict(options, precision=precision)
return super(FloatConverter, self).record_to_html(record, field_name, options)
return super().record_to_html(record, field_name, options)
class DateConverter(models.AbstractModel):
class IrQwebFieldDate(models.AbstractModel):
_name = 'ir.qweb.field.date'
_description = 'Qweb Field Date'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(DateConverter, self).get_available_options()
options = super().get_available_options()
options.update(
format=dict(type='string', string=_('Date format'))
)
@ -230,14 +265,14 @@ class DateConverter(models.AbstractModel):
return format_date(self.env, value, date_format=options.get('format'))
class DateTimeConverter(models.AbstractModel):
class IrQwebFieldDatetime(models.AbstractModel):
_name = 'ir.qweb.field.datetime'
_description = 'Qweb Field Datetime'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(DateTimeConverter, self).get_available_options()
options = super().get_available_options()
options.update(
format=dict(type='string', string=_('Pattern to format')),
tz_name=dict(type='char', string=_('Optional timezone name')),
@ -288,10 +323,10 @@ class DateTimeConverter(models.AbstractModel):
return babel.dates.format_datetime(value, format=pattern, tzinfo=tzinfo, locale=locale)
class TextConverter(models.AbstractModel):
class IrQwebFieldText(models.AbstractModel):
_name = 'ir.qweb.field.text'
_description = 'Qweb Field Text'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def value_to_html(self, value, options):
@ -301,14 +336,14 @@ class TextConverter(models.AbstractModel):
return nl2br(value) if value else ''
class SelectionConverter(models.AbstractModel):
class IrQwebFieldSelection(models.AbstractModel):
_name = 'ir.qweb.field.selection'
_description = 'Qweb Field Selection'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(SelectionConverter, self).get_available_options()
options = super().get_available_options()
options.update(
selection=dict(type='selection', string=_('Selection'), description=_('By default the widget uses the field information'), required=True)
)
@ -327,13 +362,13 @@ class SelectionConverter(models.AbstractModel):
def record_to_html(self, record, field_name, options):
if 'selection' not in options:
options = dict(options, selection=dict(record._fields[field_name].get_description(self.env)['selection']))
return super(SelectionConverter, self).record_to_html(record, field_name, options)
return super().record_to_html(record, field_name, options)
class ManyToOneConverter(models.AbstractModel):
class IrQwebFieldMany2one(models.AbstractModel):
_name = 'ir.qweb.field.many2one'
_description = 'Qweb Field Many to One'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def value_to_html(self, value, options):
@ -345,10 +380,10 @@ class ManyToOneConverter(models.AbstractModel):
return nl2br(value)
class ManyToManyConverter(models.AbstractModel):
class IrQwebFieldMany2many(models.AbstractModel):
_name = 'ir.qweb.field.many2many'
_description = 'Qweb field many2many'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def value_to_html(self, value, options):
@ -358,10 +393,10 @@ class ManyToManyConverter(models.AbstractModel):
return nl2br(text)
class HTMLConverter(models.AbstractModel):
class IrQwebFieldHtml(models.AbstractModel):
_name = 'ir.qweb.field.html'
_description = 'Qweb Field HTML'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def value_to_html(self, value, options):
@ -378,7 +413,7 @@ class HTMLConverter(models.AbstractModel):
return Markup(etree.tostring(body, encoding='unicode', method='html')[6:-7])
class ImageConverter(models.AbstractModel):
class IrQwebFieldImage(models.AbstractModel):
""" ``image`` widget rendering, inserts a data:uri-using image tag in the
document. May be overridden by e.g. the website module to generate links
instead.
@ -389,7 +424,7 @@ class ImageConverter(models.AbstractModel):
"""
_name = 'ir.qweb.field.image'
_description = 'Qweb Field Image'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def _get_src_data_b64(self, value, options):
@ -417,19 +452,21 @@ class ImageConverter(models.AbstractModel):
def value_to_html(self, value, options):
return Markup('<img src="%s">') % self._get_src_data_b64(value, options)
class ImageUrlConverter(models.AbstractModel):
class IrQwebFieldImage_Url(models.AbstractModel):
""" ``image_url`` widget rendering, inserts an image tag in the
document.
"""
_name = 'ir.qweb.field.image_url'
_description = 'Qweb Field Image'
_inherit = 'ir.qweb.field.image'
_inherit = ['ir.qweb.field.image']
@api.model
def value_to_html(self, value, options):
return Markup('<img src="%s">' % (value))
class MonetaryConverter(models.AbstractModel):
class IrQwebFieldMonetary(models.AbstractModel):
""" ``monetary`` converter, has a mandatory option
``display_currency`` only if field is not of type Monetary.
Otherwise, if we are in presence of a monetary field, the field definition must
@ -445,11 +482,11 @@ class MonetaryConverter(models.AbstractModel):
"""
_name = 'ir.qweb.field.monetary'
_description = 'Qweb Field Monetary'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(MonetaryConverter, self).get_available_options()
options = super().get_available_options()
options.update(
from_currency=dict(type='model', params='res.currency', string=_('Original currency')),
display_currency=dict(type='model', params='res.currency', string=_('Display currency'), required="value_to_html"),
@ -517,11 +554,11 @@ class MonetaryConverter(models.AbstractModel):
if currency_fields:
options['display_currency'] = record[currency_fields[0]]
if 'date' not in options:
options['date'] = record._context.get('date')
options['date'] = record.env.context.get('date')
if 'company_id' not in options:
options['company_id'] = record._context.get('company_id')
options['company_id'] = record.env.context.get('company_id')
return super(MonetaryConverter, self).record_to_html(record, field_name, options)
return super().record_to_html(record, field_name, options)
TIMEDELTA_UNITS = (
@ -535,7 +572,7 @@ TIMEDELTA_UNITS = (
)
class FloatTimeConverter(models.AbstractModel):
class IrQwebFieldFloat_Time(models.AbstractModel):
""" ``float_time`` converter, to display integral or fractional values as
human-readable time spans (e.g. 1.5 as "01:30").
@ -543,14 +580,14 @@ class FloatTimeConverter(models.AbstractModel):
"""
_name = 'ir.qweb.field.float_time'
_description = 'Qweb Field Float Time'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def value_to_html(self, value, options):
return format_duration(value)
class TimeConverter(models.AbstractModel):
class IrQwebFieldTime(models.AbstractModel):
""" ``time`` converter, to display integer or fractional value as
human-readable time (e.g. 1.5 as "1:30 AM"). The unit of this value
is in hours.
@ -559,7 +596,7 @@ class TimeConverter(models.AbstractModel):
"""
_name = 'ir.qweb.field.time'
_description = 'QWeb Field Time'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def value_to_html(self, value, options):
@ -576,7 +613,7 @@ class TimeConverter(models.AbstractModel):
return babel.dates.format_time(t, format=pattern, tzinfo=None, locale=locale)
class DurationConverter(models.AbstractModel):
class IrQwebFieldDuration(models.AbstractModel):
""" ``duration`` converter, to display integral or fractional values as
human-readable time spans (e.g. 1.5 as "1 hour 30 minutes").
@ -594,11 +631,11 @@ class DurationConverter(models.AbstractModel):
"""
_name = 'ir.qweb.field.duration'
_description = 'Qweb Field Duration'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(DurationConverter, self).get_available_options()
options = super().get_available_options()
unit = [(value, str(label)) for value, label, ratio in TIMEDELTA_UNITS]
options.update(
digital=dict(type="boolean", string=_('Digital formatting')),
@ -642,7 +679,7 @@ class DurationConverter(models.AbstractModel):
sign = '-'
if options.get('digital'):
for unit, label, secs_per_unit in TIMEDELTA_UNITS:
for _unit, _label, secs_per_unit in TIMEDELTA_UNITS:
if secs_per_unit > 3600:
continue
v, r = divmod(r, secs_per_unit)
@ -651,7 +688,7 @@ class DurationConverter(models.AbstractModel):
sections.append(u"%02.0f" % int(round(v)))
return sign + u':'.join(sections)
for unit, label, secs_per_unit in TIMEDELTA_UNITS:
for _unit, _label, secs_per_unit in TIMEDELTA_UNITS:
v, r = divmod(r, secs_per_unit)
if not v:
continue
@ -683,14 +720,14 @@ class DurationConverter(models.AbstractModel):
return u' '.join(sections)
class RelativeDatetimeConverter(models.AbstractModel):
class IrQwebFieldRelative(models.AbstractModel):
_name = 'ir.qweb.field.relative'
_description = 'Qweb Field Relative'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(RelativeDatetimeConverter, self).get_available_options()
options = super().get_available_options()
options.update(
now=dict(type='datetime', string=_('Reference date'), description=_('Date to compare with the field value, by default use the current date.'))
)
@ -712,21 +749,21 @@ class RelativeDatetimeConverter(models.AbstractModel):
def record_to_html(self, record, field_name, options):
if 'now' not in options:
options = dict(options, now=record._fields[field_name].now())
return super(RelativeDatetimeConverter, self).record_to_html(record, field_name, options)
return super().record_to_html(record, field_name, options)
class BarcodeConverter(models.AbstractModel):
class IrQwebFieldBarcode(models.AbstractModel):
""" ``barcode`` widget rendering, inserts a data:uri-using image tag in the
document. May be overridden by e.g. the website module to generate links
instead.
"""
_name = 'ir.qweb.field.barcode'
_description = 'Qweb Field Barcode'
_inherit = 'ir.qweb.field'
_inherit = ['ir.qweb.field']
@api.model
def get_available_options(self):
options = super(BarcodeConverter, self).get_available_options()
options = super().get_available_options()
options.update(
symbology=dict(type='string', string=_('Barcode symbology'), description=_('Barcode type, eg: UPCA, EAN13, Code128'), default_value='Code128'),
width=dict(type='integer', string=_('Width'), default_value=600),
@ -759,19 +796,18 @@ class BarcodeConverter(models.AbstractModel):
return Markup(html.tostring(img_element, encoding='unicode'))
class Contact(models.AbstractModel):
class IrQwebFieldContact(models.AbstractModel):
_name = 'ir.qweb.field.contact'
_description = 'Qweb Field Contact'
_inherit = 'ir.qweb.field.many2one'
_inherit = ['ir.qweb.field.many2one']
@api.model
def get_available_options(self):
options = super(Contact, self).get_available_options()
options = super().get_available_options()
contact_fields = [
{'field_name': 'name', 'label': _('Name'), 'default': True},
{'field_name': 'address', 'label': _('Address'), 'default': True},
{'field_name': 'phone', 'label': _('Phone'), 'default': True},
{'field_name': 'mobile', 'label': _('Mobile'), 'default': True},
{'field_name': 'email', 'label': _('Email'), 'default': True},
{'field_name': 'vat', 'label': _('VAT')},
]
@ -801,7 +837,7 @@ class Contact(models.AbstractModel):
return self.env['ir.qweb']._render('base.no_contact', val, **template_options)
return ''
opf = options.get('fields') or ["name", "address", "phone", "mobile", "email"]
opf = options.get('fields') or ["name", "address", "phone", "email"]
sep = options.get('separator')
if sep:
opsep = escape(sep)
@ -824,7 +860,6 @@ class Contact(models.AbstractModel):
'name': display_name.split("\n")[0],
'address': address,
'phone': value.phone,
'mobile': value.mobile,
'city': value.city,
'country_id': value.country_id.display_name,
'website': value.website,
@ -838,10 +873,10 @@ class Contact(models.AbstractModel):
return self.env['ir.qweb']._render('base.contact', val, minimal_qcontext=True)
class QwebView(models.AbstractModel):
class IrQwebFieldQweb(models.AbstractModel):
_name = 'ir.qweb.field.qweb'
_description = 'Qweb Field qweb'
_inherit = 'ir.qweb.field.many2one'
_inherit = ['ir.qweb.field.many2one']
@api.model
def record_to_html(self, record, field_name, options):

View file

@ -2,13 +2,16 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, fields, models, tools, _
from odoo import _, api, fields, models, tools
from odoo.exceptions import AccessError, ValidationError
from odoo.osv import expression
from odoo.fields import Domain
from odoo.tools import config, SQL
from odoo.tools.safe_eval import safe_eval, time
_logger = logging.getLogger(__name__)
class IrRule(models.Model):
_name = 'ir.rule'
_description = 'Record Rule'
@ -16,7 +19,7 @@ class IrRule(models.Model):
_MODES = ('read', 'write', 'create', 'unlink')
_allow_sudo_commands = False
name = fields.Char(index=True)
name = fields.Char()
active = fields.Boolean(default=True, help="If you uncheck the active field, it will disable the record rule without deleting it (if you delete a native record rule, it may be re-created when you reload the module).")
model_id = fields.Many2one('ir.model', string='Model', index=True, required=True, ondelete="cascade")
groups = fields.Many2many('res.groups', 'rule_group_rel', 'rule_group_id', 'group_id', ondelete='restrict')
@ -26,11 +29,10 @@ class IrRule(models.Model):
perm_create = fields.Boolean(string='Create', default=True)
perm_unlink = fields.Boolean(string='Delete', default=True)
_sql_constraints = [
('no_access_rights',
'CHECK (perm_read!=False or perm_write!=False or perm_create!=False or perm_unlink!=False)',
'Rule must have at least one checked access right!'),
]
_no_access_rights = models.Constraint(
'CHECK (perm_read!=False or perm_write!=False or perm_create!=False or perm_unlink!=False)',
"Rule must have at least one checked access right!",
)
@api.model
def _eval_context(self):
@ -44,7 +46,6 @@ class IrRule(models.Model):
# independent from the context
return {
'user': self.env.user.with_context({}),
'time': time,
'company_ids': self.env.companies.ids,
'company_id': self.env.company.id,
}
@ -67,7 +68,8 @@ class IrRule(models.Model):
if rule.active and rule.domain_force:
try:
domain = safe_eval(rule.domain_force, eval_context)
expression.expression(domain, self.env[rule.model_id.model].sudo())
model = self.env[rule.model_id.model].sudo()
Domain(domain).validate(model)
except Exception as e:
raise ValidationError(_('Invalid domain: %s', e))
@ -90,22 +92,19 @@ class IrRule(models.Model):
# first check if the group rules fail for any record (aka if
# searching on (records, group_rules) filters out some of the records)
group_rules = all_rules.filtered(lambda r: r.groups and r.groups & self.env.user.groups_id)
group_domains = expression.OR([
group_rules = all_rules.filtered(lambda r: r.groups and r.groups & self.env.user.all_group_ids)
group_domains = Domain.OR(
safe_eval(r.domain_force, eval_context) if r.domain_force else []
for r in group_rules
])
)
# if all records get returned, the group rules are not failing
if Model.search_count(expression.AND([[('id', 'in', for_records.ids)], group_domains])) == len(for_records):
if Model.search_count(group_domains & Domain('id', 'in', for_records.ids)) == len(for_records):
group_rules = self.browse(())
# failing rules are previously selected group rules or any failing global rule
def is_failing(r, ids=for_records.ids):
dom = safe_eval(r.domain_force, eval_context) if r.domain_force else []
return Model.search_count(expression.AND([
[('id', 'in', ids)],
expression.normalize_domain(dom)
])) < len(ids)
dom = Domain(safe_eval(r.domain_force, eval_context) if r.domain_force else [])
return Model.search_count(dom & Domain('id', 'in', ids)) < len(ids)
return all_rules.filtered(lambda r: r in group_rules or (not r.groups and is_failing(r))).with_user(self.env.user)
@ -137,39 +136,43 @@ class IrRule(models.Model):
tools.ormcache('self.env.uid', 'self.env.su', 'model_name', 'mode',
'tuple(self._compute_domain_context_values())'),
)
def _compute_domain(self, model_name, mode="read"):
global_domains = [] # list of domains
def _compute_domain(self, model_name: str, mode: str = "read") -> Domain:
model = self.env[model_name]
# add rules for parent models
for parent_model_name, parent_field_name in self.env[model_name]._inherits.items():
global_domains: list[Domain] = []
for parent_model_name, parent_field_name in model._inherits.items():
if not model._fields[parent_field_name].store:
continue
if domain := self._compute_domain(parent_model_name, mode):
global_domains.append([(parent_field_name, 'any', domain)])
global_domains.append(Domain(parent_field_name, 'any', domain))
rules = self._get_rules(model_name, mode=mode)
if not rules:
return expression.AND(global_domains) if global_domains else []
return Domain.AND(global_domains).optimize(model)
# browse user and rules with sudo to avoid access errors!
eval_context = self._eval_context()
user_groups = self.env.user.groups_id
group_domains = [] # list of domains
user_groups = self.env.user.all_group_ids
group_domains: list[Domain] = []
for rule in rules.sudo():
if rule.groups and not (rule.groups & user_groups):
continue
# evaluate the domain for the current user
dom = safe_eval(rule.domain_force, eval_context) if rule.domain_force else []
dom = expression.normalize_domain(dom)
if not rule.groups:
global_domains.append(dom)
elif rule.groups & user_groups:
dom = Domain(safe_eval(rule.domain_force, eval_context)) if rule.domain_force else Domain.TRUE
if rule.groups:
group_domains.append(dom)
else:
global_domains.append(dom)
# combine global domains and group domains
if not group_domains:
return expression.AND(global_domains)
return expression.AND(global_domains + [expression.OR(group_domains)])
if group_domains:
global_domains.append(Domain.OR(group_domains))
return Domain.AND(global_domains).optimize(model)
def _compute_domain_context_values(self):
for k in self._compute_domain_keys():
v = self._context.get(k)
v = self.env.context.get(k)
if isinstance(v, list):
# currently this could be a frozenset (to avoid depending on
# the order of allowed_company_ids) but it seems safer if
@ -201,7 +204,7 @@ class IrRule(models.Model):
return res
def _make_access_error(self, operation, records):
_logger.info('Access Denied by record rules for operation: %s on record ids: %r, uid: %s, model: %s', operation, records.ids[:6], self._uid, records._name)
_logger.info('Access Denied by record rules for operation: %s on record ids: %r, uid: %s, model: %s', operation, records.ids[:6], self.env.uid, records._name)
self = self.with_context(self.env.user.context_get())
model = records._name
@ -224,7 +227,7 @@ class IrRule(models.Model):
# so it is relatively safe here to include the list of rules and record names.
rules = self._get_failing(records, mode=operation).sudo()
records_sudo = records[:6].sudo()
display_records = records[:6].sudo()
company_related = any('company_id' in (r.domain_force or '') for r in rules)
def get_record_description(rec):
@ -236,7 +239,7 @@ class IrRule(models.Model):
context = None
if company_related:
suggested_companies = records_sudo._get_redirect_suggested_company()
suggested_companies = display_records._get_redirect_suggested_company()
if suggested_companies and len(suggested_companies) != 1:
resolution_info += _('\n\nNote: this might be a multi-company issue. Switching company may help - in Odoo, not in real life!')
elif suggested_companies and suggested_companies in self.env.user.company_ids:
@ -249,13 +252,13 @@ class IrRule(models.Model):
msg = f"{operation_error}\n{failing_model}\n\n{resolution_info}"
else:
# This extended AccessError is only displayed in debug mode.
failing_records = '\n'.join(f'- {get_record_description(rec)}' for rec in records_sudo)
failing_records = '\n'.join(f'- {get_record_description(rec)}' for rec in display_records)
rules_description = '\n'.join(f'- {rule.name}' for rule in rules)
failing_rules = _("Blame the following rules:\n%s", rules_description)
msg = f"{operation_error}\n{failing_records}\n\n{failing_rules}\n\n{resolution_info}"
# clean up the cache of records prefetched with display_name above
records_sudo.invalidate_recordset()
# clean up the cache of records because of filtered_domain to check ir.rule + display_name above
records.invalidate_recordset()
exception = AccessError(msg)
if context:

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
import logging
@ -54,8 +53,8 @@ def _select_nextval(cr, seq_name):
def _update_nogap(self, number_increment):
self.flush_recordset(['number_next'])
number_next = self.number_next
self._cr.execute("SELECT number_next FROM %s WHERE id=%%s FOR UPDATE NOWAIT" % self._table, [self.id])
self._cr.execute("UPDATE %s SET number_next=number_next+%%s WHERE id=%%s " % self._table, (number_increment, self.id))
self.env.cr.execute("SELECT number_next FROM %s WHERE id=%%s FOR UPDATE NOWAIT" % self._table, [self.id])
self.env.cr.execute("UPDATE %s SET number_next=number_next+%%s WHERE id=%%s " % self._table, (number_increment, self.id))
self.invalidate_recordset(['number_next'])
return number_next
@ -88,7 +87,7 @@ class IrSequence(models.Model):
"""
_name = 'ir.sequence'
_description = 'Sequence'
_order = 'name'
_order = 'name, id'
_allow_sudo_commands = False
def _get_number_next_actual(self):
@ -155,47 +154,47 @@ class IrSequence(models.Model):
seqs = super().create(vals_list)
for seq in seqs:
if seq.implementation == 'standard':
_create_sequence(self._cr, "ir_sequence_%03d" % seq.id, seq.number_increment or 1, seq.number_next or 1)
_create_sequence(self.env.cr, "ir_sequence_%03d" % seq.id, seq.number_increment or 1, seq.number_next or 1)
return seqs
def unlink(self):
_drop_sequences(self._cr, ["ir_sequence_%03d" % x.id for x in self])
_drop_sequences(self.env.cr, ["ir_sequence_%03d" % x.id for x in self])
return super(IrSequence, self).unlink()
def write(self, values):
new_implementation = values.get('implementation')
def write(self, vals):
new_implementation = vals.get('implementation')
for seq in self:
# 4 cases: we test the previous impl. against the new one.
i = values.get('number_increment', seq.number_increment)
n = values.get('number_next', seq.number_next)
i = vals.get('number_increment', seq.number_increment)
n = vals.get('number_next', seq.number_next)
if seq.implementation == 'standard':
if new_implementation in ('standard', None):
# Implementation has NOT changed.
# Only change sequence if really requested.
if values.get('number_next'):
_alter_sequence(self._cr, "ir_sequence_%03d" % seq.id, number_next=n)
if vals.get('number_next'):
_alter_sequence(self.env.cr, "ir_sequence_%03d" % seq.id, number_next=n)
if seq.number_increment != i:
_alter_sequence(self._cr, "ir_sequence_%03d" % seq.id, number_increment=i)
_alter_sequence(self.env.cr, "ir_sequence_%03d" % seq.id, number_increment=i)
seq.date_range_ids._alter_sequence(number_increment=i)
else:
_drop_sequences(self._cr, ["ir_sequence_%03d" % seq.id])
_drop_sequences(self.env.cr, ["ir_sequence_%03d" % seq.id])
for sub_seq in seq.date_range_ids:
_drop_sequences(self._cr, ["ir_sequence_%03d_%03d" % (seq.id, sub_seq.id)])
_drop_sequences(self.env.cr, ["ir_sequence_%03d_%03d" % (seq.id, sub_seq.id)])
else:
if new_implementation in ('no_gap', None):
pass
else:
_create_sequence(self._cr, "ir_sequence_%03d" % seq.id, i, n)
_create_sequence(self.env.cr, "ir_sequence_%03d" % seq.id, i, n)
for sub_seq in seq.date_range_ids:
_create_sequence(self._cr, "ir_sequence_%03d_%03d" % (seq.id, sub_seq.id), i, n)
res = super(IrSequence, self).write(values)
_create_sequence(self.env.cr, "ir_sequence_%03d_%03d" % (seq.id, sub_seq.id), i, n)
res = super().write(vals)
# DLE P179
self.flush_model(values.keys())
self.flush_model(vals.keys())
return res
def _next_do(self):
if self.implementation == 'standard':
number_next = _select_nextval(self._cr, 'ir_sequence_%03d' % self.id)
number_next = _select_nextval(self.env.cr, 'ir_sequence_%03d' % self.id)
else:
number_next = _update_nogap(self, self.number_increment)
return self.get_next_char(number_next)
@ -205,15 +204,16 @@ class IrSequence(models.Model):
return (s % d) if s else ''
def _interpolation_dict():
now = range_date = effective_date = datetime.now(pytz.timezone(self._context.get('tz') or 'UTC'))
if date or self._context.get('ir_sequence_date'):
effective_date = fields.Datetime.from_string(date or self._context.get('ir_sequence_date'))
if date_range or self._context.get('ir_sequence_date_range'):
range_date = fields.Datetime.from_string(date_range or self._context.get('ir_sequence_date_range'))
now = range_date = effective_date = datetime.now(self.env.tz)
if date or self.env.context.get('ir_sequence_date'):
effective_date = fields.Datetime.from_string(date or self.env.context.get('ir_sequence_date'))
if date_range or self.env.context.get('ir_sequence_date_range'):
range_date = fields.Datetime.from_string(date_range or self.env.context.get('ir_sequence_date_range'))
sequences = {
'year': '%Y', 'month': '%m', 'day': '%d', 'y': '%y', 'doy': '%j', 'woy': '%W',
'weekday': '%w', 'h24': '%H', 'h12': '%I', 'min': '%M', 'sec': '%S'
'weekday': '%w', 'h24': '%H', 'h12': '%I', 'min': '%M', 'sec': '%S',
'isoyear': '%G', 'isoy': '%g', 'isoweek': '%V',
}
res = {}
for key, format in sequences.items():
@ -258,7 +258,7 @@ class IrSequence(models.Model):
if not self.use_date_range:
return self._next_do()
# date mode
dt = sequence_date or self._context.get('ir_sequence_date', fields.Date.today())
dt = sequence_date or self.env.context.get('ir_sequence_date', fields.Date.today())
seq_date = self.env['ir.sequence.date_range'].search([('sequence_id', '=', self.id), ('date_from', '<=', dt), ('date_to', '>=', dt)], limit=1)
if not seq_date:
seq_date = self._create_date_range_seq(dt)
@ -285,44 +285,17 @@ class IrSequence(models.Model):
seq_id = seq_ids[0]
return seq_id._next(sequence_date=sequence_date)
@api.model
def get_id(self, sequence_code_or_id, code_or_id='id'):
""" Draw an interpolated string using the specified sequence.
The sequence to use is specified by the ``sequence_code_or_id``
argument, which can be a code or an id (as controlled by the
``code_or_id`` argument. This method is deprecated.
"""
_logger.warning("ir_sequence.get() and ir_sequence.get_id() are deprecated. "
"Please use ir_sequence.next_by_code() or ir_sequence.next_by_id().")
if code_or_id == 'id':
return self.browse(sequence_code_or_id).next_by_id()
else:
return self.next_by_code(sequence_code_or_id)
@api.model
def get(self, code):
""" Draw an interpolated string using the specified sequence.
The sequence to use is specified by its code. This method is
deprecated.
"""
return self.get_id(code, 'code')
class IrSequenceDateRange(models.Model):
class IrSequenceDate_Range(models.Model):
_name = 'ir.sequence.date_range'
_description = 'Sequence Date Range'
_rec_name = "sequence_id"
_allow_sudo_commands = False
_sql_constraints = [
(
'unique_range_per_sequence',
'UNIQUE(sequence_id, date_from, date_to)',
"You cannot create two date ranges for the same sequence with the same date range.",
),
]
_unique_range_per_sequence = models.Constraint(
'UNIQUE(sequence_id, date_from, date_to)',
"You cannot create two date ranges for the same sequence with the same date range.",
)
def _get_number_next_actual(self):
'''Return number from ir_sequence row when no_gap implementation,
@ -340,7 +313,7 @@ class IrSequenceDateRange(models.Model):
@api.model
def default_get(self, fields):
result = super(IrSequenceDateRange, self).default_get(fields)
result = super().default_get(fields)
if 'number_next_actual' in fields:
result['number_next_actual'] = 1
return result
@ -356,14 +329,14 @@ class IrSequenceDateRange(models.Model):
def _next(self):
if self.sequence_id.implementation == 'standard':
number_next = _select_nextval(self._cr, 'ir_sequence_%03d_%03d' % (self.sequence_id.id, self.id))
number_next = _select_nextval(self.env.cr, 'ir_sequence_%03d_%03d' % (self.sequence_id.id, self.id))
else:
number_next = _update_nogap(self, self.sequence_id.number_increment)
return self.sequence_id.get_next_char(number_next)
def _alter_sequence(self, number_increment=None, number_next=None):
for seq in self:
_alter_sequence(self._cr, "ir_sequence_%03d_%03d" % (seq.sequence_id.id, seq.id), number_increment=number_increment, number_next=number_next)
_alter_sequence(self.env.cr, "ir_sequence_%03d_%03d" % (seq.sequence_id.id, seq.id), number_increment=number_increment, number_next=number_next)
@api.model_create_multi
def create(self, vals_list):
@ -373,17 +346,17 @@ class IrSequenceDateRange(models.Model):
for seq in seqs:
main_seq = seq.sequence_id
if main_seq.implementation == 'standard':
_create_sequence(self._cr, "ir_sequence_%03d_%03d" % (main_seq.id, seq.id), main_seq.number_increment, seq.number_next_actual or 1)
_create_sequence(self.env.cr, "ir_sequence_%03d_%03d" % (main_seq.id, seq.id), main_seq.number_increment, seq.number_next_actual or 1)
return seqs
def unlink(self):
_drop_sequences(self._cr, ["ir_sequence_%03d_%03d" % (x.sequence_id.id, x.id) for x in self])
return super(IrSequenceDateRange, self).unlink()
_drop_sequences(self.env.cr, ["ir_sequence_%03d_%03d" % (x.sequence_id.id, x.id) for x in self])
return super().unlink()
def write(self, values):
if values.get('number_next'):
def write(self, vals):
if vals.get('number_next'):
seq_to_alter = self.filtered(lambda seq: seq.sequence_id.implementation == 'standard')
seq_to_alter._alter_sequence(number_next=values.get('number_next'))
seq_to_alter._alter_sequence(number_next=vals.get('number_next'))
# DLE P179: `test_in_invoice_line_onchange_sequence_number_1`
# _update_nogap do a select to get the next sequence number_next
# When changing (writing) the number next of a sequence, the number next must be flushed before doing the select.
@ -392,6 +365,6 @@ class IrSequenceDateRange(models.Model):
# - Changing the number next of a sequence is really really rare,
# - But selecting the number next happens a lot,
# Therefore, if I chose to put the flush just above the select, it would check the flush most of the time for no reason.
res = super(IrSequenceDateRange, self).write(values)
self.flush_model(values.keys())
res = super().write(vals)
self.flush_model(vals.keys())
return res

View file

@ -1,16 +1,13 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
from collections import defaultdict
from os.path import join as opj
import operator
import re
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools
from odoo.exceptions import ValidationError
from odoo.http import request
from odoo.osv import expression
MENU_ITEM_SEPARATOR = "/"
NUMBER_PARENS = re.compile(r"\(([0-9]+)\)")
@ -29,7 +26,7 @@ class IrUiMenu(models.Model):
child_id = fields.One2many('ir.ui.menu', 'parent_id', string='Child IDs')
parent_id = fields.Many2one('ir.ui.menu', string='Parent Menu', index=True, ondelete="restrict")
parent_path = fields.Char(index=True)
groups_id = fields.Many2many('res.groups', 'ir_ui_menu_group_rel',
group_ids = fields.Many2many('res.groups', 'ir_ui_menu_group_rel',
'menu_id', 'gid', string='Groups',
help="If you have groups, the visibility of this menu will be based on these groups. "\
"If this field is empty, Odoo will compute visibility based on the related object's read access.")
@ -71,65 +68,73 @@ class IrUiMenu(models.Model):
@api.constrains('parent_id')
def _check_parent_id(self):
if self._has_cycle():
raise ValidationError(_('Error! You cannot create recursive menus.'))
raise ValidationError(self.env._('Error! You cannot create recursive menus.'))
@api.model
@tools.ormcache('frozenset(self.env.user.groups_id.ids)', 'debug')
@tools.ormcache('frozenset(self.env.user._get_group_ids())', 'debug')
def _visible_menu_ids(self, debug=False):
""" Return the ids of the menu items visible to the user. """
# retrieve all menus, and determine which ones are visible
context = {'ir.ui.menu.full_list': True}
menus = self.with_context(context).search_fetch([], ['action', 'parent_id']).sudo()
# first discard all menus with groups the user does not have
group_ids = set(self.env.user._get_group_ids())
if not debug:
group_ids = group_ids - {self.env['ir.model.data']._xmlid_to_res_id('base.group_no_one', raise_if_not_found=False)}
menus = menus.filtered(
lambda menu: not (menu.groups_id and group_ids.isdisjoint(menu.groups_id._ids)))
group_ids.discard(self.env['ir.model.data']._xmlid_to_res_id('base.group_no_one', raise_if_not_found=False))
# retrieve menus with a domain to filter out menus with groups the user does not have.
# It will be used to determine which ones are visible
menus = self.with_context({}).search_fetch(
# Don't use 'any' operator in the domain to avoid ir.rule
['|', ('group_ids', '=', False), ('group_ids', 'in', tuple(group_ids))],
['parent_id', 'action'], order='id',
).sudo()
# take apart menus that have an action
actions_by_model = defaultdict(set)
action_ids_by_model = defaultdict(list)
for action in menus.mapped('action'):
if action:
actions_by_model[action._name].add(action.id)
existing_actions = {
action
for model_name, action_ids in actions_by_model.items()
for action in self.env[model_name].browse(action_ids).exists()
}
action_menus = menus.filtered(lambda m: m.action and m.action in existing_actions)
folder_menus = menus - action_menus
visible = self.browse()
action_ids_by_model[action._name].append(action.id)
# process action menus, check whether their action is allowed
access = self.env['ir.model.access']
MODEL_BY_TYPE = {
'ir.actions.act_window': 'res_model',
'ir.actions.report': 'model',
'ir.actions.server': 'model_name',
}
def exists_actions(model_name, action_ids):
""" Return existing actions and fetch model name field if exists"""
if model_name not in MODEL_BY_TYPE:
return self.env[model_name].browse(action_ids).exists()
records = self.env[model_name].sudo().with_context(active_test=False).search_fetch(
[('id', 'in', action_ids)], [MODEL_BY_TYPE[model_name]], order='id',
)
if model_name == 'ir.actions.server':
# Because it is computed, `search_fetch` doesn't fill the cache for it
records.mapped('model_name')
return records
# performance trick: determine the ids to prefetch by type
prefetch_ids = defaultdict(list)
for action in action_menus.mapped('action'):
prefetch_ids[action._name].append(action.id)
for menu in action_menus:
existing_actions = {
action
for model_name, action_ids in action_ids_by_model.items()
for action in exists_actions(model_name, action_ids)
}
menu_ids = set(menus._ids)
visible_ids = set()
access = self.env['ir.model.access']
# process action menus, check whether their action is allowed
for menu in menus:
action = menu.action
action = action.with_prefetch(prefetch_ids[action._name])
model_name = action._name in MODEL_BY_TYPE and action[MODEL_BY_TYPE[action._name]]
if not model_name or access.check(model_name, 'read', False):
# make menu visible, and its folder ancestors, too
visible += menu
if not action or action not in existing_actions:
continue
model_fname = MODEL_BY_TYPE.get(action._name)
# action[model_fname] has been fetched in batch in `exists_actions`
if model_fname and not access.check(action[model_fname], 'read', False):
continue
# make menu visible, and its folder ancestors, too
menu_id = menu.id
while menu_id not in visible_ids and menu_id in menu_ids:
visible_ids.add(menu_id)
menu = menu.parent_id
while menu and menu in folder_menus and menu not in visible:
visible += menu
menu = menu.parent_id
menu_id = menu.id
return set(visible.ids)
return frozenset(visible_ids)
@api.returns('self')
def _filter_visible_menus(self):
""" Filter `self` to only keep the menu items that should be visible in
the menu hierarchy of the current user.
@ -138,24 +143,6 @@ class IrUiMenu(models.Model):
visible_ids = self._visible_menu_ids(request.session.debug if request else False)
return self.filtered(lambda menu: menu.id in visible_ids)
@api.model
def search_fetch(self, domain, field_names, offset=0, limit=None, order=None):
menus = super().search_fetch(domain, field_names, order=order)
if menus:
# menu filtering is done only on main menu tree, not other menu lists
if not self._context.get('ir.ui.menu.full_list'):
menus = menus._filter_visible_menus()
if offset:
menus = menus[offset:]
if limit:
menus = menus[:limit]
return menus
@api.model
def search_count(self, domain, limit=None):
# to be consistent with search() above
return len(self.search(domain, limit=limit))
@api.depends('parent_id')
def _compute_display_name(self):
for menu in self:
@ -167,20 +154,24 @@ class IrUiMenu(models.Model):
for values in vals_list:
if 'web_icon' in values:
values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
return super(IrUiMenu, self).create(vals_list)
return super().create(vals_list)
def write(self, values):
def write(self, vals):
self.env.registry.clear_cache()
if 'web_icon' in values:
values['web_icon_data'] = self._compute_web_icon_data(values.get('web_icon'))
return super(IrUiMenu, self).write(values)
if 'web_icon' in vals:
vals['web_icon_data'] = self._compute_web_icon_data(vals.get('web_icon'))
return super().write(vals)
def _compute_web_icon_data(self, web_icon):
""" Returns the image associated to `web_icon`.
`web_icon` can either be:
- an image icon [module, path]
- a built icon [icon_class, icon_color, background_color]
and it only has to call `_read_image` if it's an image.
""" Returns the image associated to ``web_icon``.
:param str web_icon: a comma-separated value string for either:
* an image icon: ``f"{module},{path}"``
* a built icon: ``f"{icon_class},{icon_color},{background_color}"``
The ``web_icon_data`` computed field uses :meth:`_read_image` for image
web icons, and is ``False`` for built icons.
"""
if web_icon and len(web_icon.split(',')) == 2:
return self._read_image(web_icon)
@ -190,9 +181,7 @@ class IrUiMenu(models.Model):
# cascade-delete submenus blindly. We also can't use ondelete=set null because
# that is not supported when _parent_store is used (would silently corrupt it).
# TODO: ideally we should move them under a generic "Orphans" menu somewhere?
extra = {'ir.ui.menu.full_list': True,
'active_test': False}
direct_children = self.with_context(**extra).search([('parent_id', 'in', self.ids)])
direct_children = self.with_context(active_test=False).search([('parent_id', 'in', self.ids)])
direct_children.write({'parent_id': False})
self.env.registry.clear_cache()
@ -210,20 +199,19 @@ class IrUiMenu(models.Model):
return new_menus
@api.model
@api.returns('self')
def get_user_roots(self):
""" Return all root menu ids visible for the user.
:return: the root menu ids
:rtype: list(int)
"""
return self.search([('parent_id', '=', False)])
return self.search([('parent_id', '=', False)])._filter_visible_menus()
def _load_menus_blacklist(self):
return []
@api.model
@tools.ormcache_context('self._uid', keys=('lang',))
@tools.ormcache('self.env.uid', 'self.env.lang')
def load_menus_root(self):
fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
menu_roots = self.get_user_roots()
@ -244,94 +232,91 @@ class IrUiMenu(models.Model):
return menu_root
@api.model
@tools.ormcache_context('self._uid', 'debug', keys=('lang',))
@tools.ormcache('self.env.uid', 'debug', 'self.env.lang')
def load_menus(self, debug):
""" Loads all menu items (all applications and their sub-menus).
:return: the menu root
:rtype: dict('children': menu_nodes)
"""
fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon']
menu_roots = self.get_user_roots()
menu_roots_data = menu_roots.read(fields) if menu_roots else []
menu_root = {
'id': False,
'name': 'root',
'parent_id': [-1, ''],
'children': [menu['id'] for menu in menu_roots_data],
}
all_menus = {'root': menu_root}
if not menu_roots_data:
return all_menus
# menus are loaded fully unlike a regular tree view, cause there are a
# limited number of items (752 when all 6.1 addons are installed)
menus_domain = [('id', 'child_of', menu_roots.ids)]
blacklisted_menu_ids = self._load_menus_blacklist()
if blacklisted_menu_ids:
menus_domain = expression.AND([menus_domain, [('id', 'not in', blacklisted_menu_ids)]])
menus = self.search(menus_domain)
menu_items = menus.read(fields)
xmlids = (menu_roots + menus)._get_menuitems_xmlids()
visible_menus = self.search_fetch(
[('id', 'not in', blacklisted_menu_ids)],
['name', 'parent_id', 'action', 'web_icon'],
)._filter_visible_menus()
# add roots at the end of the sequence, so that they will overwrite
# equivalent menu items from full menu read when put into id:item
# mapping, resulting in children being correctly set on the roots.
menu_items.extend(menu_roots_data)
children_dict = defaultdict(list) # {parent_id: []} / parent_id == False for root menus
for menu in visible_menus:
children_dict[menu.parent_id.id].append(menu.id)
mi_attachments = self.env['ir.attachment'].sudo().search_read(
app_info = {}
# recursively set app ids to related children
def _set_app_id(menu_app_id, menu_id):
app_info[menu_id] = menu_app_id
for child_id in children_dict[menu_id]:
_set_app_id(menu_app_id, child_id)
for root_menu_id in children_dict[False]:
_set_app_id(root_menu_id, root_menu_id)
# Filter out menus not related to an app (+ keep root menu), it happens when
# some parent menu are not visible for group.
visible_menus = visible_menus.filtered(lambda menu: menu.id in app_info)
xmlids = visible_menus._get_menuitems_xmlids()
icon_attachments = self.env['ir.attachment'].sudo().search_read(
domain=[('res_model', '=', 'ir.ui.menu'),
('res_id', 'in', [menu_item['id'] for menu_item in menu_items if menu_item['id']]),
('res_id', 'in', visible_menus._ids),
('res_field', '=', 'web_icon_data')],
fields=['res_id', 'datas', 'mimetype'])
icon_attachments_res_id = {attachment['res_id']: attachment for attachment in icon_attachments}
mi_attachment_by_res_id = {attachment['res_id']: attachment for attachment in mi_attachments}
menus_dict = {}
action_ids_by_type = defaultdict(list)
for menu in visible_menus:
# set children ids and xmlids
menu_items_map = {menu_item["id"]: menu_item for menu_item in menu_items}
for menu_item in menu_items:
menu_item.setdefault('children', [])
parent = menu_item['parent_id'] and menu_item['parent_id'][0]
menu_item['xmlid'] = xmlids.get(menu_item['id'], "")
if parent in menu_items_map:
menu_items_map[parent].setdefault(
'children', []).append(menu_item['id'])
attachment = mi_attachment_by_res_id.get(menu_item['id'])
if attachment:
menu_item['web_icon_data'] = attachment['datas'].decode()
menu_item['web_icon_data_mimetype'] = attachment['mimetype']
menu_id = menu.id
attachment = icon_attachments_res_id.get(menu_id)
if action := menu.action:
action_model = action._name
action_id = action.id
action_ids_by_type[action_model].append(action_id)
else:
menu_item['web_icon_data'] = False
menu_item['web_icon_data_mimetype'] = False
all_menus.update(menu_items_map)
action_model = False
action_id = False
# sort by sequence
for menu_id in all_menus:
all_menus[menu_id]['children'].sort(key=lambda id: all_menus[id]['sequence'])
menus_dict[menu_id] = {
'id': menu_id,
'name': menu.name,
'app_id': app_info[menu_id],
'action_model': action_model,
'action_id': action_id,
'web_icon': menu.web_icon,
'web_icon_data': attachment['datas'].decode() if attachment else False,
'web_icon_data_mimetype': attachment['mimetype'] if attachment else False,
'xmlid': xmlids.get(menu_id, ""),
}
# recursively set app ids to related children
def _set_app_id(app_id, menu):
menu['app_id'] = app_id
for child_id in menu['children']:
_set_app_id(app_id, all_menus[child_id])
# prefetch action.path
for model_name, action_ids in action_ids_by_type.items():
self.env[model_name].sudo().browse(action_ids).fetch(['path'])
for app in menu_roots_data:
app_id = app['id']
_set_app_id(app_id, all_menus[app_id])
# set children + model_path
for menu_dict in menus_dict.values():
if menu_dict['action_model']:
menu_dict['action_path'] = self.env[menu_dict['action_model']].sudo().browse(menu_dict['action_id']).path
else:
menu_dict['action_path'] = False
menu_dict['children'] = children_dict[menu_dict['id']]
# filter out menus not related to an app (+ keep root menu)
all_menus = {menu['id']: menu for menu in all_menus.values() if menu.get('app_id')}
all_menus['root'] = menu_root
return all_menus
menus_dict['root'] = {
'id': False,
'name': 'root',
'children': children_dict[False],
}
return menus_dict
def _get_menuitems_xmlids(self):
menuitems = self.env['ir.model.data'].sudo().search([
('res_id', 'in', self.ids),
('model', '=', 'ir.ui.menu')
])
menuitems = self.env['ir.model.data'].sudo().search_fetch(
[('res_id', 'in', self.ids), ('model', '=', 'ir.ui.menu')],
['res_id', 'complete_name'],
)
return {
menu.res_id: menu.complete_name

View file

@ -0,0 +1,67 @@
from odoo import _, api, fields, models
from odoo.exceptions import AccessError, ValidationError
from odoo.tools import ormcache
class PropertiesBaseDefinition(models.Model):
"""Models storing the properties definition of the record without parent."""
_name = "properties.base.definition"
_description = "Properties Base Definition"
properties_field_id = fields.Many2one(
"ir.model.fields",
required=True,
ondelete="cascade",
)
properties_definition = fields.PropertiesDefinition("Properties Definition")
_unique_properties_field_id = models.Constraint(
"UNIQUE(properties_field_id)",
"Only one definition per properties field",
)
@api.depends("properties_field_id")
def _compute_display_name(self):
for definition in self:
if not definition.properties_field_id.model:
definition.display_name = False
continue
definition.display_name = _(
"%s Properties",
self.env[definition.properties_field_id.model]._description,
)
@api.constrains("properties_field_id")
def _check_properties_field_id(self):
if invalid_fields := self.mapped("properties_field_id").filtered(lambda f: f.ttype != 'properties'):
raise ValidationError(
_("The definition needs to be linked to a properties field. Those fields are not: %s.", ', '.join(invalid_fields.mapped('name')))
)
def write(self, vals):
if 'properties_field_id' in vals:
raise AccessError(_("You can not change the field of a base definition"))
return super().write(vals)
def _get_definition_for_property_field(self, model_name, field_name):
return self.browse(self._get_definition_id_for_property_field(model_name, field_name))
@ormcache("model_name", "field_name", cache='stable')
def _get_definition_id_for_property_field(self, model_name, field_name):
definition_record = self.sudo().search(
[
("properties_field_id.model", "=", model_name),
("properties_field_id.name", "=", field_name),
],
limit=1,
)
if not definition_record:
field = self.env["ir.model.fields"].sudo()._get(model_name, field_name)
definition_record = self.sudo().create(
{
"properties_field_id": field.id,
},
)
return definition_record.id

View file

@ -0,0 +1,56 @@
from collections.abc import Iterable
from odoo import models, api, fields
from odoo.fields import Domain
from odoo.tools import SQL
class PropertiesBaseDefinitionMixin(models.AbstractModel):
"""Mixin that add properties without parent on a model."""
_name = "properties.base.definition.mixin"
_description = "Properties Base Definition Mixin"
properties = fields.Properties(
string="Properties",
definition="properties_base_definition_id.properties_definition",
copy=True,
)
properties_base_definition_id = fields.Many2one(
"properties.base.definition",
compute="_compute_properties_base_definition_id",
search="_search_properties_base_definition_id",
)
def _compute_properties_base_definition_id(self):
self.properties_base_definition_id = self.env["properties.base.definition"] \
.sudo()._get_definition_for_property_field(self._name, "properties")
def _search_properties_base_definition_id(self, operator, value):
if operator != "in":
return NotImplemented
properties_base_definition_id = self.env["properties.base.definition"] \
.sudo()._get_definition_id_for_property_field(self._name, "properties")
if not isinstance(value, Iterable):
value = (value,)
return Domain.TRUE if properties_base_definition_id in value else Domain.FALSE
@api.model_create_multi
def create(self, vals_list):
parent = self.env["properties.base.definition"] \
._get_definition_id_for_property_field(self._name, "properties")
for vals in vals_list:
# Needed to add the default properties values
vals["properties_base_definition_id"] = parent
return super().create(vals_list)
def _field_to_sql(self, alias, fname, query=None):
if fname == 'properties_base_definition_id':
# Allow the export to work
parent = self.env["properties.base.definition"] \
._get_definition_id_for_property_field(self._name, "properties")
return SQL("%s", parent)
return super()._field_to_sql(alias, fname, query)

View file

@ -5,9 +5,9 @@ from odoo import fields, models
class ReportLayout(models.Model):
_name = "report.layout"
_name = 'report.layout'
_description = 'Report Layout'
_order = 'sequence'
_order = 'sequence, id'
view_id = fields.Many2one('ir.ui.view', 'Document Template', required=True)
image = fields.Char(string="Preview image src")

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo import api, fields, models
from odoo.exceptions import ValidationError
# see http://doc.qt.io/archives/qt-4.8/qprinter.html#PaperSize-enum
@ -163,8 +162,8 @@ PAPER_SIZES = [
]
class report_paperformat(models.Model):
_name = "report.paperformat"
class ReportPaperformat(models.Model):
_name = 'report.paperformat'
_description = "Paper Format Config"
name = fields.Char('Name', required=True)
@ -192,7 +191,7 @@ class report_paperformat(models.Model):
@api.constrains('format')
def _check_format_or_page(self):
if self.filtered(lambda x: x.format != 'custom' and (x.page_width or x.page_height)):
raise ValidationError(_('You can select either a format or a specific page width/height, but not both.'))
raise ValidationError(self.env._('You can select either a format or a specific page width/height, but not both.'))
def _compute_print_page_size(self):
for record in self:

View file

@ -12,10 +12,10 @@ def sanitize_account_number(acc_number):
return False
class Bank(models.Model):
_description = 'Bank'
class ResBank(models.Model):
_name = 'res.bank'
_order = 'name'
_description = 'Bank'
_order = 'name, id'
_rec_names_search = ['name', 'bic']
name = fields.Char(required=True)
@ -46,6 +46,18 @@ class Bank(models.Model):
return domain
return super()._search_display_name(operator, value)
@api.model_create_multi
def create(self, vals_list):
for vals in vals_list:
if vals.get('bic', False):
vals['bic'] = vals['bic'].upper()
return super().create(vals_list)
def write(self, vals):
if vals.get('bic', False):
vals['bic'] = vals['bic'].upper()
return super().write(vals)
@api.onchange('country')
def _onchange_country_id(self):
if self.country and self.country != self.state.country_id:
@ -73,7 +85,8 @@ class ResPartnerBank(models.Model):
active = fields.Boolean(default=True)
acc_type = fields.Selection(selection=lambda x: x.env['res.partner.bank'].get_supported_account_types(), compute='_compute_acc_type', string='Type', help='Bank account type: Normal or IBAN. Inferred from the bank account number.')
acc_number = fields.Char('Account Number', required=True)
acc_number = fields.Char('Account Number', required=True, search='_search_acc_number')
clearing_number = fields.Char('Clearing Number')
sanitized_acc_number = fields.Char(compute='_compute_sanitized_acc_number', string='Sanitized Account Number', readonly=True, store=True)
acc_holder_name = fields.Char(string='Account Holder Name', help="Account holder name, in case it is different than the name of the Account Holder", compute='_compute_account_holder_name', readonly=False, store=True)
partner_id = fields.Many2one('res.partner', 'Account Holder', ondelete='cascade', index=True, domain=['|', ('is_company', '=', True), ('parent_id', '=', False)], required=True)
@ -85,18 +98,26 @@ class ResPartnerBank(models.Model):
currency_id = fields.Many2one('res.currency', string='Currency')
company_id = fields.Many2one('res.company', 'Company', related='partner_id.company_id', store=True, readonly=True)
country_code = fields.Char(related='partner_id.country_code', string="Country Code")
note = fields.Text('Notes')
color = fields.Integer(compute='_compute_color')
_sql_constraints = [(
'unique_number',
_unique_number = models.Constraint(
'unique(sanitized_acc_number, partner_id)',
'The combination Account Number/Partner must be unique.'
)]
"The combination Account Number/Partner must be unique.",
)
@api.depends('acc_number')
def _compute_sanitized_acc_number(self):
for bank in self:
bank.sanitized_acc_number = sanitize_account_number(bank.acc_number)
def _search_acc_number(self, operator, value):
if operator in ('in', 'not in'):
value = [sanitize_account_number(i) for i in value]
else:
value = sanitize_account_number(value)
return [('sanitized_acc_number', operator, value)]
@api.depends('acc_number')
def _compute_acc_type(self):
for bank in self:
@ -118,30 +139,10 @@ class ResPartnerBank(models.Model):
for acc in self:
acc.display_name = f'{acc.acc_number} - {acc.bank_id.name}' if acc.bank_id else acc.acc_number
def _condition_to_sql(self, alias: str, fname: str, operator: str, value, query) -> SQL:
if fname == 'acc_number':
fname = 'sanitized_acc_number'
if not isinstance(value, str) and isinstance(value, Iterable):
value = [sanitize_account_number(i) for i in value]
else:
value = sanitize_account_number(value)
return super()._condition_to_sql(alias, fname, operator, value, query)
def _sanitize_vals(self, vals):
if 'sanitized_acc_number' in vals: # do not allow to write on sanitized directly
vals['acc_number'] = vals.pop('sanitized_acc_number')
if 'acc_number' in vals:
vals['sanitized_acc_number'] = sanitize_account_number(vals['acc_number'])
@api.model_create_multi
def create(self, vals_list):
for vals in vals_list:
self._sanitize_vals(vals)
return super().create(vals_list)
def write(self, vals):
self._sanitize_vals(vals)
return super().write(vals)
@api.depends('allow_out_payment')
def _compute_color(self):
for bank in self:
bank.color = 10 if bank.allow_out_payment else 1
def action_archive_bank(self):
"""

View file

@ -1,28 +1,27 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import logging
import threading
import warnings
from odoo import api, fields, models, tools, _, Command, SUPERUSER_ID
from odoo import api, fields, models, modules, tools
from odoo.api import SUPERUSER_ID
from odoo.exceptions import ValidationError, UserError
from odoo.osv import expression
from odoo.fields import Command, Domain
from odoo.tools import html2plaintext, file_open, ormcache
from odoo.tools.image import image_process
_logger = logging.getLogger(__name__)
class Company(models.Model):
_name = "res.company"
class ResCompany(models.Model):
_name = 'res.company'
_description = 'Companies'
_order = 'sequence, name'
_inherit = ['format.address.mixin', 'format.vat.label.mixin']
_parent_store = True
def copy(self, default=None):
raise UserError(_('Duplicating a company is not allowed. Please create a new company instead.'))
raise UserError(self.env._('Duplicating a company is not allowed. Please create a new company instead.'))
def _get_logo(self):
with file_open('base/static/img/res_company_logo.png', 'rb') as file:
@ -40,7 +39,7 @@ class Company(models.Model):
parent_path = fields.Char(index=True)
parent_ids = fields.Many2many('res.company', compute='_compute_parent_ids', compute_sudo=True)
root_id = fields.Many2one('res.company', compute='_compute_parent_ids', compute_sudo=True)
partner_id = fields.Many2one('res.partner', string='Partner', required=True)
partner_id = fields.Many2one('res.partner', string='Partner', required=True, index=True)
report_header = fields.Html(string='Company Tagline', translate=True, help="Company tagline, which is included in a printed document's header or footer (depending on the selected layout).")
report_footer = fields.Html(string='Report Footer', translate=True, help="Footer text displayed at the bottom of all reports.")
company_details = fields.Html(string='Company Details', translate=True, help="Header text displayed at the top of all reports.")
@ -66,10 +65,10 @@ class Company(models.Model):
country_code = fields.Char(related='country_id.code', depends=['country_id'])
email = fields.Char(related='partner_id.email', store=True, readonly=False)
phone = fields.Char(related='partner_id.phone', store=True, readonly=False)
mobile = fields.Char(related='partner_id.mobile', store=True, readonly=False)
website = fields.Char(related='partner_id.website', readonly=False)
vat = fields.Char(related='partner_id.vat', string="Tax ID", readonly=False)
company_registry = fields.Char(related='partner_id.company_registry', string="Company ID", readonly=False)
company_registry_placeholder = fields.Char(related='partner_id.company_registry_placeholder')
paperformat_id = fields.Many2one('report.paperformat', 'Paper format', default=lambda self: self.env.ref('base.paperformat_euro', raise_if_not_found=False))
external_report_layout_id = fields.Many2one('ir.ui.view', 'Document Template')
font = fields.Selection([("Lato", "Lato"), ("Roboto", "Roboto"), ("Open_Sans", "Open Sans"), ("Montserrat", "Montserrat"), ("Oswald", "Oswald"), ("Raleway", "Raleway"), ('Tajawal', 'Tajawal'), ('Fira_Mono', 'Fira Mono')], default="Lato")
@ -79,16 +78,18 @@ class Company(models.Model):
layout_background = fields.Selection([('Blank', 'Blank'), ('Demo logo', 'Demo logo'), ('Custom', 'Custom')], default="Blank", required=True)
layout_background_image = fields.Binary("Background Image")
uninstalled_l10n_module_ids = fields.Many2many('ir.module.module', compute='_compute_uninstalled_l10n_module_ids')
_sql_constraints = [
('name_uniq', 'unique (name)', 'The company name must be unique!')
]
_name_uniq = models.Constraint(
'unique (name)',
"The company name must be unique!",
)
def init(self):
for company in self.search([('paperformat_id', '=', False)]):
paperformat_euro = self.env.ref('base.paperformat_euro', False)
if paperformat_euro:
company.write({'paperformat_id': paperformat_euro.id})
sup = super(Company, self)
sup = super()
if hasattr(sup, 'init'):
sup.init()
@ -153,7 +154,7 @@ class Company(models.Model):
def _compute_logo_web(self):
for company in self:
img = company.partner_id.image_1920
company.logo_web = img and base64.b64encode(tools.image_process(base64.b64decode(img), size=(180, 0)))
company.logo_web = img and base64.b64encode(image_process(base64.b64decode(img), size=(180, 0)))
@api.depends('partner_id.image_1920')
def _compute_uses_default_logo(self):
@ -229,7 +230,8 @@ class Company(models.Model):
is_ready_and_not_test = (
not tools.config['test_enable']
and (self.env.registry.ready or not self.env.registry._init)
and not getattr(threading.current_thread(), 'testing', False)
and not modules.module.current_test
and not self.env.context.get('install_mode') # due to savepoint when importing the file
)
if uninstalled_modules and is_ready_and_not_test:
return uninstalled_modules.button_immediate_install()
@ -248,27 +250,18 @@ class Company(models.Model):
def _search_display_name(self, operator, value):
context = dict(self.env.context)
newself = self
constraint = []
constraint = Domain.TRUE
if context.pop('user_preference', None):
# We browse as superuser. Otherwise, the user would be able to
# select only the currently visible companies (according to rules,
# which are probably to allow to see the child companies) even if
# she belongs to some other companies.
companies = self.env.user.company_ids
constraint = [('id', 'in', companies.ids)]
constraint = Domain('id', 'in', companies.ids)
newself = newself.sudo()
newself = newself.with_context(context)
domain = super(Company, newself)._search_display_name(operator, value)
return expression.AND([domain, constraint])
@api.model
@api.returns('self', lambda value: value.id)
def _company_default_get(self, object=False, field=False):
""" Returns the user's company
- Deprecated
"""
_logger.warning("The method '_company_default_get' on res.company is deprecated and shouldn't be used anymore")
return self.env.company
domain = super(ResCompany, newself)._search_display_name(operator, value)
return domain & constraint
@api.depends('company_details')
def _compute_empty_company_details(self):
@ -345,40 +338,38 @@ class Company(models.Model):
self.env.registry.clear_cache()
return res
def write(self, values):
def write(self, vals):
if 'parent_id' in vals:
raise UserError(self.env._("The company hierarchy cannot be changed."))
if vals.get('currency_id'):
currency = self.env['res.currency'].browse(vals['currency_id'])
if not currency.active:
currency.write({'active': True})
res = super().write(vals)
invalidation_fields = self.cache_invalidation_fields()
asset_invalidation_fields = {'font', 'primary_color', 'secondary_color', 'external_report_layout_id'}
companies_needs_l10n = (
values.get('country_id')
vals.get('country_id')
and self.filtered(lambda company: not company.country_id)
or self.browse()
)
if not invalidation_fields.isdisjoint(values):
) or self.browse()
if not invalidation_fields.isdisjoint(vals):
self.env.registry.clear_cache()
if not asset_invalidation_fields.isdisjoint(values):
if not asset_invalidation_fields.isdisjoint(vals):
# this is used in the content of an asset (see asset_styles_company_report)
# and thus needs to invalidate the assets cache when this is changed
self.env.registry.clear_cache('assets') # not 100% it is useful a test is missing if it is the case
if 'parent_id' in values:
raise UserError(_("The company hierarchy cannot be changed."))
if values.get('currency_id'):
currency = self.env['res.currency'].browse(values['currency_id'])
if not currency.active:
currency.write({'active': True})
res = super(Company, self).write(values)
# Archiving a company should also archive all of its branches
if values.get('active') is False:
if vals.get('active') is False:
self.child_ids.active = False
for company in self:
# Copy modified delegated fields from root to branches
if (changed := set(values) & set(self._get_company_root_delegated_field_names())) and not company.parent_id:
if (changed := set(vals) & set(self._get_company_root_delegated_field_names())) and not company.parent_id:
branches = self.sudo().search([
('id', 'child_of', company.id),
('id', '!=', company.id),
@ -391,7 +382,7 @@ class Company(models.Model):
# invalidate company cache to recompute address based on updated partner
company_address_fields = self._get_company_address_field_names()
company_address_fields_upd = set(company_address_fields) & set(values.keys())
company_address_fields_upd = set(company_address_fields) & set(vals.keys())
if company_address_fields_upd:
self.invalidate_model(company_address_fields)
return res
@ -406,7 +397,7 @@ class Company(models.Model):
])
if company_active_users:
# You cannot disable companies with active users
raise ValidationError(_(
raise ValidationError(self.env._(
'The company %(company_name)s cannot be archived because it is still used '
'as the default company of %(active_users)s users.',
company_name=company.name,
@ -420,7 +411,7 @@ class Company(models.Model):
for fname in company._get_company_root_delegated_field_names():
if company[fname] != company.parent_id[fname]:
description = self.env['ir.model.fields']._get("res.company", fname).field_description
raise ValidationError(_("The %s of a subsidiary must be the same as it's root company.", description))
raise ValidationError(self.env._("The %s of a subsidiary must be the same as it's root company.", description))
@api.model
def _get_main_company(self):
@ -467,7 +458,7 @@ class Company(models.Model):
self.ensure_one()
return {
'type': 'ir.actions.act_window',
'name': _('Branches'),
'name': self.env._('Branches'),
'res_model': 'res.company',
'domain': [('parent_id', '=', self.id)],
'context': {
@ -480,7 +471,7 @@ class Company(models.Model):
def _get_public_user(self):
self.ensure_one()
# We need sudo to be able to see public users from others companies too
public_users = self.env.ref('base.group_public').sudo().with_context(active_test=False).users
public_users = self.env.ref('base.group_public').sudo().with_context(active_test=False).all_user_ids
public_users_for_company = public_users.filtered(lambda user: user.company_id == self)
if public_users_for_company:

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import re
@ -10,26 +9,7 @@ from odoo.exceptions import AccessError, RedirectWarning, UserError
_logger = logging.getLogger(__name__)
class ResConfigModuleInstallationMixin(object):
__slots__ = ()
@api.model
def _install_modules(self, modules):
""" Install the requested modules.
:param modules: a recordset of ir.module.module records
:return: the next action to execute
"""
result = None
to_install_modules = modules.filtered(lambda module: module.state == 'uninstalled')
if to_install_modules:
result = to_install_modules.button_immediate_install()
return result
class ResConfigConfigurable(models.TransientModel):
class ResConfig(models.TransientModel):
''' Base classes for new-style configuration items
Configuration items should inherit from this class, implement
@ -115,14 +95,14 @@ class ResConfigConfigurable(models.TransientModel):
return self.cancel() or self.next()
class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin):
class ResConfigSettings(models.TransientModel):
""" Base configuration wizard for application settings. It provides support for setting
default values, assigning groups to employee users, and installing modules.
To make such a 'settings' wizard, define a model like::
class MyConfigWizard(models.TransientModel):
_name = 'my.settings'
_inherit = 'res.config.settings'
_inherit = ['res.config.settings']
default_foo = fields.type(..., default_model='my.model'),
group_bar = fields.Boolean(..., group='base.group_user', implied_group='my.group'),
@ -178,33 +158,26 @@ class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin)
def copy(self, default=None):
raise UserError(_("Cannot duplicate configuration!"))
def onchange_module(self, field_value, module_name):
module_sudo = self.env['ir.module.module']._get(module_name[7:])
if not int(field_value) and module_sudo.state in ('to install', 'installed', 'to upgrade'):
deps = module_sudo.downstream_dependencies()
dep_names = (deps | module_sudo).mapped('shortdesc')
message = '\n'.join(dep_names)
return {
'warning': {
'title': _('Warning!'),
'message': _('Disabling this option will also uninstall the following modules \n%s', message),
}
}
return {}
@api.model
def _install_modules(self, modules):
""" Install the requested modules.
def _register_hook(self):
""" Add an onchange method for each module field. """
def make_method(name):
return lambda self: self.onchange_module(self[name], name)
:param modules: a recordset of ir.module.module records
:return: the next action to execute
"""
result = None
for name in self._fields:
if name.startswith('module_'):
method = make_method(name)
self._onchange_methods[name].append(method)
to_install_modules = modules.filtered(lambda module: module.state == 'uninstalled')
if to_install_modules:
result = to_install_modules.button_immediate_install()
return result
@api.model
def _get_classified_fields(self, fnames=None):
""" return a dictionary with the fields classified by category::
""" return a dictionary with the fields classified by category:
.. code-block:: python
{ 'default': [('default_foo', 'model', 'foo'), ...],
'group': [('group_bar', [browse_group], browse_implied_group), ...],
@ -253,6 +226,7 @@ class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin)
return {'default': defaults, 'group': groups, 'module': modules, 'config': configs, 'other': others}
@api.model
def get_values(self):
"""
Return values for the fields other that `default`, `group` and `module`
@ -277,7 +251,7 @@ class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin)
# groups: which groups are implied by the group Employee
for name, groups, implied_group in classified['group']:
res[name] = all(implied_group in group.implied_ids for group in groups)
res[name] = all(implied_group in group.all_implied_ids for group in groups)
if self._fields[name].type == 'selection':
res[name] = str(int(res[name])) # True, False -> '1', '0'
@ -406,15 +380,23 @@ class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin)
self.env.flush_all()
if to_uninstall:
to_uninstall.button_immediate_uninstall()
return {
'type': 'ir.actions.act_window',
'target': 'new',
'name': _('Uninstall modules'),
'view_mode': 'form',
'res_model': 'base.module.uninstall',
'context': {
'default_module_ids': to_uninstall.ids,
},
}
installation_status = self._install_modules(to_install)
if installation_status or to_uninstall:
# After the uninstall/install calls, the registry and environments
# are no longer valid. So we reset the environment.
self.env.reset()
self = self.env()[self._name]
self.env.transaction.reset()
# pylint: disable=next-method-called
config = self.env['res.config'].next() or {}
@ -447,9 +429,10 @@ class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin)
:param string menu_xml_id: the xml id of the menuitem where the view is located,
structured as follows: module_name.menuitem_xml_id (e.g.: "sales_team.menu_sale_config")
:return tuple:
- t[0]: string: full path to the menuitem (e.g.: "Settings/Configuration/Sales")
- t[1]: int or long: id of the menuitem's action
:return: a 2-value tuple where
- t[0]: string: full path to the menuitem (e.g.: "Settings/Configuration/Sales")
- t[1]: int or long: id of the menuitem's action
"""
ir_ui_menu = self.env.ref(menu_xml_id)
return (ir_ui_menu.complete_name, ir_ui_menu.action.id)
@ -461,7 +444,8 @@ class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin)
:param string full_field_name: the full name of the field, structured as follows:
model_name.field_name (e.g.: "sale.config.settings.fetchmail_lead")
:return string: human readable name of the field (e.g.: "Create leads from incoming mails")
:return: human readable name of the field (e.g.: "Create leads from incoming mails")
:rtype: str
"""
model_name, field_name = full_field_name.rsplit('.', 1)
return self.env[model_name].fields_get([field_name])[field_name]['string']
@ -469,31 +453,51 @@ class ResConfigSettings(models.TransientModel, ResConfigModuleInstallationMixin)
@api.model
def get_config_warning(self, msg):
"""
Helper: return a Warning exception with the given message where the %(field:xxx)s
and/or %(menu:yyy)s are replaced by the human readable field's name and/or menuitem's
full path.
Helper: return a Warning exception with the given message where the ``%(field:xxx)s``
and/or ``%(menu:yyy)s`` are replaced by the human readable field's name and/or
menuitem's full path.
Usage:
------
Just include in your error message %(field:model_name.field_name)s to obtain the human
readable field's name, and/or %(menu:module_name.menuitem_xml_id)s to obtain the menuitem's
full path.
Just include in your error message ``%(field:model_name.field_name)s`` to obtain the
human readable field's name, and/or %(menu:module_name.menuitem_xml_id)s to obtain the
menuitem's full path.
Example of use:
---------------
from odoo.addons.base.models.res_config import get_warning_config
raise get_warning_config(cr, _("Error: this action is prohibited. You should check the field %(field:sale.config.settings.fetchmail_lead)s in %(menu:sales_team.menu_sale_config)s."), context=context)
.. code-block:: python
raise env['ir..config.settings'](_(
"Error: this action is prohibited. You should check the "
"field %(field:sale.config.settings.fetchmail_lead)s in "
"%(menu:sales_team.menu_sale_config)s."))
This will return an exception containing the following message:
Error: this action is prohibited. You should check the field Create leads from incoming mails in Settings/Configuration/Sales.
Error: this action is prohibited. You should check the field Create
leads from incoming mails in Settings/Configuration/Sales.
What if there is another substitution in the message already?
-------------------------------------------------------------
You could have a situation where the error message you want to upgrade already contains a substitution. Example:
Cannot find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration\\Journals\\Journals.
What you want to do here is simply to replace the path by %menu:account.menu_account_config)s, and leave the rest alone.
In order to do that, you can use the double percent (%%) to escape your new substitution, like so:
Cannot find any account journal of %s type for this company.\n\nYou can create one in the %%(menu:account.menu_account_config)s.
You could have a situation where the error message you want to upgrade already contains
a substitution.
Example:
Cannot find any account journal of %s type for this company.
You can create one in the menu:
Configuration/Journals/Journals.
What you want to do here is simply to replace the path by
``%menu:account.menu_account_config)s``, and leave the rest alone.
In order to do that, you can use the double percent (``%%``) to escape your new
substitution, like so:
Cannot find any account journal of %s type for this company.
You can create one in the %%(menu:account.menu_account_config)s.
"""
self = self.sudo()

View file

@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
import logging
from odoo import api, fields, models, tools
from odoo.osv import expression
from odoo.exceptions import UserError
from psycopg2 import IntegrityError
from odoo.fields import Domain
from odoo.tools.translate import _
_logger = logging.getLogger(__name__)
@ -29,10 +28,10 @@ NO_FLAG_COUNTRIES = [
]
class Country(models.Model):
class ResCountry(models.Model):
_name = 'res.country'
_description = 'Country'
_order = 'name'
_order = 'name, id'
_rec_names_search = ['name', 'code']
name = fields.Char(
@ -65,6 +64,7 @@ class Country(models.Model):
phone_code = fields.Integer(string='Country Calling Code')
country_group_ids = fields.Many2many('res.country.group', 'res_country_res_country_group_rel',
'res_country_id', 'res_country_group_id', string='Country Groups')
country_group_codes = fields.Json(compute="_compute_country_group_codes")
state_ids = fields.One2many('res.country.state', 'country_id', string='States')
name_position = fields.Selection([
('before', 'Before Address'),
@ -76,22 +76,24 @@ class Country(models.Model):
state_required = fields.Boolean(default=False)
zip_required = fields.Boolean(default=True)
_sql_constraints = [
('name_uniq', 'unique (name)',
'The name of the country must be unique!'),
('code_uniq', 'unique (code)',
'The code of the country must be unique!')
]
_name_uniq = models.Constraint(
'unique (name)',
"The name of the country must be unique!",
)
_code_uniq = models.Constraint(
'unique (code)',
"The code of the country must be unique!",
)
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
def name_search(self, name='', domain=None, operator='ilike', limit=100):
result = []
domain = args or []
domain = Domain(domain or Domain.TRUE)
# first search by code
if operator not in expression.NEGATIVE_TERM_OPERATORS and name and len(name) == 2:
countries = self.search_fetch(expression.AND([domain, [('code', operator, name)]]), ['display_name'], limit=limit)
if not operator in Domain.NEGATIVE_OPERATORS and name and len(name) == 2:
countries = self.search_fetch(domain & Domain('code', operator, name), ['display_name'], limit=limit)
result.extend((country.id, country.display_name) for country in countries.sudo())
domain = expression.AND([domain, [('id', 'not in', countries.ids)]])
domain &= Domain('id', 'not in', countries.ids)
if limit is not None:
limit -= len(countries)
if limit <= 0:
@ -101,16 +103,17 @@ class Country(models.Model):
return result
@api.model
@tools.ormcache('code')
@tools.ormcache('code', cache='stable')
def _phone_code_for(self, code):
return self.search([('code', '=', code)]).phone_code
@api.model_create_multi
def create(self, vals_list):
self.env.registry.clear_cache('stable')
for vals in vals_list:
if vals.get('code'):
vals['code'] = vals['code'].upper()
return super(Country, self).create(vals_list)
return super().create(vals_list)
def write(self, vals):
if vals.get('code'):
@ -118,7 +121,7 @@ class Country(models.Model):
res = super().write(vals)
if ('code' in vals or 'phone_code' in vals):
# Intentionally simplified by not clearing the cache in create and unlink.
self.env.registry.clear_cache()
self.env.registry.clear_cache('stable')
if 'address_view_id' in vals or 'vat_label' in vals:
# Changing the address view of the company must invalidate the view cached for res.partner
# because of _view_get_address
@ -127,6 +130,10 @@ class Country(models.Model):
self.env.registry.clear_cache('templates')
return res
def unlink(self):
self.env.registry.clear_cache('stable')
return super().unlink()
def get_address_fields(self):
self.ensure_one()
return re.findall(r'\((.+?)\)', self.address_format)
@ -150,39 +157,77 @@ class Country(models.Model):
except (ValueError, KeyError):
raise UserError(_('The layout contains an invalid format key'))
class CountryGroup(models.Model):
_description = "Country Group"
@api.depends('country_group_ids')
def _compute_country_group_codes(self):
'''If a country has no associated country groups, assign [''] to country_group_codes.
This prevents storing [] as False, which helps avoid iteration over a False value and
maintains a valid structure.
'''
for country in self:
country.country_group_codes = [g.code for g in country.country_group_ids if g.code] or ['']
class ResCountryGroup(models.Model):
_name = 'res.country.group'
_description = "Country Group"
name = fields.Char(required=True, translate=True)
code = fields.Char(string="Code")
country_ids = fields.Many2many('res.country', 'res_country_res_country_group_rel',
'res_country_group_id', 'res_country_id', string='Countries')
_check_code_uniq = models.Constraint(
'unique(code)',
'The country group code must be unique!',
)
class CountryState(models.Model):
_description = "Country state"
def _sanitize_vals(self, vals):
if code := vals.get('code'):
vals['code'] = code.upper()
return vals
@api.model_create_multi
def create(self, vals_list):
return super().create([self._sanitize_vals(vals) for vals in vals_list])
def write(self, vals):
return super().write(self._sanitize_vals(vals))
class ResCountryState(models.Model):
_name = 'res.country.state'
_order = 'code'
_description = "Country state"
_order = 'code, id'
_rec_names_search = ['name', 'code']
country_id = fields.Many2one('res.country', string='Country', required=True)
country_id = fields.Many2one('res.country', string='Country', required=True, index=True)
name = fields.Char(string='State Name', required=True,
help='Administrative divisions of a country. E.g. Fed. State, Departement, Canton')
code = fields.Char(string='State Code', help='The state code.', required=True)
_sql_constraints = [
('name_code_uniq', 'unique(country_id, code)', 'The code of the state must be unique by country!')
]
_name_code_uniq = models.Constraint(
'unique(country_id, code)',
"The code of the state must be unique by country!",
)
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
def name_search(self, name='', domain=None, operator='ilike', limit=100):
result = []
domain = args or []
domain = Domain(domain or Domain.TRUE)
# accepting 'in' as operator (see odoo/addons/base/tests/test_res_country.py)
if operator == 'in':
if limit is None:
limit = 100 # force a limit
for item in name:
result.extend(self.name_search(item, domain, operator='=', limit=limit - len(result)))
if len(result) == limit:
break
return result
# first search by code (with =ilike)
if operator not in expression.NEGATIVE_TERM_OPERATORS and name:
states = self.search_fetch(expression.AND([domain, [('code', '=like', name)]]), ['display_name'], limit=limit)
if not operator in Domain.NEGATIVE_OPERATORS and name:
states = self.search_fetch(domain & Domain('code', '=like', name), ['display_name'], limit=limit)
result.extend((state.id, state.display_name) for state in states.sudo())
domain = expression.AND([domain, [('id', 'not in', states.ids)]])
domain &= Domain('id', 'not in', states.ids)
if limit is not None:
limit -= len(states)
if limit <= 0:
@ -194,31 +239,32 @@ class CountryState(models.Model):
@api.model
def _search_display_name(self, operator, value):
domain = super()._search_display_name(operator, value)
if value and operator not in expression.NEGATIVE_TERM_OPERATORS:
if value and not operator in Domain.NEGATIVE_OPERATORS:
if operator in ('ilike', '='):
domain = expression.OR([
domain, self._get_name_search_domain(value, operator),
])
domain |= self._get_name_search_domain(value, operator)
elif operator == 'in':
domain = expression.OR([
domain,
*(self._get_name_search_domain(name, '=') for name in value),
])
domain |= Domain.OR(
self._get_name_search_domain(name, '=') for name in value
)
if country_id := self.env.context.get('country_id'):
domain = expression.AND([domain, [('country_id', '=', country_id)]])
domain &= Domain('country_id', '=', country_id)
return domain
def _get_name_search_domain(self, name, operator):
m = re.fullmatch(r"(?P<name>.+)\((?P<country>.+)\)", name)
if m:
return [
return Domain([
('name', operator, m['name'].strip()),
'|', ('country_id.name', 'ilike', m['country'].strip()),
('country_id.code', '=', m['country'].strip()),
]
return [expression.FALSE_LEAF]
])
return Domain.FALSE
@api.depends('country_id')
@api.depends_context('formatted_display_name')
def _compute_display_name(self):
for record in self:
record.display_name = f"{record.name} ({record.country_id.code})"
if self.env.context.get('formatted_display_name'):
record.display_name = f"{record.name} \t --{record.country_id.code}--"
else:
record.display_name = f"{record.name} ({record.country_id.code})"

View file

@ -1,14 +1,12 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import math
from collections.abc import Iterable
from lxml import etree
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools
from odoo.exceptions import UserError, ValidationError
from odoo.tools import parse_date, SQL
from odoo.tools import ormcache, parse_date, SQL
_logger = logging.getLogger(__name__)
@ -19,8 +17,8 @@ except ImportError:
num2words = None
class Currency(models.Model):
_name = "res.currency"
class ResCurrency(models.Model):
_name = 'res.currency'
_description = "Currency"
_rec_names_search = ['name', 'full_name']
_order = 'active desc, name'
@ -48,34 +46,35 @@ class Currency(models.Model):
currency_subunit_label = fields.Char(string="Currency Subunit", translate=True)
is_current_company_currency = fields.Boolean(compute='_compute_is_current_company_currency')
_sql_constraints = [
('unique_name', 'unique (name)', 'The currency code must be unique!'),
('rounding_gt_zero', 'CHECK (rounding>0)', 'The rounding factor must be greater than 0!')
]
_unique_name = models.Constraint(
'unique (name)',
"The currency code must be unique!",
)
_rounding_gt_zero = models.Constraint(
'CHECK (rounding>0)',
"The rounding factor must be greater than 0!",
)
@api.model_create_multi
def create(self, vals_list):
res = super().create(vals_list)
self._toggle_group_multi_currency()
# Currency info is cached to reduce the number of SQL queries when building the session
# info. See `ir_http.get_currencies`.
self.env.registry.clear_cache()
# invalidate cache for get_all_currencies
self.env.registry.clear_cache('stable')
return res
def unlink(self):
res = super().unlink()
self._toggle_group_multi_currency()
# Currency info is cached to reduce the number of SQL queries when building the session
# info. See `ir_http.get_currencies`.
self.env.registry.clear_cache()
# invalidate cache for get_all_currencies
self.env.registry.clear_cache('stable')
return res
def write(self, vals):
res = super().write(vals)
if vals.keys() & {'active', 'digits', 'position', 'symbol'}:
# Currency info is cached to reduce the number of SQL queries when building the session
# info. See `ir_http.get_currencies`.
self.env.registry.clear_cache()
if vals.keys() & {'active', 'digits', 'name', 'position', 'symbol'}:
# invalidate cache for get_all_currencies
self.env.registry.clear_cache('stable')
if 'active' not in vals:
return res
self._toggle_group_multi_currency()
@ -108,7 +107,7 @@ class Currency(models.Model):
@api.constrains('active')
def _check_company_currency_stays_active(self):
if self._context.get('install_mode') or self._context.get('force_deactivate'):
if self.env.context.get('install_mode') or self.env.context.get('force_deactivate'):
# install_mode : At install, when this check is run, the "active" field of a currency added to a company will
# still be evaluated as False, despite it's automatically set at True when added to the company.
# force_deactivate : Allows deactivation of a currency in tests to enable non multi_currency behaviors
@ -116,25 +115,24 @@ class Currency(models.Model):
currencies = self.filtered(lambda c: not c.active)
if self.env['res.company'].search_count([('currency_id', 'in', currencies.ids)], limit=1):
raise UserError(_("This currency is set on a company and therefore cannot be deactivated."))
raise UserError(self.env._("This currency is set on a company and therefore cannot be deactivated."))
def _get_rates(self, company, date):
if not self.ids:
return {}
currency_query = self.env['res.currency']._where_calc([
('id', 'in', self.ids),
], active_test=False)
currency_query = self._as_query(ordered=False)
currency_id = self.env['res.currency']._field_to_sql(currency_query.table, 'id')
rate_query = self.env['res.currency.rate']._search([
Rate = self.env['res.currency.rate']
rate_query = Rate._search([
('name', '<=', date),
('company_id', 'in', (False, company.root_id.id)),
('currency_id', '=', currency_id),
], order='company_id.id, name DESC', limit=1)
rate_fallback = self.env['res.currency.rate']._search([
rate_query.add_where(SQL("%s = %s", Rate._field_to_sql(rate_query.table, 'currency_id'), currency_id))
rate_fallback = Rate._search([
('company_id', 'in', (False, company.root_id.id)),
('currency_id', '=', currency_id),
], order='company_id.id, name ASC', limit=1)
rate = self.env['res.currency.rate']._field_to_sql(rate_query.table, 'rate')
rate_fallback.add_where(SQL("%s = %s", Rate._field_to_sql(rate_fallback.table, 'currency_id'), currency_id))
rate = Rate._field_to_sql(rate_query.table, 'rate')
return dict(self.env.execute_query(currency_query.select(
currency_id,
SQL("COALESCE((%s), (%s), 1.0)", rate_query.select(rate), rate_fallback.select(rate))
@ -148,8 +146,8 @@ class Currency(models.Model):
@api.depends('rate_ids.rate')
@api.depends_context('to_currency', 'date', 'company', 'company_id')
def _compute_current_rate(self):
date = self._context.get('date') or fields.Date.context_today(self)
company = self.env['res.company'].browse(self._context.get('company_id')) or self.env.company
date = self.env.context.get('date') or fields.Date.context_today(self)
company = self.env['res.company'].browse(self.env.context.get('company_id')) or self.env.company
to_currency = self.browse(self.env.context.get('to_currency')) or company.currency_id
# the subquery selects the last rate before 'date' for the given currency/company
currency_rates = (self + to_currency)._get_rates(self.env.company, date)
@ -190,13 +188,13 @@ class Currency(models.Model):
integer_value = int(integral)
lang = tools.get_lang(self.env)
if self.is_zero(amount - integer_value):
return _(
return self.env._(
'%(integral_amount)s %(currency_unit)s',
integral_amount=_num2words(integer_value, lang=lang.iso_code),
currency_unit=self.currency_unit_label,
)
else:
return _(
return self.env._(
'%(integral_amount)s %(currency_unit)s and %(fractional_amount)s %(currency_subunit)s',
integral_amount=_num2words(integer_value, lang=lang.iso_code),
currency_unit=self.currency_unit_label,
@ -262,6 +260,15 @@ class Currency(models.Model):
self.ensure_one()
return tools.float_is_zero(amount, precision_rounding=self.rounding)
@ormcache(cache='stable')
@api.model
def get_all_currencies(self):
currencies = self.sudo().search_fetch([('active', '=', True)], ['name', 'symbol', 'position', 'decimal_places'])
return {
c.id: {'name': c.name, 'symbol': c.symbol, 'position': c.position, 'digits': [69, c.decimal_places]}
for c in currencies
}
@api.model
def _get_conversion_rate(self, from_currency, to_currency, company=None, date=None):
if from_currency == to_currency:
@ -313,16 +320,16 @@ class Currency(models.Model):
"""The override of _get_view changing the rate field labels according to the company currency
makes the view cache dependent on the company currency"""
key = super()._get_view_cache_key(view_id, view_type, **options)
return key + ((self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name,)
return key + ((self.env['res.company'].browse(self.env.context.get('company_id')) or self.env.company).currency_id.name,)
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
arch, view = super()._get_view(view_id, view_type, **options)
if view_type in ('list', 'form'):
currency_name = (self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name
currency_name = (self.env['res.company'].browse(self.env.context.get('company_id')) or self.env.company).currency_id.name
fields_maps = [
[['company_rate', 'rate'], _('Unit per %s', currency_name)],
[['inverse_company_rate', 'inverse_rate'], _('%s per Unit', currency_name)],
[['company_rate', 'rate'], self.env._('Unit per %s', currency_name)],
[['inverse_company_rate', 'inverse_rate'], self.env._('%s per Unit', currency_name)],
]
for fnames, label in fields_maps:
xpath_expression = '//list//field[' + " or ".join(f"@name='{f}'" for f in fnames) + "][1]"
@ -332,11 +339,11 @@ class Currency(models.Model):
return arch, view
class CurrencyRate(models.Model):
_name = "res.currency.rate"
class ResCurrencyRate(models.Model):
_name = 'res.currency.rate'
_description = "Currency Rate"
_rec_names_search = ['name', 'rate']
_order = "name desc"
_order = "name desc, id"
_check_company_domain = models.check_company_domain_parent_of
name = fields.Date(string='Date', required=True, index=True,
@ -361,14 +368,18 @@ class CurrencyRate(models.Model):
aggregator="avg",
help="The rate of the currency to the currency of rate 1 ",
)
currency_id = fields.Many2one('res.currency', string='Currency', readonly=True, required=True, ondelete="cascade")
currency_id = fields.Many2one('res.currency', string='Currency', readonly=True, required=True, index=True, ondelete="cascade")
company_id = fields.Many2one('res.company', string='Company',
default=lambda self: self.env.company.root_id)
_sql_constraints = [
('unique_name_per_day', 'unique (name,currency_id,company_id)', 'Only one currency rate per day allowed!'),
('currency_rate_check', 'CHECK (rate>0)', 'The currency rate must be strictly positive.'),
]
_unique_name_per_day = models.Constraint(
'unique (name,currency_id,company_id)',
"Only one currency rate per day allowed!",
)
_currency_rate_check = models.Constraint(
'CHECK (rate>0)',
"The currency rate must be strictly positive.",
)
def _sanitize_vals(self, vals):
if 'inverse_company_rate' in vals and ('company_rate' in vals or 'rate' in vals):
@ -389,7 +400,7 @@ class CurrencyRate(models.Model):
def _get_latest_rate(self):
# Make sure 'name' is defined when creating a new rate.
if not self.name:
raise UserError(_("The name for the current rate is empty.\nPlease set it."))
raise UserError(self.env._("The name for the current rate is empty.\nPlease set it."))
return self.currency_id.rate_ids.sudo().filtered(lambda x: (
x.rate
and x.company_id == (self.company_id or self.env.company.root_id)
@ -447,8 +458,8 @@ class CurrencyRate(models.Model):
if abs(diff) > 0.2:
return {
'warning': {
'title': _("Warning for %s", self.currency_id.name),
'message': _(
'title': self.env._("Warning for %s", self.currency_id.name),
'message': self.env._(
"The new rate is quite far from the previous rate.\n"
"Incorrect currency rates may cause critical problems, make sure the rate is correct!"
)
@ -459,11 +470,14 @@ class CurrencyRate(models.Model):
def _check_company_id(self):
for rate in self:
if rate.company_id.sudo().parent_id:
raise ValidationError("Currency rates should only be created for main companies")
raise ValidationError(self.env._("Currency rates should only be created for main companies"))
@api.model
def _search_display_name(self, operator, value):
value = parse_date(self.env, value)
if isinstance(value, Iterable) and not isinstance(value, str):
value = [parse_date(self.env, v) for v in value]
else:
value = parse_date(self.env, value)
return super()._search_display_name(operator, value)
@api.model
@ -471,18 +485,18 @@ class CurrencyRate(models.Model):
"""The override of _get_view changing the rate field labels according to the company currency
makes the view cache dependent on the company currency"""
key = super()._get_view_cache_key(view_id, view_type, **options)
return key + ((self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name,)
return key + ((self.env['res.company'].browse(self.env.context.get('company_id')) or self.env.company).currency_id.name,)
@api.model
def _get_view(self, view_id=None, view_type='form', **options):
arch, view = super()._get_view(view_id, view_type, **options)
if view_type == 'list':
names = {
'company_currency_name': (self.env['res.company'].browse(self._context.get('company_id')) or self.env.company).currency_id.name,
'rate_currency_name': self.env['res.currency'].browse(self._context.get('active_id')).name or 'Unit',
'company_currency_name': (self.env['res.company'].browse(self.env.context.get('company_id')) or self.env.company).currency_id.name,
'rate_currency_name': self.env['res.currency'].browse(self.env.context.get('active_id')).name or 'Unit',
}
for name, label in [['company_rate', _('%(rate_currency_name)s per %(company_currency_name)s', **names)],
['inverse_company_rate', _('%(company_currency_name)s per %(rate_currency_name)s', **names)]]:
for name, label in [['company_rate', self.env._('%(rate_currency_name)s per %(company_currency_name)s', **names)],
['inverse_company_rate', self.env._('%(company_currency_name)s per %(rate_currency_name)s', **names)]]:
if (node := arch.find(f"./field[@name='{name}']")) is not None:
node.set('string', label)

View file

@ -5,7 +5,7 @@ from datetime import datetime
import logging
from odoo import api, fields, models, tools
from odoo.http import GeoIP, request, root
from odoo.http import GeoIP, request, root, STORED_SESSION_BYTES
from odoo.tools import SQL, OrderedSet, unique
from odoo.tools.translate import _
from .res_users import check_identity
@ -34,13 +34,8 @@ class ResDeviceLog(models.Model):
is_current = fields.Boolean("Current Device", compute="_compute_is_current")
linked_ip_addresses = fields.Text("Linked IP address", compute="_compute_linked_ip_addresses")
def init(self):
self.env.cr.execute(SQL("""
CREATE INDEX IF NOT EXISTS res_device_log__composite_idx ON %s
(user_id, session_identifier, platform, browser, last_activity, id) WHERE revoked = False
""",
SQL.identifier(self._table)
))
_composite_idx = models.Index("(user_id, session_identifier, platform, browser, last_activity, id) WHERE revoked IS NOT TRUE")
_revoked_idx = models.Index("(revoked) WHERE revoked IS NOT TRUE")
def _compute_display_name(self):
for device in self:
@ -68,8 +63,8 @@ class ResDeviceLog(models.Model):
)
def _order_field_to_sql(self, alias, field_name, direction, nulls, query):
if field_name == 'is_current' and request:
return SQL("session_identifier = %s DESC", request.session.sid[:42])
if field_name == 'is_current' and request and request.session.sid:
return SQL("session_identifier = %s DESC", request.session.sid[:STORED_SESSION_BYTES])
return super()._order_field_to_sql(alias, field_name, direction, nulls, query)
def _is_mobile(self, platform):
@ -92,7 +87,7 @@ class ResDeviceLog(models.Model):
geoip = GeoIP(trace['ip_address'])
user_id = request.session.uid
session_identifier = request.session.sid[:42]
session_identifier = request.session.sid[:STORED_SESSION_BYTES]
if self.env.cr.readonly:
self.env.cr.rollback()
@ -136,9 +131,34 @@ class ResDeviceLog(models.Model):
""")
_logger.info("GC device logs delete %d entries", self.env.cr.rowcount)
@api.autovacuum
def __update_revoked(self):
"""
Set the field ``revoked`` to ``True`` for ``res.device.log``
for which the session file no longer exists on the filesystem.
"""
device_logs_by_session_identifier = {}
for session_identifier, device_logs in self.env['res.device.log']._read_group(
domain=[('revoked', '=', False)],
groupby=['session_identifier'],
aggregates=['id:recordset'],
):
device_logs_by_session_identifier[session_identifier] = device_logs
revoked_session_identifiers = root.session_store.get_missing_session_identifiers(
device_logs_by_session_identifier.keys()
)
device_logs_to_revoke = self.env['res.device.log'].concat(*map(
device_logs_by_session_identifier.get,
revoked_session_identifiers
))
# Initial run may take 5-10 minutes due to many non-revoked sessions,
# marking them enables index use on ``revoked IS NOT TRUE``.
device_logs_to_revoke.sudo().write({'revoked': True})
class ResDevice(models.Model):
_name = "res.device"
_name = 'res.device'
_inherit = ["res.device.log"]
_description = "Devices"
_auto = False
@ -184,9 +204,9 @@ class ResDevice(models.Model):
D2.last_activity > D.last_activity
OR (D2.last_activity = D.last_activity AND D2.id > D.id)
)
AND D2.revoked = False
AND D2.revoked IS NOT TRUE
)
AND D.revoked = False
AND D.revoked IS NOT TRUE
"""
@property

Some files were not shown because too many files have changed in this diff Show more