18.0 vanilla

This commit is contained in:
Ernad Husremovic 2025-10-03 18:06:50 +02:00
parent d72e748793
commit 0a7ae8db93
337 changed files with 399651 additions and 232598 deletions

View file

@ -3,7 +3,6 @@
from . import controllers
from . import models
from . import populate
from . import report
from . import wizard

View file

@ -41,6 +41,7 @@ The kernel of Odoo, needed for all installation.
'views/ir_ui_menu_views.xml',
'views/ir_ui_view_views.xml',
'views/ir_default_views.xml',
'data/ir_config_parameter_data.xml',
'data/ir_cron_data.xml',
'report/ir_model_report.xml',
'report/ir_model_templates.xml',
@ -67,8 +68,8 @@ The kernel of Odoo, needed for all installation.
'views/res_country_views.xml',
'views/res_currency_views.xml',
'views/res_users_views.xml',
'views/res_device_views.xml',
'views/res_users_identitycheck_views.xml',
'views/ir_property_views.xml',
'views/res_config_settings_views.xml',
'views/report_paperformat_views.xml',
'security/ir.model.access.csv',

View file

@ -10,7 +10,7 @@ from markupsafe import Markup
import odoo
from odoo.http import Controller, route, dispatch_rpc, request, Response
from odoo.fields import Date, Datetime, Command
from odoo.tools import lazy, ustr
from odoo.tools import lazy
from odoo.tools.misc import frozendict
# ==========================================================
@ -45,7 +45,7 @@ def xmlrpc_handle_exception_int(e):
formatted_info = "".join(traceback.format_exception(*info))
fault = xmlrpc.client.Fault(RPC_FAULT_CODE_APPLICATION_ERROR, formatted_info)
return xmlrpc.client.dumps(fault, allow_none=None)
return dumps(fault)
def xmlrpc_handle_exception_string(e):
@ -65,7 +65,7 @@ def xmlrpc_handle_exception_string(e):
formatted_info = "".join(traceback.format_exception(*info))
fault = xmlrpc.client.Fault(odoo.tools.exception_to_unicode(e), formatted_info)
return xmlrpc.client.dumps(fault, allow_none=None, encoding=None)
return dumps(fault)
class OdooMarshaller(xmlrpc.client.Marshaller):
@ -78,9 +78,8 @@ class OdooMarshaller(xmlrpc.client.Marshaller):
# By default, in xmlrpc, bytes are converted to xmlrpc.client.Binary object.
# Historically, odoo is sending binary as base64 string.
# In python 3, base64.b64{de,en}code() methods now works on bytes.
# Convert them to str to have a consistent behavior between python 2 and python 3.
def dump_bytes(self, value, write):
self.dump_unicode(ustr(value), write)
self.dump_unicode(value.decode(), write)
def dump_datetime(self, value, write):
# override to marshall as a string for backwards compatibility
@ -111,21 +110,34 @@ class OdooMarshaller(xmlrpc.client.Marshaller):
dispatch[Markup] = lambda self, value, write: self.dispatch[str](self, str(value), write)
# monkey-patch xmlrpc.client's marshaller
xmlrpc.client.Marshaller = OdooMarshaller
def dumps(params: list | tuple | xmlrpc.client.Fault) -> str:
response = OdooMarshaller(allow_none=False).dumps(params)
return f"""\
<?xml version="1.0"?>
<methodResponse>
{response}
</methodResponse>
"""
# ==========================================================
# RPC Controller
# ==========================================================
def _check_request():
if request.db:
request.env.cr.close()
class RPC(Controller):
"""Handle RPC connections."""
def _xmlrpc(self, service):
"""Common method to handle an XML-RPC request."""
_check_request()
data = request.httprequest.get_data()
params, method = xmlrpc.client.loads(data)
params, method = xmlrpc.client.loads(data, use_datetime=True)
result = dispatch_rpc(service, method, params)
return xmlrpc.client.dumps((result,), methodresponse=1, allow_none=False)
return dumps((result,))
@route("/xmlrpc/<service>", auth="none", methods=["POST"], csrf=False, save_session=False)
def xmlrpc_1(self, service):
@ -134,6 +146,7 @@ class RPC(Controller):
This entrypoint is historical and non-compliant, but kept for
backwards-compatibility.
"""
_check_request()
try:
response = self._xmlrpc(service)
except Exception as error:
@ -147,6 +160,7 @@ class RPC(Controller):
@route("/xmlrpc/2/<service>", auth="none", methods=["POST"], csrf=False, save_session=False)
def xmlrpc_2(self, service):
"""XML-RPC service that returns faultCode as int."""
_check_request()
try:
response = self._xmlrpc(service)
except Exception as error:
@ -160,4 +174,5 @@ class RPC(Controller):
@route('/jsonrpc', type='json', auth="none", save_session=False)
def jsonrpc(self, service, method, args):
""" Method used by client APIs to contact OpenERP. """
_check_request()
return dispatch_rpc(service, method, args)

View file

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<odoo>
<data noupdate="1">
<record id="default_max_email_size" model="ir.config_parameter">
<field name="key">base.default_max_email_size</field>
<field name="value">10</field>
</record>
</data>
</odoo>

View file

@ -7,7 +7,6 @@
<field name="code">model._run_vacuum_cleaner()</field>
<field name='interval_number'>1</field>
<field name='interval_type'>days</field>
<field name="numbercall">-1</field>
<field name="priority">3</field>
</record>
@ -15,9 +14,9 @@
<field name="name">Base: Portal Users Deletion</field>
<field name="model_id" ref="base.model_res_users_deletion"/>
<field name="state">code</field>
<field name="code">model._gc_portal_users()</field>
<field name="code">model._gc_portal_users(batch_size=50)</field>
<field name='interval_number'>1</field>
<field name='interval_type'>days</field>
<field name="numbercall">-1</field>
<field name="priority">8</field>
</record>
</odoo>

View file

@ -30,10 +30,10 @@ action = {
module(s) failed to install and were disabled
</h3>
<field name="failure_ids">
<tree>
<list>
<field name="module_id"/>
<field name="error"/>
</tree>
</list>
</field>
<footer>
<button string="Ok" class="oe_highlight" type="object" name="done" data-hotkey="q"/>

View file

@ -97,7 +97,7 @@
<record model="ir.module.category" id="module_category_services_helpdesk">
<field name="name">Helpdesk</field>
<field name="description">After-sales services</field>
<field name="description" />
<field name="sequence">14</field>
</record>

View file

@ -30,8 +30,8 @@
<field name="website">https://www.odoo.com/app/timesheet?utm_source=db&amp;utm_medium=module</field>
</record>
<record model="ir.module.module" id="base.module_account_accountant">
<field name="name">account_accountant</field>
<record model="ir.module.module" id="base.module_accountant">
<field name="name">accountant</field>
<field name="sequence">30</field>
<field name="shortdesc">Accounting</field>
<field name="category_id" ref="base.module_category_accounting_accounting"/>
@ -156,20 +156,6 @@
<field name="website">https://www.odoo.com/app/amazon-connector?utm_source=db&amp;utm_medium=module</field>
</record>
<record model="ir.module.module" id="base.module_sale_ebay">
<field name="name">sale_ebay</field>
<field name="shortdesc">eBay Connector</field>
<field name="sequence">325</field>
<field name="category_id" ref="base.module_category_sales_sales"/>
<field name="application" eval="True"/>
<field name="summary">Sell on eBay easily</field>
<field name="license">OEEL-1</field>
<field name="author">Odoo S.A.</field>
<field name="to_buy" eval="True"/>
<field name="icon">/base/static/img/icons/sale_ebay.png</field>
<field name="website">https://www.odoo.com/app/sales?utm_source=db&amp;utm_medium=module</field>
</record>
<record model="ir.module.module" id="base.module_planning">
<field name="name">planning</field>
<field name="shortdesc">Planning</field>
@ -296,17 +282,6 @@
<field name="website">https://play.google.com/store/apps/details?id=com.odoo.mobile</field>
</record>
<record model="ir.module.module" id="base.module_website_twitter_wall">
<field name="name">website_twitter_wall</field>
<field name="shortdesc">Twitter Wall</field>
<field name="summary">Interactive twitter wall for events</field>
<field name="category_id" ref="base.module_category_website_website"/>
<field name="license">OEEL-1</field>
<field name="author">Odoo S.A.</field>
<field name="to_buy" eval="True"/>
<field name="icon">/base/static/img/icons/website_twitter_wall.png</field>
</record>
<record model="ir.module.module" id="base.module_payment_sepa_direct_debit">
<field name="name">payment_sepa_direct_debit</field>
<field name="shortdesc">Sepa Direct Debit Payment Provider</field>

View file

@ -8,13 +8,14 @@
<field name="page_height">0</field>
<field name="page_width">0</field>
<field name="orientation">Portrait</field>
<field name="margin_top">40</field>
<field name="margin_top">52</field>
<field name="margin_bottom">32</field>
<field name="margin_left">7</field>
<field name="margin_right">7</field>
<field name="margin_left">0</field>
<field name="margin_right">0</field>
<field name="header_line" eval="False" />
<field name="header_spacing">35</field>
<field name="header_spacing">52</field>
<field name="dpi">90</field>
<field name="css_margins" eval="True" />
</record>
<record id="paperformat_us" model="report.paperformat">
@ -24,13 +25,14 @@
<field name="page_height">0</field>
<field name="page_width">0</field>
<field name="orientation">Portrait</field>
<field name="margin_top">40</field>
<field name="margin_bottom">30</field>
<field name="margin_left">7</field>
<field name="margin_right">7</field>
<field name="margin_top">52</field>
<field name="margin_bottom">32</field>
<field name="margin_left">0</field>
<field name="margin_right">0</field>
<field name="header_line" eval="False" />
<field name="header_spacing">35</field>
<field name="header_spacing">52</field>
<field name="dpi">90</field>
<field name="css_margins" eval="True" />
</record>
<record id="paperformat_batch_deposit" model="report.paperformat">

View file

@ -915,7 +915,7 @@ state_et_5,et,"Dire Dawa","DR"
state_et_6,et,"Gambella Peoples","GM"
state_et_7,et,"Harrari Peoples","HR"
state_et_8,et,"Oromia","OR"
state_et_9,et,"Somalia","SM"
state_et_9,et,"Somali","SM"
state_et_10,et,"Southern Peoples, Nations, and Nationalities","SP"
state_et_11,et,"Tigray","TG"
state_ie_1,ie,"Carlow","CW"
@ -1748,3 +1748,34 @@ state_jo_ka,jo,"Karak",JO-KA
state_jo_ma,jo,"Mafraq",JO-MA
state_jo_md,jo,"Madaba",JO-MD
state_jo_mn,jo,"Maan",JO-MN
state_kr_11,kr,"서울특별시","KR-11"
state_kr_26,kr,"부산광역시","KR-26"
state_kr_27,kr,"대구광역시","KR-27"
state_kr_28,kr,"인천광역시","KR-28"
state_kr_29,kr,"광주광역시","KR-29"
state_kr_30,kr,"대전광역시","KR-30"
state_kr_31,kr,"울산광역시","KR-31"
state_kr_41,kr,"경기도","KR-41"
state_kr_42,kr,"강원도","KR-42"
state_kr_43,kr,"충청북도","KR-43"
state_kr_44,kr,"충청남도","KR-44"
state_kr_45,kr,"전라북도","KR-45"
state_kr_46,kr,"전라남도","KR-46"
state_kr_47,kr,"경상북도","KR-47"
state_kr_48,kr,"경상남도","KR-48"
state_kr_49,kr,"제주특별자치도","KR-49"
state_kr_50,kr,"세종특별자치시","KR-50"
state_be_1,be,"Antwerp","VAN"
state_be_2,be,"Limburg","VLI"
state_be_3,be,"East Flanders","VOV"
state_be_4,be,"Flemish Brabant","VBR"
state_be_5,be,"West Flanders","VWV"
state_be_6,be,"Walloon Brabant","WBR"
state_be_7,be,"Hainaut","WHT"
state_be_8,be,"Liège","WLG"
state_be_9,be,"Luxembourg","WLX"
state_be_10,be,"Namur","WNA"
state_bn_b,bn,"Brunei-Muara","B"
state_bn_k,bn,"Belait","K"
state_bn_t,bn,"Tutong","T"
state_bn_p,bn,"Temburong","P"

1 id country_id:id name code
915 state_et_6 et Gambella Peoples GM
916 state_et_7 et Harrari Peoples HR
917 state_et_8 et Oromia OR
918 state_et_9 et Somalia Somali SM
919 state_et_10 et Southern Peoples, Nations, and Nationalities SP
920 state_et_11 et Tigray TG
921 state_ie_1 ie Carlow CW
1748 state_jo_ma jo Mafraq JO-MA
1749 state_jo_md jo Madaba JO-MD
1750 state_jo_mn jo Maan JO-MN
1751 state_kr_11 kr 서울특별시 KR-11
1752 state_kr_26 kr 부산광역시 KR-26
1753 state_kr_27 kr 대구광역시 KR-27
1754 state_kr_28 kr 인천광역시 KR-28
1755 state_kr_29 kr 광주광역시 KR-29
1756 state_kr_30 kr 대전광역시 KR-30
1757 state_kr_31 kr 울산광역시 KR-31
1758 state_kr_41 kr 경기도 KR-41
1759 state_kr_42 kr 강원도 KR-42
1760 state_kr_43 kr 충청북도 KR-43
1761 state_kr_44 kr 충청남도 KR-44
1762 state_kr_45 kr 전라북도 KR-45
1763 state_kr_46 kr 전라남도 KR-46
1764 state_kr_47 kr 경상북도 KR-47
1765 state_kr_48 kr 경상남도 KR-48
1766 state_kr_49 kr 제주특별자치도 KR-49
1767 state_kr_50 kr 세종특별자치시 KR-50
1768 state_be_1 be Antwerp VAN
1769 state_be_2 be Limburg VLI
1770 state_be_3 be East Flanders VOV
1771 state_be_4 be Flemish Brabant VBR
1772 state_be_5 be West Flanders VWV
1773 state_be_6 be Walloon Brabant WBR
1774 state_be_7 be Hainaut WHT
1775 state_be_8 be Liège WLG
1776 state_be_9 be Luxembourg WLX
1777 state_be_10 be Namur WNA
1778 state_bn_b bn Brunei-Muara B
1779 state_bn_k bn Belait K
1780 state_bn_t bn Tutong T
1781 state_bn_p bn Temburong P

View file

@ -1,92 +1,93 @@
"id","name","code","iso_code","direction","grouping","decimal_point","thousands_sep","date_format","time_format","week_start"
"base.lang_en","English (US)","en_US","en","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%H:%M:%S","7"
"base.lang_am_ET","Amharic / አምሃርኛ","am_ET","am_ET","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S","7"
"base.lang_ar","Arabic / الْعَرَبيّة","ar_001","ar","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","6"
"base.lang_ar_SY","Arabic (Syria) / الْعَرَبيّة","ar_SY","ar_SY","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","6"
"base.lang_az","Azerbaijani / Azərbaycanca","az_AZ","az","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_eu_ES","Basque / Euskara","eu_ES","eu_ES","Left-to-Right","[]",",",,"%a, %Y.eko %bren %da","%H:%M:%S","1"
"base.lang_bn_IN","Bengali / বাংলা","bn_IN","bn_IN","Left-to-Right","[]",",",,"%A %d %b %Y","%I:%M:%S","1"
"base.lang_bs_BA","Bosnian / bosanski jezik","bs_BA","bs","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","1"
"base.lang_bg","Bulgarian / български език","bg_BG","bg","Left-to-Right","[3,0]",",",,"%d.%m.%Y","%H,%M,%S","1"
"base.lang_ca_ES","Catalan / Català","ca_ES","ca_ES","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_zh_CN","Chinese (Simplified) / 简体中文","zh_CN","zh_CN","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H时%M分%S秒","7"
"base.lang_zh_HK","Chinese (HK)","zh_HK","zh_HK","Left-to-Right","[3,0]",".",",","%Y年%m月%d日 %A","%I時%M分%S秒","7"
"base.lang_zh_TW","Chinese (Traditional) / 繁體中文","zh_TW","zh_TW","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","7"
"base.lang_hr","Croatian / hrvatski jezik","hr_HR","hr","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","1"
"base.lang_cs_CZ","Czech / Čeština","cs_CZ","cs_CZ","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_da_DK","Danish / Dansk","da_DK","da_DK","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_nl_BE","Dutch (BE) / Nederlands (BE)","nl_BE","nl_BE","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_nl","Dutch / Nederlands","nl_NL","nl","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_en_AU","English (AU)","en_AU","en_AU","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_en_CA","English (CA)","en_CA","en_CA","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","7"
"base.lang_en_GB","English (UK)","en_GB","en_GB","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_en_IN","English (IN)","en_IN","en_IN","Left-to-Right","[3,2,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_en_NZ","English (NZ)","en_NZ","en_NZ","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_et_EE","Estonian / Eesti keel","et_EE","et","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_fi","Finnish / Suomi","fi_FI","fi","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H.%M.%S","1"
"base.lang_fr_BE","French (BE) / Français (BE)","fr_BE","fr_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_fr_CA","French (CA) / Français (CA)","fr_CA","fr_CA","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","7"
"base.lang_fr_CH","French (CH) / Français (CH)","fr_CH","fr_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","1"
"base.lang_fr","French / Français","fr_FR","fr","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_gl_ES","Galician / Galego","gl_ES","gl","Left-to-Right","[]",",",,"%d/%m/%Y","%H:%M:%S","1"
"base.lang_ka_GE","Georgian / ქართული ენა","ka_GE","ka","Left-to-Right","[3,0]",",",".","%m/%d/%Y","%H:%M:%S","1"
"base.lang_de","German / Deutsch","de_DE","de","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","1"
"base.lang_de_CH","German (CH) / Deutsch (CH)","de_CH","de_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","1"
"base.lang_el_GR","Greek / Ελληνικά","el_GR","el_GR","Left-to-Right","[]",",",".","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_gu_IN","Gujarati / ગુજરાતી","gu_IN","gu","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","7"
"base.lang_he_IL","Hebrew / עברית","he_IL","he","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_hi_IN","Hindi / हिंदी","hi_IN","hi","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","7"
"base.lang_hu","Hungarian / Magyar","hu_HU","hu","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_id","Indonesian / Bahasa Indonesia","id_ID","id","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_it","Italian / Italiano","it_IT","it","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_ja_JP","Japanese / 日本語","ja_JP","ja","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","7"
"base.lang_kab_DZ","Kabyle / Taqbaylit","kab_DZ","kab","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","6"
"base.lang_km","Khmer / ភាសាខ្មែរ","km_KH","km","Left-to-Right","[3,0]",".",",","%d %B %Y","%H:%M:%S","7"
"base.lang_ko_KP","Korean (KP) / 한국어 (KP)","ko_KP","ko_KP","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%I:%M:%S %p","1"
"base.lang_ko_KR","Korean (KR) / 한국어 (KR)","ko_KR","ko_KR","Left-to-Right","[3,0]",".",",","%Y년 %m월 %d일","%H시 %M분 %S초","7"
"base.lang_lo_LA","Lao / ພາສາລາວ","lo_LA","lo","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_lv","Latvian / latviešu valoda","lv_LV","lv","Left-to-Right","[3,0]",","," ","%Y.%m.%d.","%H:%M:%S","1"
"base.lang_lt","Lithuanian / Lietuvių kalba","lt_LT","lt","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","1"
"base.lang_lb","Luxembourgish","lb_LU","lb","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_mk","Macedonian / македонски јазик","mk_MK","mk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_ml","Malayalam / മലയാളം","ml_IN","ml","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","1"
"base.lang_mn","Mongolian / монгол","mn_MN","mn","Left-to-Right","[3,0]",".","'","%Y-%m-%d","%H:%M:%S","7"
"base.lang_ms","Malay / Bahasa Melayu","ms_MY","ms","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
base.lang_my,"Burmese / ဗမာစာ",my_MM,my,"Left-to-Right","[3,3]",".",",","%Y %b %d %A","%I:%M:%S %p","7"
"base.lang_nb_NO","Norwegian Bokmål / Norsk bokmål","nb_NO","nb_NO","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_fa_IR","Persian / فارسی","fa_IR","fa","Right-to-Left","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","6"
"base.lang_pl","Polish / Język polski","pl_PL","pl","Left-to-Right","[]",",",,"%d.%m.%Y","%H:%M:%S","1"
"base.lang_pt_AO","Portuguese (AO) / Português (AO)","pt_AO","pt_AO","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","1"
"base.lang_pt_BR","Portuguese (BR) / Português (BR)","pt_BR","pt_BR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_pt","Portuguese / Português","pt_PT","pt","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","1"
"base.lang_ro","Romanian / română","ro_RO","ro","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","1"
"base.lang_ru","Russian / русский язык","ru_RU","ru","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_sr_RS","Serbian (Cyrillic) / српски","sr_RS","sr_RS","Left-to-Right","[]",",",,"%d.%m.%Y.","%H:%M:%S","7"
"base.lang_sr@latin","Serbian (Latin) / srpski","sr@latin","sr@latin","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","7"
"base.lang_sk","Slovak / Slovenský jazyk","sk_SK","sk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_sl_SI","Slovenian / slovenščina","sl_SI","sl","Left-to-Right","[]",","," ","%d. %m. %Y","%H:%M:%S","1"
"base.lang_es_419","Spanish (Latin America) / Español (América Latina)","es_419","es_419","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_AR","Spanish (AR) / Español (AR)","es_AR","es_AR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_BO","Spanish (BO) / Español (BO)","es_BO","es_BO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_CL","Spanish (CL) / Español (CL)","es_CL","es_CL","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_CO","Spanish (CO) / Español (CO)","es_CO","es_CO","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","7"
"base.lang_es_CR","Spanish (CR) / Español (CR)","es_CR","es_CR","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_DO","Spanish (DO) / Español (DO)","es_DO","es_DO","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","1"
"base.lang_es_EC","Spanish (EC) / Español (EC)","es_EC","es_EC","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_GT","Spanish (GT) / Español (GT)","es_GT","es_GT","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_MX","Spanish (MX) / Español (MX)","es_MX","es_MX","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PA","Spanish (PA) / Español (PA)","es_PA","es_PA","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PE","Spanish (PE) / Español (PE)","es_PE","es_PE","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_PY","Spanish (PY) / Español (PY)","es_PY","es_PY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es_UY","Spanish (UY) / Español (UY)","es_UY","es_UY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_es_VE","Spanish (VE) / Español (VE)","es_VE","es_VE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","7"
"base.lang_es","Spanish / Español","es_ES","es","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sw","Swahili / Kiswahili","sw","sw","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sv_SE","Swedish / Svenska","sv_SE","sv","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","1"
"base.lang_th","Thai / ภาษาไทย","th_TH","th","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","7"
"base.lang_tl","Tagalog / Filipino","tl_PH","tl","Left-to-Right","[3,0]",".",",","%m/%d/%y","%H:%M:%S","1"
"base.lang_tr","Turkish / Türkçe","tr_TR","tr","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","1"
"base.lang_uk_UA","Ukrainian / українська","uk_UA","uk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","1"
"base.lang_vi_VN","Vietnamese / Tiếng Việt","vi_VN","vi","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","1"
"base.lang_sq_AL","Albanian / Shqip","sq_AL","sq","Left-to-Right","[3,0]",",",".","%Y-%b-%d","%I.%M.%S.","1"
"base.lang_te_IN","Telugu / తెలుగు","te_IN","te","Left-to-Right","[]",".",",","%B %d %A %Y","%p%I.%M.%S","7"
"id","name","code","iso_code","direction","grouping","decimal_point","thousands_sep","date_format","time_format","short_time_format","week_start"
"base.lang_en","English (US)","en_US","en","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%H:%M:%S","%H:%M","7"
"base.lang_am_ET","Amharic / አምሃርኛ","am_ET","am_ET","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S","%I:%M","7"
"base.lang_ar","Arabic / الْعَرَبيّة","ar_001","ar","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","%I:%M","6"
"base.lang_ar_SY","Arabic (Syria) / الْعَرَبيّة","ar_SY","ar_SY","Right-to-Left","[3,0]",".",",","%d %b, %Y","%I:%M:%S %p","%I:%M","6"
"base.lang_az","Azerbaijani / Azərbaycanca","az_AZ","az","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_eu_ES","Basque / Euskara","eu_ES","eu_ES","Left-to-Right","[]",",",,"%a, %Y.eko %bren %da","%H:%M:%S","%H:%M","1"
"base.lang_be","Belarusian / Беларуская мова","be_BY","be","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_bn_IN","Bengali / বাংলা","bn_IN","bn_IN","Left-to-Right","[]",",",,"%A %d %b %Y","%I:%M:%S","%I:%M","1"
"base.lang_bs_BA","Bosnian / bosanski jezik","bs_BA","bs","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_bg","Bulgarian / български език","bg_BG","bg","Left-to-Right","[3,0]",",",,"%d.%m.%Y","%H,%M,%S","%H,%M","1"
"base.lang_ca_ES","Catalan / Català","ca_ES","ca_ES","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_zh_CN","Chinese (Simplified) / 简体中文","zh_CN","zh_CN","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H时%M分%S秒","%H时%M分","7"
"base.lang_zh_HK","Chinese (HK)","zh_HK","zh_HK","Left-to-Right","[3,0]",".",",","%Y年%m月%d日 %A","%I時%M分%S秒","%I時%M分","7"
"base.lang_zh_TW","Chinese (Traditional) / 繁體中文","zh_TW","zh_TW","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","%H時%M分","7"
"base.lang_hr","Croatian / hrvatski jezik","hr_HR","hr","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_cs_CZ","Czech / Čeština","cs_CZ","cs_CZ","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_da_DK","Danish / Dansk","da_DK","da_DK","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_nl_BE","Dutch (BE) / Nederlands (BE)","nl_BE","nl_BE","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_nl","Dutch / Nederlands","nl_NL","nl","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_en_AU","English (AU)","en_AU","en_AU","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_en_CA","English (CA)","en_CA","en_CA","Left-to-Right","[3,0]",".",",","%Y-%m-%d","%H:%M:%S","%H:%M","7"
"base.lang_en_GB","English (UK)","en_GB","en_GB","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_en_IN","English (IN)","en_IN","en_IN","Left-to-Right","[3,2,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_en_NZ","English (NZ)","en_NZ","en_NZ","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_et_EE","Estonian / Eesti keel","et_EE","et","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_fi","Finnish / Suomi","fi_FI","fi","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H.%M.%S","%H.%M","1"
"base.lang_fr_BE","French (BE) / Français (BE)","fr_BE","fr_BE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_fr_CA","French (CA) / Français (CA)","fr_CA","fr_CA","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","%H:%M","7"
"base.lang_fr_CH","French (CH) / Français (CH)","fr_CH","fr_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_fr","French / Français","fr_FR","fr","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_gl_ES","Galician / Galego","gl_ES","gl","Left-to-Right","[]",",",,"%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_ka_GE","Georgian / ქართული ენა","ka_GE","ka","Left-to-Right","[3,0]",",",".","%m/%d/%Y","%H:%M:%S","%H:%M","1"
"base.lang_de","German / Deutsch","de_DE","de","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_de_CH","German (CH) / Deutsch (CH)","de_CH","de_CH","Left-to-Right","[3,0]",".","'","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_el_GR","Greek / Ελληνικά","el_GR","el_GR","Left-to-Right","[]",",",".","%d/%m/%Y","%I:%M:%S %p","%I:%M %p","1"
"base.lang_gu_IN","Gujarati / ગુજરાતી","gu_IN","gu","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","%I:%M","7"
"base.lang_he_IL","Hebrew / עברית","he_IL","he","Right-to-Left","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_hi_IN","Hindi / हिंदी","hi_IN","hi","Left-to-Right","[]",".",",","%A %d %b %Y","%I:%M:%S","%I:%M","7"
"base.lang_hu","Hungarian / Magyar","hu_HU","hu","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","%H:%M","1"
"base.lang_id","Indonesian / Bahasa Indonesia","id_ID","id","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_it","Italian / Italiano","it_IT","it","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_ja_JP","Japanese / 日本語","ja_JP","ja","Left-to-Right","[3,0]",".",",","%Y年%m月%d日","%H時%M分%S秒","%H時%M分","7"
"base.lang_kab_DZ","Kabyle / Taqbaylit","kab_DZ","kab","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","6"
"base.lang_km","Khmer / ភាសាខ្មែរ","km_KH","km","Left-to-Right","[3,0]",".",",","%d %B %Y","%H:%M:%S","%H:%M","7"
"base.lang_ko_KP","Korean (KP) / 한국어 (KP)","ko_KP","ko_KP","Left-to-Right","[3,0]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","1"
"base.lang_ko_KR","Korean (KR) / 한국어 (KR)","ko_KR","ko_KR","Left-to-Right","[3,0]",".",",","%Y년 %m월 %d일","%H시 %M분 %S초","%H시 %M분","7"
"base.lang_lo_LA","Lao / ພາສາລາວ","lo_LA","lo","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_lv","Latvian / latviešu valoda","lv_LV","lv","Left-to-Right","[3,0]",","," ","%Y.%m.%d.","%H:%M:%S","%H:%M","1"
"base.lang_lt","Lithuanian / Lietuvių kalba","lt_LT","lt","Left-to-Right","[3,0]",",",".","%Y-%m-%d","%H:%M:%S","%H:%M","1"
"base.lang_lb","Luxembourgish","lb_LU","lb","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_mk","Macedonian / македонски јазик","mk_MK","mk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_ml","Malayalam / മലയാളം","ml_IN","ml","Left-to-Right","[3,0]",","," ","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_mn","Mongolian / монгол","mn_MN","mn","Left-to-Right","[3,0]",".","'","%Y-%m-%d","%H:%M:%S","%H:%M","7"
"base.lang_ms","Malay / Bahasa Melayu","ms_MY","ms","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
base.lang_my,"Burmese / ဗမာစာ",my_MM,my,"Left-to-Right","[3,3]",".",",","%Y %b %d %A","%I:%M:%S %p","%I:%M %p","7"
"base.lang_nb_NO","Norwegian Bokmål / Norsk bokmål","nb_NO","nb_NO","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_fa_IR","Persian / فارسی","fa_IR","fa","Right-to-Left","[3,0]",".",",","%Y/%m/%d","%H:%M:%S","%H:%M","6"
"base.lang_pl","Polish / Język polski","pl_PL","pl","Left-to-Right","[]",",",,"%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_pt_AO","Portuguese (AO) / Português (AO)","pt_AO","pt_AO","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_pt_BR","Portuguese (BR) / Português (BR)","pt_BR","pt_BR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_pt","Portuguese / Português","pt_PT","pt","Left-to-Right","[]",",",,"%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_ro","Romanian / română","ro_RO","ro","Left-to-Right","[3,0]",",",".","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_ru","Russian / русский язык","ru_RU","ru","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_sr@Cyrl","Serbian (Cyrillic) / српски","sr@Cyrl","sr@Cyrl","Left-to-Right","[]",",",,"%d.%m.%Y.","%H:%M:%S","%H:%M","7"
"base.lang_sr@latin","Serbian (Latin) / srpski","sr@latin","sr@latin","Left-to-Right","[]",".",",","%m/%d/%Y","%I:%M:%S %p","%I:%M %p","7"
"base.lang_sk","Slovak / Slovenský jazyk","sk_SK","sk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_sl_SI","Slovenian / slovenščina","sl_SI","sl","Left-to-Right","[]",","," ","%d. %m. %Y","%H:%M:%S","%H:%M","1"
"base.lang_es_419","Spanish (Latin America) / Español (América Latina)","es_419","es_419","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_AR","Spanish (AR) / Español (AR)","es_AR","es_AR","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_BO","Spanish (BO) / Español (BO)","es_BO","es_BO","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_CL","Spanish (CL) / Español (CL)","es_CL","es_CL","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_CO","Spanish (CO) / Español (CO)","es_CO","es_CO","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_CR","Spanish (CR) / Español (CR)","es_CR","es_CR","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_DO","Spanish (DO) / Español (DO)","es_DO","es_DO","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%I:%M:%S %p","%I:%M %p","1"
"base.lang_es_EC","Spanish (EC) / Español (EC)","es_EC","es_EC","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_GT","Spanish (GT) / Español (GT)","es_GT","es_GT","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_MX","Spanish (MX) / Español (MX)","es_MX","es_MX","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_PA","Spanish (PA) / Español (PA)","es_PA","es_PA","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_PE","Spanish (PE) / Español (PE)","es_PE","es_PE","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_PY","Spanish (PY) / Español (PY)","es_PY","es_PY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es_UY","Spanish (UY) / Español (UY)","es_UY","es_UY","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_es_VE","Spanish (VE) / Español (VE)","es_VE","es_VE","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_es","Spanish / Español","es_ES","es","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_sw","Swahili / Kiswahili","sw","sw","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_sv_SE","Swedish / Svenska","sv_SE","sv","Left-to-Right","[3,0]",","," ","%Y-%m-%d","%H:%M:%S","%H:%M","1"
"base.lang_th","Thai / ภาษาไทย","th_TH","th","Left-to-Right","[3,0]",".",",","%d/%m/%Y","%H:%M:%S","%H:%M","7"
"base.lang_tl","Tagalog / Filipino","tl_PH","tl","Left-to-Right","[3,0]",".",",","%m/%d/%y","%H:%M:%S","%H:%M","1"
"base.lang_tr","Turkish / Türkçe","tr_TR","tr","Left-to-Right","[3,0]",",",".","%d-%m-%Y","%H:%M:%S","%H:%M","1"
"base.lang_uk_UA","Ukrainian / українська","uk_UA","uk","Left-to-Right","[3,0]",","," ","%d.%m.%Y","%H:%M:%S","%H:%M","1"
"base.lang_vi_VN","Vietnamese / Tiếng Việt","vi_VN","vi","Left-to-Right","[3,0]",",",".","%d/%m/%Y","%H:%M:%S","%H:%M","1"
"base.lang_sq_AL","Albanian / Shqip","sq_AL","sq","Left-to-Right","[3,0]",",",".","%Y-%b-%d","%I.%M.%S.","%I.%M.","1"
"base.lang_te_IN","Telugu / తెలుగు","te_IN","te","Left-to-Right","[]",".",",","%B %d %A %Y","%p%I.%M.%S","%p%I.%M","7"

1 id name code iso_code direction grouping decimal_point thousands_sep date_format time_format short_time_format week_start
2 base.lang_en English (US) en_US en Left-to-Right [3,0] . , %m/%d/%Y %H:%M:%S %H:%M 7
3 base.lang_am_ET Amharic / አምሃርኛ am_ET am_ET Left-to-Right [3,0] . , %d/%m/%Y %I:%M:%S %I:%M 7
4 base.lang_ar Arabic / الْعَرَبيّة ar_001 ar Right-to-Left [3,0] . , %d %b, %Y %I:%M:%S %p %I:%M 6
5 base.lang_ar_SY Arabic (Syria) / الْعَرَبيّة ar_SY ar_SY Right-to-Left [3,0] . , %d %b, %Y %I:%M:%S %p %I:%M 6
6 base.lang_az Azerbaijani / Azərbaycanca az_AZ az Left-to-Right [3,0] ,   %d.%m.%Y %H:%M:%S %H:%M 1
7 base.lang_eu_ES Basque / Euskara eu_ES eu_ES Left-to-Right [] , %a, %Y.eko %bren %da %H:%M:%S %H:%M 1
8 base.lang_bn_IN base.lang_be Bengali / বাংলা Belarusian / Беларуская мова bn_IN be_BY bn_IN be Left-to-Right [] [3,0] , %A %d %b %Y %d.%m.%Y %I:%M:%S %H:%M:%S %H:%M 1
9 base.lang_bs_BA base.lang_bn_IN Bosnian / bosanski jezik Bengali / বাংলা bs_BA bn_IN bs bn_IN Left-to-Right [3,0] [] , . %d.%m.%Y %A %d %b %Y %H:%M:%S %I:%M:%S %I:%M 1
10 base.lang_bg base.lang_bs_BA Bulgarian / български език Bosnian / bosanski jezik bg_BG bs_BA bg bs Left-to-Right [3,0] , . %d.%m.%Y %H,%M,%S %H:%M:%S %H:%M 1
11 base.lang_ca_ES base.lang_bg Catalan / Català Bulgarian / български език ca_ES bg_BG ca_ES bg Left-to-Right [3,0] , . %d/%m/%Y %d.%m.%Y %H:%M:%S %H,%M,%S %H,%M 1
12 base.lang_zh_CN base.lang_ca_ES Chinese (Simplified) / 简体中文 Catalan / Català zh_CN ca_ES zh_CN ca_ES Left-to-Right [3,0] . , , . %Y年%m月%d日 %d/%m/%Y %H时%M分%S秒 %H:%M:%S %H:%M 7 1
13 base.lang_zh_HK base.lang_zh_CN Chinese (HK) Chinese (Simplified) / 简体中文 zh_HK zh_CN zh_HK zh_CN Left-to-Right [3,0] . , %Y年%m月%d日 %A %Y年%m月%d日 %I時%M分%S秒 %H时%M分%S秒 %H时%M分 7
14 base.lang_zh_TW base.lang_zh_HK Chinese (Traditional) / 繁體中文 Chinese (HK) zh_TW zh_HK zh_TW zh_HK Left-to-Right [3,0] . , %Y年%m月%d日 %Y年%m月%d日 %A %H時%M分%S秒 %I時%M分%S秒 %I時%M分 7
15 base.lang_hr base.lang_zh_TW Croatian / hrvatski jezik Chinese (Traditional) / 繁體中文 hr_HR zh_TW hr zh_TW Left-to-Right [3,0] , . . , %d.%m.%Y %Y年%m月%d日 %H:%M:%S %H時%M分%S秒 %H時%M分 1 7
16 base.lang_cs_CZ base.lang_hr Czech / Čeština Croatian / hrvatski jezik cs_CZ hr_HR cs_CZ hr Left-to-Right [3,0] ,   . %d.%m.%Y %H:%M:%S %H:%M 1
17 base.lang_da_DK base.lang_cs_CZ Danish / Dansk Czech / Čeština da_DK cs_CZ da_DK cs_CZ Left-to-Right [3,0] , .   %d-%m-%Y %d.%m.%Y %H:%M:%S %H:%M 1
18 base.lang_nl_BE base.lang_da_DK Dutch (BE) / Nederlands (BE) Danish / Dansk nl_BE da_DK nl_BE da_DK Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
19 base.lang_nl base.lang_nl_BE Dutch / Nederlands Dutch (BE) / Nederlands (BE) nl_NL nl_BE nl nl_BE Left-to-Right [3,0] , . %d-%m-%Y %H:%M:%S %H:%M 1
20 base.lang_en_AU base.lang_nl English (AU) Dutch / Nederlands en_AU nl_NL en_AU nl Left-to-Right [3,0] . , , . %d/%m/%Y %d-%m-%Y %H:%M:%S %H:%M 7 1
21 base.lang_en_CA base.lang_en_AU English (CA) English (AU) en_CA en_AU en_CA en_AU Left-to-Right [3,0] . , %Y-%m-%d %d/%m/%Y %H:%M:%S %H:%M 7
22 base.lang_en_GB base.lang_en_CA English (UK) English (CA) en_GB en_CA en_GB en_CA Left-to-Right [3,0] . , %d/%m/%Y %Y-%m-%d %H:%M:%S %H:%M 1 7
23 base.lang_en_IN base.lang_en_GB English (IN) English (UK) en_IN en_GB en_IN en_GB Left-to-Right [3,2,0] [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7 1
24 base.lang_en_NZ base.lang_en_IN English (NZ) English (IN) en_NZ en_IN en_NZ en_IN Left-to-Right [3,0] [3,2,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
25 base.lang_et_EE base.lang_en_NZ Estonian / Eesti keel English (NZ) et_EE en_NZ et en_NZ Left-to-Right [3,0] , .   , %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1 7
26 base.lang_fi base.lang_et_EE Finnish / Suomi Estonian / Eesti keel fi_FI et_EE fi et Left-to-Right [3,0] ,   %d.%m.%Y %H.%M.%S %H:%M:%S %H:%M 1
27 base.lang_fr_BE base.lang_fi French (BE) / Français (BE) Finnish / Suomi fr_BE fi_FI fr_BE fi Left-to-Right [3,0] , .   %d/%m/%Y %d.%m.%Y %H:%M:%S %H.%M.%S %H.%M 1
28 base.lang_fr_CA base.lang_fr_BE French (CA) / Français (CA) French (BE) / Français (BE) fr_CA fr_BE fr_CA fr_BE Left-to-Right [3,0] ,   . %Y-%m-%d %d/%m/%Y %H:%M:%S %H:%M 7 1
29 base.lang_fr_CH base.lang_fr_CA French (CH) / Français (CH) French (CA) / Français (CA) fr_CH fr_CA fr_CH fr_CA Left-to-Right [3,0] . , '   %d.%m.%Y %Y-%m-%d %H:%M:%S %H:%M 1 7
30 base.lang_fr base.lang_fr_CH French / Français French (CH) / Français (CH) fr_FR fr_CH fr fr_CH Left-to-Right [3,0] , .   ' %d/%m/%Y %d.%m.%Y %H:%M:%S %H:%M 1
31 base.lang_gl_ES base.lang_fr Galician / Galego French / Français gl_ES fr_FR gl fr Left-to-Right [] [3,0] ,   %d/%m/%Y %H:%M:%S %H:%M 1
32 base.lang_ka_GE base.lang_gl_ES Georgian / ქართული ენა Galician / Galego ka_GE gl_ES ka gl Left-to-Right [3,0] [] , . %m/%d/%Y %d/%m/%Y %H:%M:%S %H:%M 1
33 base.lang_de base.lang_ka_GE German / Deutsch Georgian / ქართული ენა de_DE ka_GE de ka Left-to-Right [3,0] , . %d.%m.%Y %m/%d/%Y %H:%M:%S %H:%M 1
34 base.lang_de_CH base.lang_de German (CH) / Deutsch (CH) German / Deutsch de_CH de_DE de_CH de Left-to-Right [3,0] . , ' . %d.%m.%Y %H:%M:%S %H:%M 1
35 base.lang_el_GR base.lang_de_CH Greek / Ελληνικά German (CH) / Deutsch (CH) el_GR de_CH el_GR de_CH Left-to-Right [] [3,0] , . . ' %d/%m/%Y %d.%m.%Y %I:%M:%S %p %H:%M:%S %H:%M 1
36 base.lang_gu_IN base.lang_el_GR Gujarati / ગુજરાતી Greek / Ελληνικά gu_IN el_GR gu el_GR Left-to-Right [] . , , . %A %d %b %Y %d/%m/%Y %I:%M:%S %I:%M:%S %p %I:%M %p 7 1
37 base.lang_he_IL base.lang_gu_IN Hebrew / עברית Gujarati / ગુજરાતી he_IL gu_IN he gu Right-to-Left Left-to-Right [3,0] [] . , %d/%m/%Y %A %d %b %Y %H:%M:%S %I:%M:%S %I:%M 7
38 base.lang_hi_IN base.lang_he_IL Hindi / हिंदी Hebrew / עברית hi_IN he_IL hi he Left-to-Right Right-to-Left [] [3,0] . , %A %d %b %Y %d/%m/%Y %I:%M:%S %H:%M:%S %H:%M 7
39 base.lang_hu base.lang_hi_IN Hungarian / Magyar Hindi / हिंदी hu_HU hi_IN hu hi Left-to-Right [3,0] [] , . . , %Y-%m-%d %A %d %b %Y %H:%M:%S %I:%M:%S %I:%M 1 7
40 base.lang_id base.lang_hu Indonesian / Bahasa Indonesia Hungarian / Magyar id_ID hu_HU id hu Left-to-Right [3,0] , . %d/%m/%Y %Y-%m-%d %H:%M:%S %H:%M 7 1
41 base.lang_it base.lang_id Italian / Italiano Indonesian / Bahasa Indonesia it_IT id_ID it id Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1 7
42 base.lang_ja_JP base.lang_it Japanese / 日本語 Italian / Italiano ja_JP it_IT ja it Left-to-Right [3,0] . , , . %Y年%m月%d日 %d/%m/%Y %H時%M分%S秒 %H:%M:%S %H:%M 7 1
43 base.lang_kab_DZ base.lang_ja_JP Kabyle / Taqbaylit Japanese / 日本語 kab_DZ ja_JP kab ja Left-to-Right [] [3,0] . , %m/%d/%Y %Y年%m月%d日 %I:%M:%S %p %H時%M分%S秒 %H時%M分 6 7
44 base.lang_km base.lang_kab_DZ Khmer / ភាសាខ្មែរ Kabyle / Taqbaylit km_KH kab_DZ km kab Left-to-Right [3,0] [] . , %d %B %Y %m/%d/%Y %H:%M:%S %I:%M:%S %p %I:%M %p 7 6
45 base.lang_ko_KP base.lang_km Korean (KP) / 한국어 (KP) Khmer / ភាសាខ្មែរ ko_KP km_KH ko_KP km Left-to-Right [3,0] . , %m/%d/%Y %d %B %Y %I:%M:%S %p %H:%M:%S %H:%M 1 7
46 base.lang_ko_KR base.lang_ko_KP Korean (KR) / 한국어 (KR) Korean (KP) / 한국어 (KP) ko_KR ko_KP ko_KR ko_KP Left-to-Right [3,0] . , %Y년 %m월 %d일 %m/%d/%Y %H시 %M분 %S초 %I:%M:%S %p %I:%M %p 7 1
47 base.lang_lo_LA base.lang_ko_KR Lao / ພາສາລາວ Korean (KR) / 한국어 (KR) lo_LA ko_KR lo ko_KR Left-to-Right [3,0] . , %d/%m/%Y %Y년 %m월 %d일 %H:%M:%S %H시 %M분 %S초 %H시 %M분 7
48 base.lang_lv base.lang_lo_LA Latvian / latviešu valoda Lao / ພາສາລາວ lv_LV lo_LA lv lo Left-to-Right [3,0] , .   , %Y.%m.%d. %d/%m/%Y %H:%M:%S %H:%M 1 7
49 base.lang_lt base.lang_lv Lithuanian / Lietuvių kalba Latvian / latviešu valoda lt_LT lv_LV lt lv Left-to-Right [3,0] , .   %Y-%m-%d %Y.%m.%d. %H:%M:%S %H:%M 1
50 base.lang_lb base.lang_lt Luxembourgish Lithuanian / Lietuvių kalba lb_LU lt_LT lb lt Left-to-Right [3,0] ,   . %d/%m/%Y %Y-%m-%d %H:%M:%S %H:%M 1
51 base.lang_mk base.lang_lb Macedonian / македонски јазик Luxembourgish mk_MK lb_LU mk lb Left-to-Right [3,0] ,   %d.%m.%Y %d/%m/%Y %H:%M:%S %H:%M 1
52 base.lang_ml base.lang_mk Malayalam / മലയാളം Macedonian / македонски јазик ml_IN mk_MK ml mk Left-to-Right [3,0] ,   %d/%m/%Y %d.%m.%Y %H:%M:%S %H:%M 1
53 base.lang_mn base.lang_ml Mongolian / монгол Malayalam / മലയാളം mn_MN ml_IN mn ml Left-to-Right [3,0] . , '   %Y-%m-%d %d/%m/%Y %H:%M:%S %H:%M 7 1
54 base.lang_ms base.lang_mn Malay / Bahasa Melayu Mongolian / монгол ms_MY mn_MN ms mn Left-to-Right [3,0] . , ' %d/%m/%Y %Y-%m-%d %H:%M:%S %H:%M 1 7
55 base.lang_my base.lang_ms Burmese / ဗမာစာ Malay / Bahasa Melayu my_MM ms_MY my ms Left-to-Right [3,3] [3,0] . , %Y %b %d %A %d/%m/%Y %I:%M:%S %p %H:%M:%S %H:%M 7 1
56 base.lang_nb_NO base.lang_my Norwegian Bokmål / Norsk bokmål Burmese / ဗမာစာ nb_NO my_MM nb_NO my Left-to-Right [3,0] [3,3] , .   , %d.%m.%Y %Y %b %d %A %H:%M:%S %I:%M:%S %p %I:%M %p 1 7
57 base.lang_fa_IR base.lang_nb_NO Persian / فارسی Norwegian Bokmål / Norsk bokmål fa_IR nb_NO fa nb_NO Right-to-Left Left-to-Right [3,0] . , ,   %Y/%m/%d %d.%m.%Y %H:%M:%S %H:%M 6 1
58 base.lang_pl base.lang_fa_IR Polish / Język polski Persian / فارسی pl_PL fa_IR pl fa Left-to-Right Right-to-Left [] [3,0] , . , %d.%m.%Y %Y/%m/%d %H:%M:%S %H:%M 1 6
59 base.lang_pt_AO base.lang_pl Portuguese (AO) / Português (AO) Polish / Język polski pt_AO pl_PL pt_AO pl Left-to-Right [] , %d-%m-%Y %d.%m.%Y %H:%M:%S %H:%M 1
60 base.lang_pt_BR base.lang_pt_AO Portuguese (BR) / Português (BR) Portuguese (AO) / Português (AO) pt_BR pt_AO pt_BR pt_AO Left-to-Right [3,0] [] , . %d/%m/%Y %d-%m-%Y %H:%M:%S %H:%M 7 1
61 base.lang_pt base.lang_pt_BR Portuguese / Português Portuguese (BR) / Português (BR) pt_PT pt_BR pt pt_BR Left-to-Right [] [3,0] , . %d-%m-%Y %d/%m/%Y %H:%M:%S %H:%M 1 7
62 base.lang_ro base.lang_pt Romanian / română Portuguese / Português ro_RO pt_PT ro pt Left-to-Right [3,0] [] , . %d.%m.%Y %d-%m-%Y %H:%M:%S %H:%M 1
63 base.lang_ru base.lang_ro Russian / русский язык Romanian / română ru_RU ro_RO ru ro Left-to-Right [3,0] ,   . %d.%m.%Y %H:%M:%S %H:%M 1
64 base.lang_sr_RS base.lang_ru Serbian (Cyrillic) / српски Russian / русский язык sr_RS ru_RU sr_RS ru Left-to-Right [] [3,0] ,   %d.%m.%Y. %d.%m.%Y %H:%M:%S %H:%M 7 1
65 base.lang_sr@latin base.lang_sr@Cyrl Serbian (Latin) / srpski Serbian (Cyrillic) / српски sr@latin sr@Cyrl sr@latin sr@Cyrl Left-to-Right [] . , , %m/%d/%Y %d.%m.%Y. %I:%M:%S %p %H:%M:%S %H:%M 7
66 base.lang_sk base.lang_sr@latin Slovak / Slovenský jazyk Serbian (Latin) / srpski sk_SK sr@latin sk sr@latin Left-to-Right [3,0] [] , .   , %d.%m.%Y %m/%d/%Y %H:%M:%S %I:%M:%S %p %I:%M %p 1 7
67 base.lang_sl_SI base.lang_sk Slovenian / slovenščina Slovak / Slovenský jazyk sl_SI sk_SK sl sk Left-to-Right [] [3,0] ,   %d. %m. %Y %d.%m.%Y %H:%M:%S %H:%M 1
68 base.lang_es_419 base.lang_sl_SI Spanish (Latin America) / Español (América Latina) Slovenian / slovenščina es_419 sl_SI es_419 sl Left-to-Right [3,0] [] , .   %d/%m/%Y %d. %m. %Y %H:%M:%S %H:%M 1
69 base.lang_es_AR base.lang_es_419 Spanish (AR) / Español (AR) Spanish (Latin America) / Español (América Latina) es_AR es_419 es_AR es_419 Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7 1
70 base.lang_es_BO base.lang_es_AR Spanish (BO) / Español (BO) Spanish (AR) / Español (AR) es_BO es_AR es_BO es_AR Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1 7
71 base.lang_es_CL base.lang_es_BO Spanish (CL) / Español (CL) Spanish (BO) / Español (BO) es_CL es_BO es_CL es_BO Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1
72 base.lang_es_CO base.lang_es_CL Spanish (CO) / Español (CO) Spanish (CL) / Español (CL) es_CO es_CL es_CO es_CL Left-to-Right [3,0] , . %d-%m-%Y %d/%m/%Y %H:%M:%S %H:%M 7 1
73 base.lang_es_CR base.lang_es_CO Spanish (CR) / Español (CR) Spanish (CO) / Español (CO) es_CR es_CO es_CR es_CO Left-to-Right [3,0] . , , . %d/%m/%Y %d-%m-%Y %H:%M:%S %H:%M 1 7
74 base.lang_es_DO base.lang_es_CR Spanish (DO) / Español (DO) Spanish (CR) / Español (CR) es_DO es_CR es_DO es_CR Left-to-Right [3,0] . , %d/%m/%Y %I:%M:%S %p %H:%M:%S %H:%M 1
75 base.lang_es_EC base.lang_es_DO Spanish (EC) / Español (EC) Spanish (DO) / Español (DO) es_EC es_DO es_EC es_DO Left-to-Right [3,0] , . . , %d/%m/%Y %H:%M:%S %I:%M:%S %p %I:%M %p 1
76 base.lang_es_GT base.lang_es_EC Spanish (GT) / Español (GT) Spanish (EC) / Español (EC) es_GT es_EC es_GT es_EC Left-to-Right [3,0] . , , . %d/%m/%Y %H:%M:%S %H:%M 7 1
77 base.lang_es_MX base.lang_es_GT Spanish (MX) / Español (MX) Spanish (GT) / Español (GT) es_MX es_GT es_MX es_GT Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
78 base.lang_es_PA base.lang_es_MX Spanish (PA) / Español (PA) Spanish (MX) / Español (MX) es_PA es_MX es_PA es_MX Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
79 base.lang_es_PE base.lang_es_PA Spanish (PE) / Español (PE) Spanish (PA) / Español (PA) es_PE es_PA es_PE es_PA Left-to-Right [3,0] . , %d/%m/%Y %H:%M:%S %H:%M 7
80 base.lang_es_PY base.lang_es_PE Spanish (PY) / Español (PY) Spanish (PE) / Español (PE) es_PY es_PE es_PY es_PE Left-to-Right [3,0] , . . , %d/%m/%Y %H:%M:%S %H:%M 7
81 base.lang_es_UY base.lang_es_PY Spanish (UY) / Español (UY) Spanish (PY) / Español (PY) es_UY es_PY es_UY es_PY Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1 7
82 base.lang_es_VE base.lang_es_UY Spanish (VE) / Español (VE) Spanish (UY) / Español (UY) es_VE es_UY es_VE es_UY Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 7 1
83 base.lang_es base.lang_es_VE Spanish / Español Spanish (VE) / Español (VE) es_ES es_VE es es_VE Left-to-Right [3,0] , . %d/%m/%Y %H:%M:%S %H:%M 1 7
84 base.lang_sw base.lang_es Swahili / Kiswahili Spanish / Español sw es_ES sw es Left-to-Right [3,0] . , , . %d/%m/%Y %H:%M:%S %H:%M 1
85 base.lang_sv_SE base.lang_sw Swedish / Svenska Swahili / Kiswahili sv_SE sw sv sw Left-to-Right [3,0] , .   , %Y-%m-%d %d/%m/%Y %H:%M:%S %H:%M 1
86 base.lang_th base.lang_sv_SE Thai / ภาษาไทย Swedish / Svenska th_TH sv_SE th sv Left-to-Right [3,0] . , ,   %d/%m/%Y %Y-%m-%d %H:%M:%S %H:%M 7 1
87 base.lang_tl base.lang_th Tagalog / Filipino Thai / ภาษาไทย tl_PH th_TH tl th Left-to-Right [3,0] . , %m/%d/%y %d/%m/%Y %H:%M:%S %H:%M 1 7
88 base.lang_tr base.lang_tl Turkish / Türkçe Tagalog / Filipino tr_TR tl_PH tr tl Left-to-Right [3,0] , . . , %d-%m-%Y %m/%d/%y %H:%M:%S %H:%M 1
89 base.lang_uk_UA base.lang_tr Ukrainian / українська Turkish / Türkçe uk_UA tr_TR uk tr Left-to-Right [3,0] ,   . %d.%m.%Y %d-%m-%Y %H:%M:%S %H:%M 1
90 base.lang_vi_VN base.lang_uk_UA Vietnamese / Tiếng Việt Ukrainian / українська vi_VN uk_UA vi uk Left-to-Right [3,0] , .   %d/%m/%Y %d.%m.%Y %H:%M:%S %H:%M 1
91 base.lang_sq_AL base.lang_vi_VN Albanian / Shqip Vietnamese / Tiếng Việt sq_AL vi_VN sq vi Left-to-Right [3,0] , . %Y-%b-%d %d/%m/%Y %I.%M.%S. %H:%M:%S %H:%M 1
92 base.lang_te_IN base.lang_sq_AL Telugu / తెలుగు Albanian / Shqip te_IN sq_AL te sq Left-to-Right [] [3,0] . , , . %B %d %A %Y %Y-%b-%d %p%I.%M.%S %I.%M.%S. %I.%M. 7 1
93 base.lang_te_IN Telugu / తెలుగు te_IN te Left-to-Right [] . , %B %d %A %Y %p%I.%M.%S %p%I.%M 7

View file

@ -180,6 +180,7 @@
<record id="bn" model="res.country">
<field name="name">Brunei Darussalam</field>
<field name="code">bn</field>
<field eval="'%(street)s\n%(street2)s\n%(city)s %(zip)s\n%(country_name)s'" name="address_format" />
<field name="currency_id" ref="BND" />
<field eval="673" name="phone_code" />
</record>
@ -1104,6 +1105,7 @@
<field name="code">om</field>
<field name="currency_id" ref="OMR" />
<field eval="968" name="phone_code" />
<field name="address_format" eval="'%(street)s\n%(street2)s\n%(city)s %(state_name)s %(zip)s\n%(country_name)s'" />
</record>
<record id="pa" model="res.country">
<field name="name">Panama</field>
@ -1111,6 +1113,7 @@
<field name="currency_id" ref="PAB" />
<field eval="507" name="phone_code" />
<field name="address_format" eval="'%(street)s\n%(street2)s\n%(city)s %(state_name)s %(zip)s\n%(country_name)s'" />
<field name="vat_label">RUC</field>
</record>
<record id="pe" model="res.country">
<field name="name">Peru</field>
@ -1143,6 +1146,7 @@
<field name="code">pk</field>
<field name="currency_id" ref="PKR" />
<field eval="92" name="phone_code" />
<field name="vat_label">NTN</field>
</record>
<record id="pl" model="res.country">
<field name="name">Poland</field>

View file

@ -726,12 +726,6 @@
<field name="rate">1105.24376765</field>
</record>
<record forcecreate="0" id="rateZWL" model="res.currency.rate">
<field name="currency_id" ref="ZWL" />
<field name="name">2010-01-01</field>
<field name="rate">395.80</field>
</record>
<record forcecreate="0" id="rateZIG" model="res.currency.rate">
<field name="currency_id" ref="ZIG" />
<field name="name">2024-04-08</field>

View file

@ -1,9 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<odoo>
<data noupdate="1">
<!-- es_419 is the new "generic" spanish -->
<record id="base.lang_es" model="res.lang">
<field name="url_code">es_ES</field>
</record>
<record id="base.lang_es_419" model="res.lang">
<field name="url_code">es</field>
</record>
<function name="install_lang" model="res.lang"/>
</data>
<data>
<!-- /my is for the portal -->
<record id="base.lang_my" model="res.lang">
<field name="url_code">mya</field>
</record>

View file

@ -100,6 +100,20 @@
<field name="vat">US12345675</field>
</record>
<record id="res_partner_5" model="res.partner">
<field name="city">Florenville</field>
<field name="country_id" ref="base.be"/>
<field name="email">wow@example.com</field>
<field name="image_1920" file="base/static/img/partner_open_wood.png" type="base64"/>
<field name="is_company" eval="True"/>
<field name="mobile">+32 987 65 43 21</field>
<field name="name">OpenWood</field>
<field name="phone">+32 987 65 43 21</field>
<field name="street">Orval 1</field>
<field name="website">www.openwood.example.com</field>
<field name="zip">6823</field>
</record>
<record id="res_partner_10" model="res.partner">
<field name="name">The Jackson Group</field>
<field name="is_company">1</field>

View file

@ -23,6 +23,10 @@ System</span>]]></field>
Administrator</span>]]></field>
</record>
<record id="user_admin_settings" model="res.users.settings" forcecreate="0">
<field name="user_id" ref="base.user_admin"/>
</record>
<!-- Default user with full access rights for newly created users -->
<record id="default_user" model="res.users">
<field name="name">Default User Template</field>

View file

@ -42,6 +42,10 @@
<field name="image_1920" type="base64" file="base/static/img/user_demo-image.png"/>
</record>
<record id="partner_demo" model="res.partner">
<field name="user_id" ref="base.user_demo"/>
</record>
<record model="res.partner" id="base.partner_root">
<field name="tz">Europe/Brussels</field>
</record>
@ -57,6 +61,7 @@
<field name="phone">+1 555-555-5555</field>
<field name="email">admin@yourcompany.example.com</field>
<field name="tz">Europe/Brussels</field>
<field name="user_id" ref="base.user_admin"/>
<field name="image_1920" type="base64" file="base/static/img/partner_root-image.png"/>
</record>

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -8,6 +8,7 @@ from . import ir_ui_menu
from . import ir_ui_view
from . import ir_asset
from . import ir_actions
from . import ir_embedded_actions
from . import ir_actions_report
from . import ir_attachment
from . import ir_binary
@ -24,7 +25,6 @@ from . import ir_qweb
from . import ir_qweb_fields
from . import ir_http
from . import ir_logging
from . import ir_property
from . import ir_module
from . import ir_demo
from . import ir_demo_failure
@ -45,5 +45,6 @@ from . import res_company
from . import res_users
from . import res_users_settings
from . import res_users_deletion
from . import res_device
from . import decimal_precision

View file

@ -1,22 +1,16 @@
# -*- coding: utf-8 -*-
from contextlib import closing
from collections import OrderedDict
from datetime import datetime
from lxml import etree
from subprocess import Popen, PIPE
import base64
import copy
import hashlib
import io
import itertools
import json
import logging
import os
import re
import textwrap
import uuid
import psycopg2
try:
import sass as libsass
except ImportError:
@ -29,11 +23,10 @@ from rjsmin import jsmin as rjsmin
from odoo import release, SUPERUSER_ID, _
from odoo.http import request
from odoo.tools import (func, misc, transpile_javascript,
is_odoo_module, SourceMapGenerator, profiler,
apply_inheritance_specs)
is_odoo_module, SourceMapGenerator, profiler, OrderedSet)
from odoo.tools.json import scriptsafe as json
from odoo.tools.constants import SCRIPT_EXTENSIONS, STYLE_EXTENSIONS
from odoo.tools.misc import file_open, file_path
from odoo.tools.pycompat import to_text
_logger = logging.getLogger(__name__)
@ -49,6 +42,8 @@ class AssetError(Exception):
class AssetNotFound(AssetError):
pass
class XMLAssetError(Exception):
pass
class AssetsBundle(object):
rx_css_import = re.compile("(@import[^;{]+;?)", re.M)
@ -326,22 +321,20 @@ class AssetsBundle(object):
if not js_attachment:
template_bundle = ''
if self.templates:
content = ['<?xml version="1.0" encoding="UTF-8"?>']
content.append('<templates xml:space="preserve">')
content.append(self.xml(show_inherit_info=not is_minified))
content.append('</templates>')
templates = '\n'.join(content).replace("\\", "\\\\").replace("`", "\\`").replace("${", "\\${")
templates = self.generate_xml_bundle()
template_bundle = textwrap.dedent(f"""
/*******************************************
* Templates *
*******************************************/
odoo.define('{self.name}.bundle.xml', ['@web/core/registry'], function(require){{
'use strict';
const {{ registry }} = require('@web/core/registry');
registry.category(`xml_templates`).add(`{self.name}`, `{templates}`);
}});""")
odoo.define("{self.name}.bundle.xml", ["@web/core/templates"], function(require) {{
"use strict";
const {{ checkPrimaryTemplateParents, registerTemplate, registerTemplateExtension }} = require("@web/core/templates");
/* {self.name} */
{templates}
}});
""")
if is_minified:
content_bundle = ';\n'.join(asset.minify() for asset in self.javascripts)
@ -394,29 +387,64 @@ class AssetsBundle(object):
return js_attachment
def xml(self, show_inherit_info=False):
def generate_xml_bundle(self):
content = []
blocks = []
try:
blocks = self.xml()
except XMLAssetError as e:
content.append(f'throw new Error({json.dumps(str(e))});')
def get_template(element):
element.set("{http://www.w3.org/XML/1998/namespace}space", "preserve")
string = etree.tostring(element, encoding='unicode')
return string.replace("\\", "\\\\").replace("`", "\\`").replace("${", "\\${")
names = OrderedSet()
primary_parents = OrderedSet()
extension_parents = OrderedSet()
for block in blocks:
if block["type"] == "templates":
for (element, url, inherit_from) in block["templates"]:
if inherit_from:
primary_parents.add(inherit_from)
name = element.get("t-name")
names.add(name)
template = get_template(element)
content.append(f'registerTemplate("{name}", `{url}`, `{template}`);')
else:
for inherit_from, elements in block["extensions"].items():
extension_parents.add(inherit_from)
for (element, url) in elements:
template = get_template(element)
content.append(f'registerTemplateExtension("{inherit_from}", `{url}`, `{template}`);')
missing_names_for_primary = primary_parents - names
if missing_names_for_primary:
content.append(f'checkPrimaryTemplateParents({json.dumps(list(missing_names_for_primary))});')
missing_names_for_extension = extension_parents - names
if missing_names_for_extension:
content.append(f'console.error("Missing (extension) parent templates: {", ".join(missing_names_for_extension)}");')
return '\n'.join(content)
def xml(self):
"""
Create the ir.attachment representing the content of the bundle XML.
The xml contents are loaded and parsed with etree. Inheritances are
applied in the order of files and templates.
Create a list of blocks. A block can have one of the two types "templates" or "extensions".
A template with no parent or template with t-inherit-mode="primary" goes in a block of type "templates".
A template with t-inherit-mode="extension" goes in a block of type "extensions".
Used parsed attributes:
* `t-name`: template name
* `t-inherit`: inherited template name. The template use the
`apply_inheritance_specs` method from `ir.ui.view` to apply
inheritance (with xpath and position).
* 't-inherit-mode': 'primary' to create a new template with the
update, or 'extension' to apply the update on the inherited
template.
* `t-extend` deprecated attribute, used by the JavaScript Qweb.
* `t-inherit`: inherited template name.
* 't-inherit-mode': 'primary' or 'extension'.
:param show_inherit_info: if true add the file url and inherit
information in the template.
:return ir.attachment representing the content of the bundle XML
:return a list of blocks
"""
template_dict = OrderedDict()
parser = etree.XMLParser(ns_clean=True, recover=True, remove_comments=True)
blocks = []
block = None
for asset in self.templates:
# Load content.
try:
@ -425,106 +453,36 @@ class AssetsBundle(object):
io_content = io.BytesIO(template.encode('utf-8'))
content_templates_tree = etree.parse(io_content, parser=parser).getroot()
except etree.ParseError as e:
_logger.error("Could not parse file %s: %s", asset.url, e.msg)
raise
addon = asset.url.split('/')[1]
template_dict.setdefault(addon, OrderedDict())
return asset.generate_error(f'Could not parse file: {e.msg}')
# Process every templates.
for template_tree in list(content_templates_tree):
template_name = None
if 't-name' in template_tree.attrib:
template_name = template_tree.attrib['t-name']
dotted_names = template_name.split('.', 1)
if len(dotted_names) > 1 and dotted_names[0] == addon:
template_name = dotted_names[1]
if 't-inherit' in template_tree.attrib:
inherit_mode = template_tree.attrib.get('t-inherit-mode', 'primary')
template_name = template_tree.get("t-name")
inherit_from = template_tree.get("t-inherit")
inherit_mode = None
if inherit_from:
inherit_mode = template_tree.get('t-inherit-mode', 'primary')
if inherit_mode not in ['primary', 'extension']:
raise ValueError(_("Invalid inherit mode. Module %r and template name %r", addon, template_name))
# Get inherited template, the identifier can be "addon.name", just "name" or (silly) "just.name.with.dots"
parent_dotted_name = template_tree.attrib['t-inherit']
split_name_attempt = parent_dotted_name.split('.', 1)
parent_addon, parent_name = split_name_attempt if len(split_name_attempt) == 2 else (addon, parent_dotted_name)
if parent_addon not in template_dict:
if parent_dotted_name in template_dict[addon]:
parent_addon = addon
parent_name = parent_dotted_name
else:
raise ValueError(_("Module %r not loaded or inexistent (try to inherit %r), or templates of addon being loaded %r are misordered (template %r)", parent_addon, parent_name, addon, template_name))
if parent_name not in template_dict[parent_addon]:
raise ValueError(_("Cannot create %r because the template to inherit %r is not found.", '%s.%s' % (addon, template_name), '%s.%s' % (parent_addon, parent_name)))
# After several performance tests, we found out that deepcopy is the most efficient
# solution in this case (compared with copy, xpath with '.' and stringifying).
parent_tree, parent_urls = template_dict[parent_addon][parent_name]
parent_tree = copy.deepcopy(parent_tree)
if show_inherit_info:
# Add inheritance information as xml comment for debugging.
xpaths = []
for item in template_tree:
position = item.get('position')
attrib = dict(**item.attrib)
attrib.pop('position', None)
comment = etree.Comment(f""" Filepath: {asset.url} ; position="{position}" ; {attrib} """)
if position == "attributes":
if item.get('expr'):
comment_node = etree.Element('xpath', {'expr': item.get('expr'), 'position': 'before'})
else:
comment_node = etree.Element(item.tag, item.attrib)
comment_node.attrib['position'] = 'before'
comment_node.append(comment)
xpaths.append(comment_node)
else:
if len(item) > 0:
item[0].addprevious(comment)
else:
item.append(comment)
xpaths.append(item)
else:
xpaths = list(template_tree)
# Apply inheritance.
if inherit_mode == 'primary':
parent_tree.tag = template_tree.tag
inherited_template = apply_inheritance_specs(parent_tree, xpaths)
if inherit_mode == 'primary': # New template_tree: A' = B(A)
for attr_name, attr_val in template_tree.attrib.items():
if attr_name not in ('t-inherit', 't-inherit-mode'):
inherited_template.set(attr_name, attr_val)
if not template_name:
raise ValueError(_("Template name is missing in file %r.", asset.url))
template_dict[addon][template_name] = (inherited_template, parent_urls + [asset.url])
else: # Modifies original: A = B(A)
template_dict[parent_addon][parent_name] = (inherited_template, parent_urls + [asset.url])
addon = asset.url.split('/')[1]
return asset.generate_error(_(
'Invalid inherit mode. Module "%(module)s" and template name "%(template_name)s"',
module=addon,
template_name=template_name,
))
if inherit_mode == "extension":
if block is None or block["type"] != "extensions":
block = {"type": "extensions", "extensions": OrderedDict()}
blocks.append(block)
block["extensions"].setdefault(inherit_from, [])
block["extensions"][inherit_from].append((template_tree, asset.url))
elif template_name:
if template_name in template_dict[addon]:
raise ValueError(_("Template %r already exists in module %r", template_name, addon))
template_dict[addon][template_name] = (template_tree, [asset.url])
elif template_tree.attrib.get('t-extend'):
template_name = '%s__extend_%s' % (template_tree.attrib.get('t-extend'), len(template_dict[addon]))
template_dict[addon][template_name] = (template_tree, [asset.url])
if block is None or block["type"] != "templates":
block = {"type": "templates", "templates": []}
blocks.append(block)
block["templates"].append((template_tree, asset.url, inherit_from))
else:
raise ValueError(_("Template name is missing in file %r.", asset.url))
return asset.generate_error(_("Template name is missing."))
return blocks
# Concat and render inherited templates
root = etree.Element('root')
for addon in template_dict.values():
for template, urls in addon.values():
if show_inherit_info:
tail = "\n"
if len(root) > 0:
tail = root[-1].tail
root[-1].tail = "\n\n"
comment = etree.Comment(f""" Filepath: {' => '.join(urls)} """)
comment.tail = tail
root.append(comment)
root.append(template)
# Returns the string by removing the <root> tag.
return etree.tostring(root, encoding='unicode')[6:-7]
def css(self):
is_minified = not self.is_debug_assets
@ -652,7 +610,7 @@ css_error_message {
"""Sanitizes @import rules, remove duplicates @import rules, then compile"""
imports = []
def handle_compile_error(e, source):
error = self.get_preprocessor_error(e, source=source)
error = self.get_preprocessor_error(str(e), source=source)
_logger.warning(error)
self.css_errors.append(error)
return ''
@ -668,7 +626,6 @@ css_error_message {
return ''
source = re.sub(self.rx_preprocess_imports, sanitize, source)
compiled = ''
try:
compiled = compiler(source)
except CompileError as e:
@ -700,7 +657,7 @@ css_error_message {
cmd = [rtlcss, '-c', file_path("base/data/rtlcss.json"), '-']
try:
rtlcss = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
rtlcss = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, encoding='utf-8')
except Exception:
# Check the presence of rtlcss, if rtlcss not available then we should return normal less file
@ -717,23 +674,20 @@ css_error_message {
self.css_errors.append(msg)
return ''
stdout, stderr = rtlcss.communicate(input=source.encode('utf-8'))
if rtlcss.returncode or (source and not stdout):
cmd_output = ''.join(misc.ustr(stderr))
if not cmd_output and rtlcss.returncode:
cmd_output = "Process exited with return code %d\n" % rtlcss.returncode
elif not cmd_output:
cmd_output = "rtlcss: error processing payload\n"
error = self.get_rtlcss_error(cmd_output, source=source)
_logger.warning(error)
out, err = rtlcss.communicate(input=source)
if rtlcss.returncode or (source and not out):
if rtlcss.returncode:
error = self.get_rtlcss_error(err or f"Process exited with return code {rtlcss.returncode}", source=source)
else:
error = "rtlcss: error processing payload\n"
_logger.warning("%s", error)
self.css_errors.append(error)
return ''
rtlcss_result = stdout.strip().decode('utf8')
return rtlcss_result
return out.strip()
def get_preprocessor_error(self, stderr, source=None):
"""Improve and remove sensitive information from sass/less compilator error messages"""
error = misc.ustr(stderr).split('Load paths')[0].replace(' Use --trace for backtrace.', '')
error = stderr.split('Load paths')[0].replace(' Use --trace for backtrace.', '')
if 'Cannot load compass' in error:
error += "Maybe you should install the compass gem using this extra argument:\n\n" \
" $ sudo gem install compass --pre\n"
@ -745,8 +699,8 @@ css_error_message {
def get_rtlcss_error(self, stderr, source=None):
"""Improve and remove sensitive information from sass/less compilator error messages"""
error = misc.ustr(stderr).split('Load paths')[0].replace(' Use --trace for backtrace.', '')
error += "This error occurred while compiling the bundle '%s' containing:" % self.name
error = stderr.split('Load paths')[0].replace(' Use --trace for backtrace.', '')
error = f"{error}This error occurred while compiling the bundle {self.name!r} containing:"
return error
@ -765,6 +719,11 @@ class WebAsset(object):
if not inline and not url:
raise Exception("An asset should either be inlined or url linked, defined in bundle '%s'" % bundle.name)
def generate_error(self, msg):
    """Decorate *msg* with this asset's URL, log it server-side, and return it."""
    formatted = f'{msg!r} in file {self.url!r}'
    _logger.error(formatted)  # log it in the python console in all cases.
    return formatted
@func.lazy_property
def id(self):
if self._id is None: self._id = str(uuid.uuid4())
@ -840,6 +799,10 @@ class JavascriptAsset(WebAsset):
self._is_transpiled = None
self._converted_content = None
def generate_error(self, msg):
    """Return a JS snippet that reports the (logged) asset error in the browser console."""
    return f'console.error({json.dumps(super().generate_error(msg))});'
@property
def bundle_version(self):
return self.bundle.get_version('js')
@ -847,7 +810,7 @@ class JavascriptAsset(WebAsset):
@property
def is_transpiled(self):
if self._is_transpiled is None:
self._is_transpiled = bool(is_odoo_module(super().content))
self._is_transpiled = bool(is_odoo_module(self.url, super().content))
return self._is_transpiled
@property
@ -866,7 +829,7 @@ class JavascriptAsset(WebAsset):
try:
return super()._fetch_content()
except AssetError as e:
return u"console.error(%s);" % json.dumps(to_text(e))
return self.generate_error(str(e))
def with_header(self, content=None, minimal=True):
@ -898,17 +861,21 @@ class XMLAsset(WebAsset):
try:
content = super()._fetch_content()
except AssetError as e:
return u"console.error(%s);" % json.dumps(to_text(e))
return self.generate_error(str(e))
parser = etree.XMLParser(ns_clean=True, remove_comments=True, resolve_entities=False)
try:
root = etree.fromstring(content.encode('utf-8'), parser=parser)
except etree.XMLSyntaxError as e:
return f'<t t-name="parsing_error{self.url.replace("/","_")}"><parsererror>Invalid XML template: {self.url} \n {e.msg} </parsererror></t>'
return self.generate_error(f'Invalid XML template: {e.msg}')
if root.tag in ('templates', 'template'):
return ''.join(etree.tostring(el, encoding='unicode') for el in root)
return etree.tostring(root, encoding='unicode')
def generate_error(self, msg):
    """Log the asset error, then raise: XML assets have no graceful fallback."""
    raise XMLAssetError(super().generate_error(msg))
@property
def bundle_version(self):
return self.bundle.get_version('js')
@ -1008,17 +975,17 @@ class PreprocessedCSS(StylesheetAsset):
command = self.get_command()
try:
compiler = Popen(command, stdin=PIPE, stdout=PIPE,
stderr=PIPE)
stderr=PIPE, encoding='utf-8')
except Exception:
raise CompileError("Could not execute command %r" % command[0])
(out, err) = compiler.communicate(input=source.encode('utf-8'))
out, err = compiler.communicate(input=source)
if compiler.returncode:
cmd_output = misc.ustr(out) + misc.ustr(err)
cmd_output = out + err
if not cmd_output:
cmd_output = u"Process exited with return code %d\n" % compiler.returncode
raise CompileError(cmd_output)
return out.decode('utf8')
return out
class SassStylesheetAsset(PreprocessedCSS):
rx_indent = re.compile(r'^( +|\t+)', re.M)

View file

@ -15,6 +15,7 @@ import logging
from operator import getitem
import requests
import json
import re
import contextlib
from pytz import timezone
@ -55,9 +56,12 @@ class IrActions(models.Model):
_order = 'name'
_allow_sudo_commands = False
_sql_constraints = [('path_unique', 'unique(path)', "Path to show in the URL must be unique! Please choose another one.")]
name = fields.Char(string='Action Name', required=True, translate=True)
type = fields.Char(string='Action Type', required=True)
xml_id = fields.Char(compute='_compute_xml_id', string="External ID")
path = fields.Char(string="Path to show in the URL")
help = fields.Html(string='Action Description',
help='Optional help text for the users with a description of the target view, such as its usage and purpose.',
translate=True)
@ -68,6 +72,30 @@ class IrActions(models.Model):
required=True, default='action')
binding_view_types = fields.Char(default='list,form')
@api.constrains('path')
def _check_path(self):
    """Validate the URL `path` slug: allowed characters, reserved names/prefixes,
    and uniqueness across every action table.

    :raises ValidationError: when a path is malformed, reserved, or duplicated
    """
    for record in self.filtered('path'):
        path = record.path
        if not re.fullmatch(r'[a-z][a-z0-9_-]*', path):
            raise ValidationError(_('The path should contain only lowercase alphanumeric characters, underscore, and dash, and it should start with a letter.'))
        if path.startswith("m-"):
            raise ValidationError(_("'m-' is a reserved prefix."))
        if path.startswith("action-"):
            raise ValidationError(_("'action-' is a reserved prefix."))
        if path == "new":
            raise ValidationError(_("'new' is reserved, and can not be used as path."))
        # Tables ir_act_window, ir_act_report_xml, ir_act_url, ir_act_server and
        # ir_act_client inherit from table ir_actions (see base_data.sql), and
        # PostgreSQL unique indexes only apply per table, never across an
        # inheritance tree. The uniqueness of the path across all the tables
        # therefore has to be checked manually here; the per-table unique
        # constraint is kept as a cheap first-level check.
        # See: https://www.postgresql.org/docs/14/ddl-inherit.html#DDL-INHERIT-CAVEATS
        if self.env['ir.actions.actions'].search_count([('path', '=', path)]) > 1:
            raise ValidationError(_("Path to show in the URL must be unique! Please choose another one."))
def _compute_xml_id(self):
res = self.get_external_id()
for record in self:
@ -132,7 +160,7 @@ class IrActions(models.Model):
for action in all_actions:
action = dict(action)
groups = action.pop('groups_id', None)
if groups and not self.user_has_groups(groups):
if groups and not any(self.env.user.has_group(ext_id) for ext_id in groups):
# the user may not perform this action
continue
res_model = action.pop('res_model', None)
@ -167,13 +195,16 @@ class IrActions(models.Model):
try:
action = self.env[action_model].sudo().browse(action_id)
fields = ['name', 'binding_view_types']
for field in ('groups_id', 'res_model', 'sequence'):
for field in ('groups_id', 'res_model', 'sequence', 'domain'):
if field in action._fields:
fields.append(field)
action = action.read(fields)[0]
if action.get('groups_id'):
# transform the list of ids into a list of xml ids
groups = self.env['res.groups'].browse(action['groups_id'])
action['groups_id'] = ','.join(ext_id for ext_id in groups._ensure_xml_id().values())
action['groups_id'] = list(groups._ensure_xml_id().values())
if 'domain' in action and not action.get('domain'):
action.pop('domain')
result[binding_type].append(frozendict(action))
except (MissingError):
continue
@ -217,6 +248,7 @@ class IrActions(models.Model):
return {
"binding_model_id", "binding_type", "binding_view_types",
"display_name", "help", "id", "name", "type", "xml_id",
"path",
}
@ -232,9 +264,9 @@ class IrActionsActWindow(models.Model):
def _check_model(self):
for action in self:
if action.res_model not in self.env:
raise ValidationError(_('Invalid model name %r in action definition.', action.res_model))
raise ValidationError(_('Invalid model name %s in action definition.', action.res_model))
if action.binding_model_id and action.binding_model_id.model not in self.env:
raise ValidationError(_('Invalid model name %r in action definition.', action.binding_model_id.model))
raise ValidationError(_('Invalid model name %s in action definition.', action.binding_model_id.model))
@api.depends('view_ids.view_mode', 'view_mode', 'view_id.type')
def _compute_views(self):
@ -265,7 +297,7 @@ class IrActionsActWindow(models.Model):
if len(modes) != len(set(modes)):
raise ValidationError(_('The modes in view_mode must not be duplicated: %s', modes))
if ' ' in modes:
raise ValidationError(_('No spaces allowed in view_mode: %r', modes))
raise ValidationError(_('No spaces allowed in view_mode: %s', modes))
type = fields.Char(default="ir.actions.act_window")
view_id = fields.Many2one('ir.ui.view', string='View Ref.', ondelete='set null')
@ -277,8 +309,8 @@ class IrActionsActWindow(models.Model):
res_model = fields.Char(string='Destination Model', required=True,
help="Model name of the object to open in the view window")
target = fields.Selection([('current', 'Current Window'), ('new', 'New Window'), ('inline', 'Inline Edit'), ('fullscreen', 'Full Screen'), ('main', 'Main action of Current Window')], default="current", string='Target Window')
view_mode = fields.Char(required=True, default='tree,form',
help="Comma-separated list of allowed view modes, such as 'form', 'tree', 'calendar', etc. (Default: tree,form)")
view_mode = fields.Char(required=True, default='list,form',
help="Comma-separated list of allowed view modes, such as 'form', 'list', 'calendar', etc. (Default: list,form)")
mobile_view_mode = fields.Char(default="kanban", help="First view mode in mobile and small screen environments (default='kanban'). If it can't be found among available view modes, the same mode as for wider screens is used)")
usage = fields.Char(string='Action Usage',
help="Used to filter menu and home actions from the user form.")
@ -291,8 +323,14 @@ class IrActionsActWindow(models.Model):
groups_id = fields.Many2many('res.groups', 'ir_act_window_group_rel',
'act_id', 'gid', string='Groups')
search_view_id = fields.Many2one('ir.ui.view', string='Search View Ref.')
embedded_action_ids = fields.One2many('ir.embedded.actions', compute="_compute_embedded_actions")
filter = fields.Boolean()
def _compute_embedded_actions(self):
    """Populate `embedded_action_ids` with the visible embedded actions of each action."""
    visible_embedded = self.env["ir.embedded.actions"].search(
        [('parent_action_id', 'in', self.ids)]
    ).filtered(lambda emb: emb.is_visible)
    for act in self:
        act.embedded_action_ids = visible_embedded.filtered(
            lambda emb: emb.parent_action_id == act
        )
def read(self, fields=None, load='_classic_read'):
""" call the method get_empty_list_help of the model and set the window action help message
"""
@ -336,20 +374,33 @@ class IrActionsActWindow(models.Model):
def _get_readable_fields(self):
return super()._get_readable_fields() | {
"context", "mobile_view_mode", "domain", "filter", "groups_id", "limit",
"res_id", "res_model", "search_view_id", "target", "view_id", "view_mode", "views",
"res_id", "res_model", "search_view_id", "target", "view_id", "view_mode", "views", "embedded_action_ids",
# `flags` is not a real field of ir.actions.act_window but is used
# to give the parameters to generate the action
"flags"
"flags",
# this is used by frontend, with the document layout wizard before send and print
"close_on_report_download",
}
def _get_action_dict(self):
    """ Override to return action content with detailed embedded actions data if available.
    :return: A dict with updated action dictionary including embedded actions information.
    """
    action_dict = super()._get_action_dict()
    embedded_ids = action_dict["embedded_action_ids"]
    if embedded_ids:
        # Replace the raw id list by full record data so the client needs no extra RPC.
        Embedded = self.env["ir.embedded.actions"]
        readable_fields = Embedded._get_readable_fields()
        action_dict["embedded_action_ids"] = Embedded.browse(embedded_ids).read(readable_fields)
    return action_dict
VIEW_TYPES = [
('tree', 'Tree'),
('list', 'List'),
('form', 'Form'),
('graph', 'Graph'),
('pivot', 'Pivot'),
('calendar', 'Calendar'),
('gantt', 'Gantt'),
('kanban', 'Kanban'),
]
@ -463,6 +514,7 @@ class IrActionsServer(models.Model):
# - records: recordset of all records on which the action is triggered in multi-mode; may be void
# - time, datetime, dateutil, timezone: useful Python libraries
# - float_compare: utility function to compare floats based on specific precision
# - b64encode, b64decode: functions to encode/decode binary data
# - log: log(message, level='info'): logging function to record debug information in ir.logging table
# - _logger: _logger.info(message): logger to emit messages in server logs
# - UserError: exception class for raising user-facing warning messages
@ -697,15 +749,15 @@ class IrActionsServer(models.Model):
action.webhook_sample_payload = False
continue
payload = {
'id': 1,
'_id': 1,
'_model': self.model_id.model,
'_name': action.name,
'_action': f'{action.name}(#{action.id})',
}
if self.model_id:
sample_record = self.env[self.model_id.model].with_context(active_test=False).search([], limit=1)
for field in action.webhook_field_ids:
if sample_record:
payload['id'] = sample_record.id
payload['_id'] = sample_record.id
payload.update(sample_record.read(self.webhook_field_ids.mapped('name'), load=None)[0])
else:
payload[field.name] = WEBHOOK_SAMPLE_VALUES[field.ttype] if field.ttype in WEBHOOK_SAMPLE_VALUES else WEBHOOK_SAMPLE_VALUES[None]
@ -725,8 +777,8 @@ class IrActionsServer(models.Model):
raise ValidationError(msg)
@api.constrains('child_ids')
def _check_recursion(self):
if not self._check_m2m_recursion('child_ids'):
def _check_child_recursion(self):
if self._has_cycle('child_ids'):
raise ValidationError(_('Recursion found in child server actions'))
def _get_readable_fields(self):
@ -767,7 +819,7 @@ class IrActionsServer(models.Model):
def unlink_action(self):
""" Remove the contextual actions created for the server actions. """
self.check_access_rights('write', raise_exception=True)
self.check_access('write')
self.filtered('binding_model_id').write({'binding_model_id': False})
return True
@ -790,7 +842,7 @@ class IrActionsServer(models.Model):
record_cached = self._context['onchange_self']
for field, new_value in res.items():
record_cached[field] = new_value
else:
elif self.update_path:
starting_record = self.env[self.model_id.model].browse(self._context.get('active_id'))
_, _, target_records = self._traverse_path(record=starting_record)
target_records.write(res)
@ -915,7 +967,7 @@ class IrActionsServer(models.Model):
else:
model_name = action.model_id.model
try:
self.env[model_name].check_access_rights("write")
self.env[model_name].check_access("write")
except AccessError:
_logger.warning("Forbidden server action %r executed while the user %s does not have access to %s.",
action.name, self.env.user.login, model_name,
@ -925,11 +977,11 @@ class IrActionsServer(models.Model):
eval_context = self._get_eval_context(action)
records = eval_context.get('record') or eval_context['model']
records |= eval_context.get('records') or eval_context['model']
if records.ids:
if not action_groups and records.ids:
# check access rules on real records only; base automations of
# type 'onchange' can run server actions on new records
try:
records.check_access_rule('write')
records.check_access('write')
except AccessError:
_logger.warning("Forbidden server action %r executed while the user %s does not have access to %s.",
action.name, self.env.user.login, records,
@ -1016,6 +1068,8 @@ class IrActionsServer(models.Model):
elif action.update_field_id.ttype in ['many2one', 'integer']:
try:
expr = int(action.value)
if expr == 0 and action.update_field_id.ttype == 'many2one':
expr = False
except Exception:
pass
elif action.update_field_id.ttype == 'float':
@ -1026,9 +1080,11 @@ class IrActionsServer(models.Model):
def copy_data(self, default=None):
default = default or {}
vals_list = super().copy_data(default=default)
if not default.get('name'):
default['name'] = _('%s (copy)', self.name)
return super().copy_data(default=default)
for vals in vals_list:
vals['name'] = _('%s (copy)', vals.get('name', ''))
return vals_list
class IrActionsTodo(models.Model):
"""

View file

@ -1,15 +1,18 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
from contextlib import ExitStack
from markupsafe import Markup
from urllib.parse import urlparse, parse_qs, urlencode
from urllib.parse import urlparse
from odoo import api, fields, models, tools, SUPERUSER_ID, _
from odoo.exceptions import UserError, AccessError, RedirectWarning
from odoo.service import security
from odoo.tools.safe_eval import safe_eval, time
from odoo.tools.misc import find_in_path, ustr
from odoo.tools.misc import find_in_path
from odoo.tools import check_barcode_encoding, config, is_html_empty, parse_version, split_every
from odoo.http import request, root
from odoo.tools.pdf import PdfFileWriter, PdfFileReader, PdfReadError
from odoo.http import request
from odoo.osv.expression import NEGATIVE_TERM_OPERATORS, FALSE_DOMAIN
import io
@ -19,6 +22,7 @@ import lxml.html
import tempfile
import subprocess
import re
import requests
import json
from lxml import etree
@ -58,6 +62,11 @@ except Exception:
def _get_wkhtmltopdf_bin():
return find_in_path('wkhtmltopdf')
def _get_wkhtmltoimage_bin():
    # Locate the wkhtmltoimage executable via the configured search path.
    return find_in_path('wkhtmltoimage')
def _split_table(tree, max_rows):
"""
Walks through the etree and splits tables with more than max_rows rows into
@ -108,6 +117,23 @@ else:
_logger.info('Wkhtmltopdf seems to be broken.')
wkhtmltopdf_state = 'broken'
wkhtmltoimage_version = None
try:
process = subprocess.Popen(
[_get_wkhtmltoimage_bin(), '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
except OSError:
_logger.info('You need Wkhtmltoimage to generate images from html.')
else:
_logger.info('Will use the Wkhtmltoimage binary at %s', _get_wkhtmltoimage_bin())
out, err = process.communicate()
match = re.search(b'([0-9.]+)', out)
if match:
wkhtmltoimage_version = parse_version(match.group(0).decode('ascii'))
if config['workers'] == 1:
_logger.info('You need to start Odoo with at least two workers to convert images to html.')
else:
_logger.info('Wkhtmltoimage seems to be broken.')
class IrActionsReport(models.Model):
_name = 'ir.actions.report'
@ -144,6 +170,7 @@ class IrActionsReport(models.Model):
help='If enabled, then the second time the user prints with same attachment name, it returns the previous report.')
attachment = fields.Char(string='Save as Attachment Prefix',
help='This is the filename of the attachment used to store the printing result. Keep empty to not save the printed reports. You can use a python expression with the object and time variables.')
domain = fields.Char(string='Filter domain', help='If set, the action will only appear on records that matches the domain.')
@api.depends('model')
def _compute_model_id(self):
@ -183,6 +210,7 @@ class IrActionsReport(models.Model):
"context", "data",
# and this one is used by the frontend later on.
"close_on_report_download",
"domain",
}
def associated_view(self):
@ -206,7 +234,7 @@ class IrActionsReport(models.Model):
def unlink_action(self):
""" Remove the contextual actions created for the reports. """
self.check_access_rights('write', raise_exception=True)
self.check_access('write')
self.filtered('binding_model_id').write({'binding_model_id': False})
return True
@ -245,6 +273,9 @@ class IrActionsReport(models.Model):
def get_paperformat(self):
    """Return this report's paper format, falling back to the current company's default."""
    return self.paperformat_id or self.env.company.paperformat_id
def get_paperformat_by_xmlid(self, xml_id):
    """Return the paper format of the report identified by *xml_id*.

    :param xml_id: external id of an ir.actions.report record; when falsy,
        the current company's default paper format is returned instead.
    """
    return self.env.ref(xml_id).get_paperformat() if xml_id else self.env.company.paperformat_id
def _get_layout(self):
return self.env.ref('web.minimal_layout', raise_if_not_found=False)
@ -354,13 +385,6 @@ class IrActionsReport(models.Model):
if not layout:
return {}
base_url = self._get_report_url(layout=layout)
url = urlparse(base_url)
query = parse_qs(url.query or "")
debug = self.env.context.get("debug")
if not isinstance(debug, str):
debug = "1" if debug else "0"
query["debug"] = debug
base_url = url._replace(query=urlencode(query)).geturl()
root = lxml.html.fromstring(html, parser=lxml.html.HTMLParser(encoding='utf-8'))
match_klass = "//div[contains(concat(' ', normalize-space(@class), ' '), ' {} ')]"
@ -417,17 +441,63 @@ class IrActionsReport(models.Model):
'subst': True,
'body': Markup(lxml.html.tostring(header_node, encoding='unicode')),
'base_url': base_url,
'report_xml_id': self.xml_id,
'debug': self.env.context.get("debug"),
})
footer = self.env['ir.qweb']._render(layout.id, {
'subst': True,
'body': Markup(lxml.html.tostring(footer_node, encoding='unicode')),
'base_url': base_url,
'report_xml_id': self.xml_id,
'debug': self.env.context.get("debug"),
})
return bodies, res_ids, header, footer, specific_paperformat_args
def _run_wkhtmltoimage(self, bodies, width, height, image_format="jpg"):
    """
    Render HTML documents to images with the wkhtmltoimage binary.

    :bodies str: valid html documents as strings
    :param width int: width in pixels
    :param height int: height in pixels
    :param image_format union['jpg', 'png']: format of the image
    :return list[bytes|None]: one image per body, in order; None for every
        body whose conversion failed (or for all bodies when skipped in tests)
    """
    # Skip real rendering under the test runner unless explicitly forced, so
    # tests do not depend on the external binary being installed.
    if (tools.config['test_enable'] or tools.config['test_file']) and not self.env.context.get('force_image_rendering'):
        return [None] * len(bodies)
    # wkhtmltoimage_version is probed once at module import time.
    if not wkhtmltoimage_version or wkhtmltoimage_version < parse_version('0.12.0'):
        raise UserError(_('wkhtmltoimage 0.12.0^ is required in order to render images from html'))
    command_args = [
        '--disable-local-file-access', '--disable-javascript',
        '--quiet',
        '--width', str(width), '--height', str(height),
        '--format', image_format,
    ]
    with ExitStack() as stack:
        # One (html input, image output) temp-file pair per body; the ExitStack
        # guarantees every file is closed/removed even if a conversion raises.
        files = []
        for body in bodies:
            input_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix='.html', prefix='report_image_html_input.tmp.'))
            output_file = stack.enter_context(tempfile.NamedTemporaryFile(suffix=f'.{image_format}', prefix='report_image_output.tmp.'))
            input_file.write(body.encode())
            files.append((input_file, output_file))
        output_images = []
        for input_file, output_file in files:
            # smaller bodies may be held in a python buffer until close, force flush
            input_file.flush()
            wkhtmltoimage = [_get_wkhtmltoimage_bin()] + command_args + [input_file.name, output_file.name]
            # start and block, no need for parallelism for now
            completed_process = subprocess.run(wkhtmltoimage, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, check=False)
            if completed_process.returncode:
                message = _(
                    'Wkhtmltoimage failed (error code: %(error_code)s). Message: %(error_message_end)s',
                    error_code=completed_process.returncode,
                    error_message_end=completed_process.stderr[-1000:],
                )
                _logger.warning(message)
                # A failed conversion yields None so callers can degrade per image.
                output_images.append(None)
            else:
                output_images.append(output_file.read())
        return output_images
@api.model
def _run_wkhtmltopdf(
self,
@ -462,12 +532,24 @@ class IrActionsReport(models.Model):
files_command_args = []
temporary_files = []
temp_session = None
# Passing the cookie to wkhtmltopdf in order to resolve internal links.
if request and request.db:
# Create a temporary session which will not create device logs
temp_session = root.session_store.new()
temp_session.update({
**request.session,
'debug': '',
'_trace_disable': True,
})
if temp_session.uid:
temp_session.session_token = security.compute_session_token(temp_session, self.env)
root.session_store.save(temp_session)
base_url = self._get_report_url()
domain = urlparse(base_url).hostname
cookie = f'session_id={request.session.sid}; HttpOnly; domain={domain}; path=/;'
cookie = f'session_id={temp_session.sid}; HttpOnly; domain={domain}; path=/;'
cookie_jar_file_fd, cookie_jar_file_path = tempfile.mkstemp(suffix='.txt', prefix='report.cookie_jar.tmp.')
temporary_files.append(cookie_jar_file_path)
with closing(os.fdopen(cookie_jar_file_fd, 'wb')) as cookie_jar_file:
@ -514,22 +596,21 @@ class IrActionsReport(models.Model):
try:
wkhtmltopdf = [_get_wkhtmltopdf_bin()] + command_args + files_command_args + paths + [pdf_report_path]
process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
err = ustr(err)
process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8")
_out, err = process.communicate()
if process.returncode not in [0, 1]:
if process.returncode == -11:
message = _(
'Wkhtmltopdf failed (error code: %s). Memory limit too low or maximum file number of subprocess reached. Message : %s',
process.returncode,
err[-1000:],
'Wkhtmltopdf failed (error code: %(error_code)s). Memory limit too low or maximum file number of subprocess reached. Message : %(message)s',
error_code=process.returncode,
message=err[-1000:],
)
else:
message = _(
'Wkhtmltopdf failed (error code: %s). Message: %s',
process.returncode,
err[-1000:],
'Wkhtmltopdf failed (error code: %(error_code)s). Message: %(message)s',
error_code=process.returncode,
message=err[-1000:],
)
_logger.warning(message)
raise UserError(message)
@ -538,6 +619,9 @@ class IrActionsReport(models.Model):
_logger.warning('wkhtmltopdf: %s' % err)
except:
raise
finally:
if temp_session:
root.session_store.delete(temp_session)
with open(pdf_report_path, 'rb') as pdf_document:
pdf_content = pdf_document.read()
@ -688,20 +772,18 @@ class IrActionsReport(models.Model):
)
return view_obj._render_template(template, values).encode()
def _handle_merge_pdfs_error(self, error=None, error_stream=None):
    """Default error handler for PDF merging: abort with a user-facing error.

    :param error: the originating exception, if any (unused by the default handler)
    :param error_stream: the PDF stream that failed to merge, if any (unused here)
    :raises UserError: always
    """
    raise UserError(_("Odoo is unable to merge the generated PDFs."))
@api.model
def _merge_pdfs(self, streams):
def _merge_pdfs(self, streams, handle_error=_handle_merge_pdfs_error):
writer = PdfFileWriter()
for stream in streams:
try:
reader = PdfFileReader(stream)
writer.appendPagesFromReader(reader)
except (PdfReadError, TypeError, NotImplementedError, ValueError):
# TODO : make custom_error_handler a parameter in master
custom_error_handler = self.env.context.get('custom_error_handler')
if custom_error_handler:
custom_error_handler(stream)
continue
raise UserError(_("Odoo is unable to merge the generated PDFs."))
except (PdfReadError, TypeError, NotImplementedError, ValueError) as e:
handle_error(error=e, error_stream=stream)
result_stream = io.BytesIO()
streams.append(result_stream)
writer.write(result_stream)
@ -774,13 +856,13 @@ class IrActionsReport(models.Model):
html = self.with_context(**additional_context)._render_qweb_html(report_ref, all_res_ids_wo_stream, data=data)[0]
bodies, html_ids, header, footer, specific_paperformat_args = self.with_context(**additional_context)._prepare_html(html, report_model=report_sudo.model)
bodies, html_ids, header, footer, specific_paperformat_args = report_sudo.with_context(**additional_context)._prepare_html(html, report_model=report_sudo.model)
if not has_duplicated_ids and report_sudo.attachment and set(res_ids_wo_stream) != set(html_ids):
raise UserError(_(
"The report's template %r is wrong, please contact your administrator. \n\n"
"Can not separate file to save as attachment because the report's template does not contains the"
" attributes 'data-oe-model' and 'data-oe-id' on the div with 'article' classname.",
"Report template “%s” has an issue, please contact your administrator. \n\n"
"Cannot separate file to save as attachment because the report's template does not contain the"
" attributes 'data-oe-model' and 'data-oe-id' as part of the div with 'article' classname.",
report_sudo.name,
))
@ -865,9 +947,11 @@ class IrActionsReport(models.Model):
stream = io.BytesIO()
attachment_writer.write(stream)
collected_streams[res_ids_wo_stream[i]]['stream'] = stream
return collected_streams
else:
for res_id in res_ids_wo_stream:
individual_collected_stream = self._render_qweb_pdf_prepare_streams(report_ref=report_ref, data=data, res_ids=[res_id])
collected_streams[res_id]['stream'] = individual_collected_stream[res_id]['stream']
collected_streams[False] = {'stream': pdf_content_stream, 'attachment': None}
return collected_streams
@ -910,7 +994,7 @@ class IrActionsReport(models.Model):
})
return attachment_vals_list
def _render_qweb_pdf(self, report_ref, res_ids=None, data=None):
def _pre_render_qweb_pdf(self, report_ref, res_ids=None, data=None):
if not data:
data = {}
if isinstance(res_ids, int):
@ -922,7 +1006,19 @@ class IrActionsReport(models.Model):
return self._render_qweb_html(report_ref, res_ids, data=data)
self = self.with_context(webp_as_jpg=True)
collected_streams = self._render_qweb_pdf_prepare_streams(report_ref, data, res_ids=res_ids)
return self._render_qweb_pdf_prepare_streams(report_ref, data, res_ids=res_ids), 'pdf'
def _render_qweb_pdf(self, report_ref, res_ids=None, data=None):
if not data:
data = {}
if isinstance(res_ids, int):
res_ids = [res_ids]
data.setdefault('report_type', 'pdf')
collected_streams, report_type = self._pre_render_qweb_pdf(report_ref, res_ids=res_ids, data=data)
if report_type != 'pdf':
return collected_streams, report_type
has_duplicated_ids = res_ids and len(res_ids) != len(set(res_ids))
# access the report details with sudo() but keep evaluation context as current user
@ -940,6 +1036,9 @@ class IrActionsReport(models.Model):
else:
_logger.info("The PDF documents %r are now saved in the database", attachment_names)
def custom_handle_merge_pdfs_error(error, error_stream):
error_record_ids.append(stream_to_ids[error_stream])
stream_to_ids = {v['stream']: k for k, v in collected_streams.items() if v['stream']}
# Merge all streams together for a single record.
streams_to_merge = list(stream_to_ids.keys())
@ -948,9 +1047,7 @@ class IrActionsReport(models.Model):
if len(streams_to_merge) == 1:
pdf_content = streams_to_merge[0].getvalue()
else:
with self.with_context(
custom_error_handler=lambda error_stream: error_record_ids.append(stream_to_ids[error_stream])
)._merge_pdfs(streams_to_merge) as pdf_merged_stream:
with self._merge_pdfs(streams_to_merge, custom_handle_merge_pdfs_error) as pdf_merged_stream:
pdf_content = pdf_merged_stream.getvalue()
if error_record_ids:
@ -959,7 +1056,7 @@ class IrActionsReport(models.Model):
'name': _('Problematic record(s)'),
'res_model': report_sudo.model,
'domain': [('id', 'in', error_record_ids)],
'views': [(False, 'tree'), (False, 'form')],
'views': [(False, 'list'), (False, 'form')],
}
num_errors = len(error_record_ids)
if num_errors == 1:
@ -1065,11 +1162,52 @@ class IrActionsReport(models.Model):
return report_action
def _action_configure_external_report_layout(self, report_action):
action = self.env["ir.actions.actions"]._for_xml_id("web.action_base_document_layout_configurator")
def _action_configure_external_report_layout(self, report_action, xml_id="web.action_base_document_layout_configurator"):
action = self.env["ir.actions.actions"]._for_xml_id(xml_id)
py_ctx = json.loads(action.get('context', {}))
report_action['close_on_report_download'] = True
py_ctx['report_action'] = report_action
py_ctx['dialog_size'] = 'large'
action['context'] = py_ctx
return action
def get_valid_action_reports(self, model, record_ids):
    """ Return the list of ids of actions for which the domain is
    satisfied by at least one record in record_ids.

    :param model: the model of the records to validate
    :param record_ids: list of ids of records to validate
    """
    candidates = self.env[model].browse(record_ids)
    with_domain = self.filtered('domain')
    # Actions that define no domain apply unconditionally.
    valid_ids = (self - with_domain).ids
    valid_ids.extend(
        action.id
        for action in with_domain
        if candidates.filtered_domain(literal_eval(action.domain))
    )
    return valid_ids
@api.model
def _prepare_local_attachments(self, attachments, timeout=10):
    """Return a recordset of attachments whose binary content is locally
    available, downloading remote (url / cloud storage) attachments on
    the fly.

    Local attachments are kept as-is. Remote ones are fetched over HTTP
    and wrapped in transient (``new()``) ``ir.attachment`` records
    holding the downloaded bytes, so nothing is persisted. Attachments
    that cannot be resolved or downloaded are skipped with a log entry.

    :param attachments: ir.attachment recordset to process
    :param int timeout: per-download timeout in seconds (default: 10)
    :return: ir.attachment recordset with usable local data
    """
    attachments_with_data = self.env['ir.attachment']
    for attachment in attachments:
        if not attachment._is_remote_source():
            attachments_with_data |= attachment
        elif (stream := attachment._to_http_stream()) and stream.url:
            # call `_to_http_stream()` in case the attachment is an url or cloud storage attachment
            try:
                response = requests.get(stream.url, timeout=timeout)
                response.raise_for_status()
                attachment_data = response.content
                if not attachment_data:
                    _logger.warning("Attachment %s with URL %s retrieved successfully, but no content was found.", attachment.id, attachment.url)
                    continue
                # `new()` keeps the downloaded copy in memory only.
                attachments_with_data |= self.env['ir.attachment'].new({
                    'db_datas': attachment_data,
                    'name': attachment.name,
                    'mimetype': attachment.mimetype,
                    'res_model': attachment.res_model,
                    'res_id': attachment.res_id
                })
            except requests.exceptions.RequestException as e:
                _logger.error("Request for attachment %s with URL %s failed: %s", attachment.id, attachment.url, e)
        else:
            _logger.error("Unexpected edge case: Is not being considered as a local or remote attachment, attachment ID:%s will be skipped.", attachment.id)
    return attachments_with_data

View file

@ -4,22 +4,21 @@ import base64
import binascii
import contextlib
import hashlib
import io
import itertools
import logging
import mimetypes
import os
import psycopg2
import re
import uuid
import werkzeug
from collections import defaultdict
from PIL import Image
from odoo import api, fields, models, SUPERUSER_ID, tools, _
from odoo.exceptions import AccessError, ValidationError, UserError
from odoo.tools import config, human_size, ImageProcess, str2bool, consteq
from odoo.tools.mimetypes import guess_mimetype
from odoo.http import Stream, root, request
from odoo.tools import config, human_size, image, str2bool, consteq
from odoo.tools.mimetypes import guess_mimetype, fix_filename_extension, _olecf_mimetypes
from odoo.osv import expression
_logger = logging.getLogger(__name__)
@ -102,12 +101,6 @@ class IrAttachment(models.Model):
@api.model
def _get_path(self, bin_data, sha):
# retro compatibility
fname = sha[:3] + '/' + sha
full_path = self._full_path(fname)
if os.path.isfile(full_path):
return fname, full_path # keep existing path
# scatter files across 256 dirs
# we use '/' in the db (even on windows)
fname = sha[:2] + '/' + sha
@ -115,6 +108,7 @@ class IrAttachment(models.Model):
dirname = os.path.dirname(full_path)
if not os.path.isdir(dirname):
os.makedirs(dirname, exist_ok=True)
# prevent sha-1 collision
if os.path.isfile(full_path) and not self._same_content(bin_data, full_path):
raise UserError(_("The attachment collides with an existing file."))
@ -340,11 +334,10 @@ class IrAttachment(models.Model):
max_resolution = ICP('base.image_autoresize_max_px', '1920x1920')
if str2bool(max_resolution, True):
try:
img = False
if is_raw:
img = ImageProcess(values['raw'], verify_resolution=False)
img = image.ImageProcess(values['raw'], verify_resolution=False)
else: # datas
img = ImageProcess(base64.b64decode(values['datas']), verify_resolution=False)
img = image.ImageProcess(base64.b64decode(values['datas']), verify_resolution=False)
if not img.image:
_logger.info('Post processing ignored : Empty source, SVG, or WEBP')
@ -363,7 +356,8 @@ class IrAttachment(models.Model):
except UserError as e:
# Catch error during test where we provide fake image
# raise UserError(_("This file could not be decoded as an image file. Please try with a different file."))
_logger.info('Post processing ignored : %s', e)
msg = str(e) # the exception can be lazy-translated, resolve it here
_logger.info('Post processing ignored : %s', msg)
return values
def _check_contents(self, values):
@ -372,8 +366,9 @@ class IrAttachment(models.Model):
'xml' in mimetype and # other xml (svg, text/xml, etc)
not mimetype.startswith('application/vnd.openxmlformats')) # exception for Office formats
force_text = xml_like and (
self.env.context.get('attachments_mime_plainxml') or
not self.env['ir.ui.view'].sudo(False).check_access_rights('write', False))
self.env.context.get('attachments_mime_plainxml')
or not self.env['ir.ui.view'].sudo(False).has_access('write')
)
if force_text:
values['mimetype'] = 'text/plain'
if not self.env.context.get('image_no_postprocess'):
@ -407,10 +402,9 @@ class IrAttachment(models.Model):
name = fields.Char('Name', required=True)
description = fields.Text('Description')
res_name = fields.Char('Resource Name', compute='_compute_res_name')
res_model = fields.Char('Resource Model', readonly=True)
res_field = fields.Char('Resource Field', readonly=True)
res_id = fields.Many2oneReference('Resource ID', model_field='res_model',
readonly=True)
res_model = fields.Char('Resource Model')
res_field = fields.Char('Resource Field')
res_id = fields.Many2oneReference('Resource ID', model_field='res_model')
company_id = fields.Many2one('res.company', string='Company', change_default=True,
default=lambda self: self.env.company)
type = fields.Selection([('url', 'URL'), ('binary', 'File')],
@ -426,7 +420,7 @@ class IrAttachment(models.Model):
raw = fields.Binary(string="File Content (raw)", compute='_compute_raw', inverse='_inverse_raw')
datas = fields.Binary(string='File Content (base64)', compute='_compute_datas', inverse='_inverse_datas')
db_datas = fields.Binary('Database Data', attachment=False)
store_fname = fields.Char('Stored Filename', index=True, unaccent=False)
store_fname = fields.Char('Stored Filename', index=True)
file_size = fields.Integer('File Size', readonly=True)
checksum = fields.Char("Checksum/SHA1", size=40, readonly=True)
mimetype = fields.Char('Mime Type', readonly=True)
@ -474,9 +468,8 @@ class IrAttachment(models.Model):
raise AccessError(_("Sorry, you are not allowed to access this document."))
if res_field:
field = self.env[res_model]._fields[res_field]
if field.groups:
if not self.env.user.user_has_groups(field.groups):
raise AccessError(_("Sorry, you are not allowed to access this document."))
if not field.is_accessible(self.env):
raise AccessError(_("Sorry, you are not allowed to access this document."))
if not (res_model and res_id):
continue
model_ids[res_model].add(res_id)
@ -499,8 +492,7 @@ class IrAttachment(models.Model):
# For related models, check if we can write to the model, as unlinking
# and creating attachments can be seen as an update to the model
access_mode = 'write' if mode in ('create', 'unlink') else mode
records.check_access_rights(access_mode)
records.check_access_rule(access_mode)
records.check_access(access_mode)
@api.model
def _filter_attachment_access(self, attachment_ids):
@ -511,7 +503,7 @@ class IrAttachment(models.Model):
"""
ret_attachments = self.env['ir.attachment']
attachments = self.browse(attachment_ids)
if not attachments.check_access_rights('read', raise_exception=False):
if not attachments.has_access('read'):
return ret_attachments
for attachment in attachments.sudo():
@ -525,7 +517,7 @@ class IrAttachment(models.Model):
return ret_attachments
@api.model
def _search(self, domain, offset=0, limit=None, order=None, access_rights_uid=None):
def _search(self, domain, offset=0, limit=None, order=None):
# add res_field=False in domain if not present; the arg[0] trick below
# works for domain items and '&'/'|'/'!' operators too
disable_binary_fields_attachments = False
@ -535,24 +527,17 @@ class IrAttachment(models.Model):
if self.env.is_superuser():
# rules do not apply for the superuser
return super()._search(domain, offset, limit, order, access_rights_uid)
return super()._search(domain, offset, limit, order)
# For attachments, the permissions of the document they are attached to
# apply, so we must remove attachments for which the user cannot access
# the linked document. For the sake of performance, fetch the fields to
# determine those permissions within the same SQL query.
self.flush_model(['res_model', 'res_id', 'res_field', 'public', 'create_uid'])
query = super()._search(domain, offset, limit, order, access_rights_uid)
query_str, params = query.select(
f'"{self._table}"."id"',
f'"{self._table}"."res_model"',
f'"{self._table}"."res_id"',
f'"{self._table}"."res_field"',
f'"{self._table}"."public"',
f'"{self._table}"."create_uid"',
)
self.env.cr.execute(query_str, params)
rows = self.env.cr.fetchall()
fnames_to_read = ['id', 'res_model', 'res_id', 'res_field', 'public', 'create_uid']
query = super()._search(domain, offset, limit, order)
rows = self.env.execute_query(query.select(
*[self._field_to_sql(self._table, fname) for fname in fnames_to_read],
))
# determine permissions based on linked records
all_ids = []
@ -566,7 +551,7 @@ class IrAttachment(models.Model):
if res_field and not self.env.is_system():
field = self.env[res_model]._fields[res_field]
if field.groups and not self.env.user.user_has_groups(field.groups):
if field.groups and not self.env.user.has_groups(field.groups):
continue
if not res_id and (self.env.is_system() or create_uid == self.env.uid):
@ -580,7 +565,7 @@ class IrAttachment(models.Model):
if res_model not in self.env:
allowed_ids.update(id_ for ids in targets.values() for id_ in ids)
continue
if not self.env[res_model].check_access_rights('read', False):
if not self.env[res_model].has_access('read'):
continue
# filter ids according to what access rules permit
ResModel = self.env[res_model].with_context(active_test=False)
@ -599,7 +584,7 @@ class IrAttachment(models.Model):
if len(all_ids) == limit and len(result) < self._context.get('need', limit):
need = self._context.get('need', limit) - len(result)
more_ids = self.with_context(need=need)._search(
domain, offset + len(all_ids), limit, order, access_rights_uid,
domain, offset + len(all_ids), limit, order,
)
result.extend(list(more_ids)[:limit - len(result)])
@ -614,11 +599,14 @@ class IrAttachment(models.Model):
vals = self._check_contents(vals)
return super(IrAttachment, self).write(vals)
def copy(self, default=None):
if not (default or {}).keys() & {'datas', 'db_datas', 'raw'}:
# ensure the content is kept and recomputes checksum/store_fname
default = dict(default or {}, raw=self.raw)
return super(IrAttachment, self).copy(default)
def copy_data(self, default=None):
    """Prepare the values to duplicate these attachments.

    Unless the caller explicitly provided new content through ``default``
    ('datas', 'db_datas' or 'raw'), carry the original binary content over
    so the copy keeps it (and recomputes checksum/store_fname).
    """
    default = dict(default or {})
    vals_list = super().copy_data(default=default)
    # Hoisted out of the loop: whether content was overridden does not
    # depend on the individual attachment.
    if not default.keys() & {'datas', 'db_datas', 'raw'}:
        for attachment, vals in zip(self, vals_list):
            # ensure the content is kept and recomputes checksum/store_fname
            vals['raw'] = attachment.raw
    return vals_list
def unlink(self):
if not self:
@ -729,7 +717,7 @@ class IrAttachment(models.Model):
if record_sudo.with_context(prefetch_fields=False).public:
return record_sudo
if self.env.user.has_group('base.group_portal'):
if self.env.user._is_portal():
# Check the read access on the record linked to the attachment
# eg: Allow to download an attachment on a task from /my/tasks/task_id
self.check('read')
@ -756,3 +744,95 @@ class IrAttachment(models.Model):
('create_uid', '=', SUPERUSER_ID),
]).unlink()
self.env.registry.clear_cache('assets')
def _from_request_file(self, file, *, mimetype, **vals):
    """
    Create an attachment out of a request file

    :param file: the request file (a readable, seekable file-like
        object exposing ``filename`` and ``content_type``, e.g. a
        werkzeug ``FileStorage`` — TODO confirm against callers)
    :param str mimetype:
        * "TRUST" to use the mimetype and file extension from the
        request file with no verification.
        * "GUESS" to determine the mimetype and file extension on
        the file's content. The determined extension is added at
        the end of the filename unless the filename already had a
        valid extension.
        * a mimetype in format "{type}/{subtype}" to force the
        mimetype to the given value, it adds the corresponding
        file extension at the end of the filename unless the
        filename already had a valid extension.
    :raises ValueError: when ``mimetype`` is neither of the two
        keywords nor a well-formed "type/subtype" value
    """
    if mimetype == 'TRUST':
        mimetype = file.content_type
        filename = file.filename
    elif mimetype == 'GUESS':
        # Sniff only the first KiB of content, then rewind relative to
        # the current position so the later full read starts at 0.
        head = file.read(1024)
        file.seek(-len(head), 1)  # rewind
        mimetype = guess_mimetype(head)
        filename = fix_filename_extension(file.filename, mimetype)
        if mimetype in ('application/zip', *_olecf_mimetypes):
            # zip/OLECF are container formats; fall back on the (now
            # fixed) filename extension to pick the precise subtype.
            mimetype = mimetypes.guess_type(filename)[0]
    elif all(mimetype.partition('/')):
        # Explicit "type/subtype": type, '/', and subtype all non-empty.
        filename = fix_filename_extension(file.filename, mimetype)
    else:
        raise ValueError(f'{mimetype=}')
    return self.create({
        'name': filename,
        'type': 'binary',
        'raw': file.read(),  # load the entire file in memory :(
        'mimetype': mimetype,
        **vals,
    })
def _to_http_stream(self):
    """ Create a :class:`~Stream`: from an ir.attachment record.

    The stream type depends on where the content lives: the filestore
    (``store_fname``), the database (``db_datas``), a URL (served from
    a static addon path when possible), or nothing (empty stream).
    """
    self.ensure_one()
    stream = Stream(
        mimetype=self.mimetype,
        download_name=self.name,
        etag=self.checksum,
        public=self.public,
    )
    if self.store_fname:
        # Content stored in the filestore: stream straight from disk.
        stream.type = 'path'
        # safe_join prevents path traversal out of the filestore root.
        stream.path = werkzeug.security.safe_join(
            os.path.abspath(config.filestore(request.db)),
            self.store_fname
        )
        stat = os.stat(stream.path)
        stream.last_modified = stat.st_mtime
        stream.size = stat.st_size
    elif self.db_datas:
        # Content stored in the database.
        stream.type = 'data'
        stream.data = self.raw
        stream.last_modified = self.write_date
        stream.size = len(stream.data)
    elif self.url:
        # When the URL targets a file located in an addon, assume it
        # is a path to the resource. It saves an indirection and
        # stream the file right away.
        static_path = root.get_static_file(
            self.url,
            host=request.httprequest.environ.get('HTTP_HOST', '')
        )
        if static_path:
            stream = Stream.from_path(static_path, public=True)
        else:
            stream.type = 'url'
            stream.url = self.url
    else:
        # No content at all: empty data stream.
        stream.type = 'data'
        stream.data = b''
        stream.size = 0
    return stream
def _is_remote_source(self):
    """Tell whether this attachment points at a remote http/ftp resource,
    i.e. a url attachment without any locally stored content."""
    self.ensure_one()
    remote_schemes = ('http://', 'https://', 'ftp://')
    return self.url and not self.file_size and self.url.startswith(remote_schemes)

View file

@ -9,6 +9,7 @@ from odoo.http import Stream, request
from odoo.tools import file_open, replace_exceptions
from odoo.tools.image import image_process, image_guess_size_from_field_name
from odoo.tools.mimetypes import guess_mimetype, get_extension
from odoo.tools.misc import verify_limited_field_access_token
DEFAULT_PLACEHOLDER_PATH = 'web/static/img/placeholder.png'
@ -21,7 +22,7 @@ class IrBinary(models.AbstractModel):
def _find_record(
self, xmlid=None, res_model='ir.attachment', res_id=None,
access_token=None,
access_token=None, field=None
):
"""
Find and return a record either using an xmlid either a model+id
@ -45,16 +46,16 @@ class IrBinary(models.AbstractModel):
record = self.env[res_model].browse(res_id).exists()
if not record:
raise MissingError(f"No record found for xmlid={xmlid}, res_model={res_model}, id={res_id}")
record = self._find_record_check_access(record, access_token)
if access_token and verify_limited_field_access_token(record, field, access_token):
return record.sudo()
record = self._find_record_check_access(record, access_token, field)
return record
def _find_record_check_access(self, record, access_token):
def _find_record_check_access(self, record, access_token, field):
if record._name == 'ir.attachment':
return record.validate_access(access_token)
record.check_access_rights('read')
record.check_access_rule('read')
record.check_access('read')
return record
def _record_to_stream(self, record, field_name):
@ -70,7 +71,7 @@ class IrBinary(models.AbstractModel):
:rtype: odoo.http.Stream
"""
if record._name == 'ir.attachment' and field_name in ('raw', 'datas', 'db_datas'):
return Stream.from_attachment(record)
return record._to_http_stream()
record.check_field_access_rights('read', [field_name])
@ -82,7 +83,7 @@ class IrBinary(models.AbstractModel):
limit=1)
if not field_attachment:
raise MissingError("The related attachment does not exist.")
return Stream.from_attachment(field_attachment)
return field_attachment._to_http_stream()
return Stream.from_binary_field(record, field_name)

View file

@ -65,7 +65,7 @@ class IrConfigParameter(models.Model):
:return: The value of the parameter, or ``default`` if it does not exist.
:rtype: string
"""
self.check_access_rights('read')
self.browse().check_access('read')
return self._get_param(key) or default
@api.model

View file

@ -4,19 +4,26 @@ import threading
import time
import os
import psycopg2
import psycopg2.errors
import pytz
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from psycopg2 import sql
import odoo
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.modules.registry import Registry
from odoo.tools import SQL
_logger = logging.getLogger(__name__)
BASE_VERSION = odoo.modules.get_manifest('base')['version']
MAX_FAIL_TIME = timedelta(hours=5) # chosen with a fair roll of the dice
MAX_BATCH_PER_CRON_JOB = 10
CONSECUTIVE_TIMEOUT_FOR_FAILURE = 3
MIN_FAILURE_COUNT_BEFORE_DEACTIVATION = 5
MIN_DELTA_BEFORE_DEACTIVATION = timedelta(days=7)
# crons must satisfy both minimum thresholds before deactivation
# custom function to call instead of default PostgreSQL's `pg_notify`
ODOO_NOTIFY_FUNCTION = os.getenv('ODOO_NOTIFY_FUNCTION', 'pg_notify')
@ -38,6 +45,12 @@ _intervalTypes = {
}
class CompletionStatus: # inherit from enum.StrEnum in 3.11
    """Outcome of processing a cron job, as plain string constants."""
    FULLY_DONE = 'fully done'
    # Job did part of its work; it stays ready to be acquired again.
    PARTIALLY_DONE = 'partially done'
    FAILED = 'failed'
class ir_cron(models.Model):
""" Model describing cron jobs (also called actions or tasks).
"""
@ -58,17 +71,25 @@ class ir_cron(models.Model):
cron_name = fields.Char('Name', compute='_compute_cron_name', store=True)
user_id = fields.Many2one('res.users', string='Scheduler User', default=lambda self: self.env.user, required=True)
active = fields.Boolean(default=True)
interval_number = fields.Integer(default=1, group_operator=None, help="Repeat every x.")
interval_number = fields.Integer(default=1, aggregator=None, help="Repeat every x.", required=True)
interval_type = fields.Selection([('minutes', 'Minutes'),
('hours', 'Hours'),
('days', 'Days'),
('weeks', 'Weeks'),
('months', 'Months')], string='Interval Unit', default='months')
numbercall = fields.Integer(string='Number of Calls', default=1, help='How many times the method is called,\na negative number indicates no limit.')
doall = fields.Boolean(string='Repeat Missed', help="Specify if missed occurrences should be executed when the server restarts.")
('months', 'Months')], string='Interval Unit', default='months', required=True)
nextcall = fields.Datetime(string='Next Execution Date', required=True, default=fields.Datetime.now, help="Next planned execution date for this job.")
lastcall = fields.Datetime(string='Last Execution Date', help="Previous time the cron ran successfully, provided to the job through the context on the `lastcall` key")
priority = fields.Integer(default=5, group_operator=None, help='The priority of the job, as an integer: 0 means higher priority, 10 means lower priority.')
priority = fields.Integer(default=5, aggregator=None, help='The priority of the job, as an integer: 0 means higher priority, 10 means lower priority.')
failure_count = fields.Integer(default=0, help="The number of consecutive failures of this job. It is automatically reset on success.")
first_failure_date = fields.Datetime(string='First Failure Date', help="The first time the cron failed. It is automatically reset on success.")
_sql_constraints = [
(
'check_strictly_positive_interval',
'CHECK(interval_number > 0)',
'The interval number must be a strictly positive number.'
),
]
@api.depends('ir_actions_server_id.name')
def _compute_cron_name(self):
@ -90,24 +111,16 @@ class ir_cron(models.Model):
self = self.with_context(default_state='code')
return super(ir_cron, self).default_get(fields_list)
@api.onchange('active', 'interval_number', 'interval_type')
def _onchange_interval_number(self):
if self.active and (self.interval_number <= 0 or not self.interval_type):
self.active = False
return {'warning': {
'title': _("Scheduled action disabled"),
'message': _("This scheduled action has been disabled because its interval number is not a strictly positive value.")}
}
def method_direct_trigger(self):
self.check_access_rights('write')
for cron in self:
cron._try_lock()
_logger.info('Manually starting job `%s`.', cron.name)
cron.with_user(cron.user_id).with_context({'lastcall': cron.lastcall}).ir_actions_server_id.run()
self.env.flush_all()
_logger.info('Job `%s` done.', cron.name)
cron.lastcall = fields.Datetime.now()
self.ensure_one()
self.browse().check_access('write')
self._try_lock()
_logger.info('Job %r (%s) started manually', self.name, self.id)
self, _ = self.with_user(self.user_id).with_context({'lastcall': self.lastcall})._add_progress() # noqa: PLW0642
self.ir_actions_server_id.run()
self.lastcall = fields.Datetime.now()
self.env.flush_all()
_logger.info('Job %r (%s) done', self.name, self.id)
return True
@classmethod
@ -125,7 +138,7 @@ class ir_cron(models.Model):
for job_id in (job['id'] for job in jobs):
try:
job = cls._acquire_one_job(cron_cr, (job_id,))
job = cls._acquire_one_job(cron_cr, job_id)
except psycopg2.extensions.TransactionRollbackError:
cron_cr.rollback()
_logger.debug("job %s has been processed by another worker, skip", job_id)
@ -135,22 +148,19 @@ class ir_cron(models.Model):
continue
_logger.debug("job %s acquired", job_id)
# take into account overridings of _process_job() on that database
registry = odoo.registry(db_name).check_signaling()
registry = Registry(db_name).check_signaling()
registry[cls._name]._process_job(db, cron_cr, job)
cron_cr.commit()
_logger.debug("job %s updated and released", job_id)
except BadVersion:
_logger.warning('Skipping database %s as its base version is not %s.', db_name, BASE_VERSION)
except BadModuleState:
_logger.warning('Skipping database %s because of modules to install/upgrade/remove.', db_name)
except psycopg2.errors.UndefinedTable:
# The table ir_cron does not exist; this is probably not an OpenERP database.
_logger.warning('Tried to poll an undefined table on database %s.', db_name)
except psycopg2.ProgrammingError as e:
if e.pgcode == '42P01':
# Class 42 — Syntax Error or Access Rule Violation; 42P01: undefined_table
# The table ir_cron does not exist; this is probably not an OpenERP database.
_logger.warning('Tried to poll an undefined table on database %s.', db_name)
else:
raise
raise
except Exception:
_logger.warning('Exception in cron:', exc_info=True)
finally:
@ -206,7 +216,6 @@ class ir_cron(models.Model):
SELECT *
FROM ir_cron
WHERE active = true
AND numbercall != 0
AND (nextcall <= (now() at time zone 'UTC')
OR id in (
SELECT cron_id
@ -214,48 +223,45 @@ class ir_cron(models.Model):
WHERE call_at <= (now() at time zone 'UTC')
)
)
ORDER BY priority
ORDER BY failure_count, priority, id
""")
return cr.dictfetchall()
@classmethod
def _acquire_one_job(cls, cr, job_ids):
def _acquire_one_job(cls, cr, job_id):
"""
Acquire for update one job that is ready from the job_ids tuple.
Acquire for update the job with id ``job_id``.
The jobs that have already been processed in this worker should
be excluded from the tuple.
The job should not have been processed yet by the current
worker. Another worker may process the job again, may that job
become ready again quickly enough (e.g. self-triggering, high
frequency, or partially done jobs).
This function raises a ``psycopg2.errors.SerializationFailure``
when the ``nextcall`` of one of the job_ids is modified in
another transaction. You should rollback the transaction and try
again later.
Note: It is possible that this function raises a
``psycopg2.errors.SerializationFailure`` in case the job
has been processed in another worker. In such case it is
advised to roll back the transaction and to go on with the
other jobs.
"""
# We have to make sure ALL jobs are executed ONLY ONCE no matter
# how many cron workers may process them. The exlusion mechanism
# is twofold: (i) prevent parallel processing of the same job,
# and (ii) prevent re-processing jobs that have been processed
# already.
# The query must make sure that (i) two cron workers cannot
# process a given job at a same time. The query must also make
# sure that (ii) a job already processed in another worker
# should not be processed again by this one (or at least not
# before the job becomes ready again).
#
# (i) is implemented via `LIMIT 1 FOR UPDATE SKIP LOCKED`, each
# (i) is implemented via `FOR NO KEY UPDATE SKIP LOCKED`, each
# worker just acquire one available job at a time and lock it so
# the other workers don't select it too.
# (ii) is implemented via the `WHERE` statement, when a job has
# been processed, its nextcall is updated to a date in the
# future and the optional triggers are removed.
#
# Note about (ii): it is possible that a job becomes available
# again quickly (e.g. high frequency or self-triggering cron).
# This function doesn't prevent from acquiring that job multiple
# times at different moments. This can block a worker on
# executing a same job in loop. To prevent this problem, the
# callee is responsible of providing a `job_ids` tuple without
# the jobs it has executed already.
# been processed and is fully done, its nextcall is updated to a
# date in the future and the optional triggers are removed. In
# case a job has only been partially done, the job is left ready
# to be acquired again by another cron worker.
#
# An `UPDATE` lock type is the strongest row lock, it conflicts
# with ALL other lock types. Among them the `KEY SHARE` row lock
# which is implicitely aquired by foreign keys to prevent the
# which is implicitly acquired by foreign keys to prevent the
# referenced record from being removed while in use. Because we
# never delete acquired cron jobs, foreign keys are safe to
# concurrently reference cron jobs. Hence, the `NO KEY UPDATE`
@ -265,10 +271,17 @@ class ir_cron(models.Model):
# Learn more: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-ROWS
query = """
WITH last_cron_progress AS (
SELECT id as progress_id, cron_id, timed_out_counter, done, remaining
FROM ir_cron_progress
WHERE cron_id = %s
ORDER BY id DESC
LIMIT 1
)
SELECT *
FROM ir_cron
WHERE active = true
AND numbercall != 0
LEFT JOIN last_cron_progress lcp ON lcp.cron_id = ir_cron.id
WHERE ir_cron.active = true
AND (nextcall <= (now() at time zone 'UTC')
OR EXISTS (
SELECT cron_id
@ -277,12 +290,12 @@ class ir_cron(models.Model):
AND cron_id = ir_cron.id
)
)
AND id in %s
AND id = %s
ORDER BY priority
LIMIT 1 FOR NO KEY UPDATE SKIP LOCKED
FOR NO KEY UPDATE SKIP LOCKED
"""
try:
cr.execute(query, [job_ids], log_exceptions=False)
cr.execute(query, [job_id, job_id], log_exceptions=False)
except psycopg2.extensions.TransactionRollbackError:
# A serialization error can occur when another cron worker
# commits the new `nextcall` value of a cron it just ran and
@ -292,123 +305,281 @@ class ir_cron(models.Model):
except Exception as exc:
_logger.error("bad query: %s\nERROR: %s", query, exc)
raise
return cr.dictfetchone()
job = cr.dictfetchone()
if not job: # Job is already taken
return None
for field_name in ('done', 'remaining', 'timed_out_counter'):
job[field_name] = job[field_name] or 0
return job
def _notify_admin(self, message):
"""
Notify ``message`` to some administrator.
The base implementation of this method does nothing. It is
supposed to be overridden with some actual communication
mechanism.
"""
_logger.warning(message)
@classmethod
def _process_job(cls, db, cron_cr, job):
""" Execute a cron job and re-schedule a call for later. """
"""
Execute the cron's server action in a dedicated transaction.
# Compute how many calls were missed and at what time we should
# recall the cron next. In the example bellow, we fake a cron
# with an interval of 30 (starting at 0) that was last executed
# at 15 and that is executed again at 135.
#
# 0 60 120 180
# --|-----|-----|-----|-----|-----|-----|----> time
# 1 2* * * * 3 4
#
# 1: lastcall, the last time the cron was executed
# 2: past_nextcall, the cron nextcall as seen from lastcall
# *: missed_call, a total of 4 calls are missing
# 3: now
# 4: future_nextcall, the cron nextcall as seen from now
In case the previous process actually timed out, the cron's
server action is not executed and the cron is considered
``'failed'``.
if job['interval_number'] <= 0:
_logger.error("Job %s %r has been disabled because its interval number is null or negative.", job['id'], job['cron_name'])
cron_cr.execute("UPDATE ir_cron SET active=false WHERE id=%s", [job['id']])
return
The server action can use the progress API via the method
:meth:`_notify_progress` to report processing progress, i.e. how
many records are done and how many records are remaining to
process.
Those progress notifications are used to determine the job's
``CompletionStatus`` and to determine the next time the cron
will be executed:
- ``'fully done'``: the cron is rescheduled later, it'll be
executed again after its regular time interval or upon a new
trigger.
- ``'partially done'``: the cron is rescheduled ASAP, it'll be
executed again by this or another cron worker once the other
ready cron jobs have been executed.
- ``'failed'``: the cron is deactivated if it failed too many
times over a given time span; otherwise it is rescheduled
later.
"""
env = api.Environment(cron_cr, job['user_id'], {})
ir_cron = env[cls._name]
failed_by_timeout = (
job['timed_out_counter'] >= CONSECUTIVE_TIMEOUT_FOR_FAILURE
and not job['done']
)
if not failed_by_timeout:
status = cls._run_job(job)
else:
status = CompletionStatus.FAILED
cron_cr.execute("""
UPDATE ir_cron_progress
SET timed_out_counter = 0
WHERE id = %s
""", (job['progress_id'],))
_logger.error("Job %r (%s) timed out", job['cron_name'], job['id'])
ir_cron._update_failure_count(job, status)
if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED):
ir_cron._reschedule_later(job)
elif status == CompletionStatus.PARTIALLY_DONE:
ir_cron._reschedule_asap(job)
if os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
cron_cr.postcommit.add(ir_cron._notifydb) # See: `_notifydb`
else:
raise RuntimeError("unreachable")
cron_cr.commit()
@classmethod
def _run_job(cls, job):
"""
Execute the job's server action multiple times until it
completes. The completion status is returned.
It is considered completed when either:
- the server action doesn't use the progress API, or returned
and notified that all records have been processed: ``'fully done'``;
- the server action returned and notified that there are
remaining records to process, but this cron worker ran this
server action 10 times already: ``'partially done'``;
- the server action was able to commit and notify some work done,
but later crashed due to an exception: ``'partially done'``;
- the server action failed due to an exception and no progress
was notified: ``'failed'``.
"""
timed_out_counter = job['timed_out_counter']
with cls.pool.cursor() as job_cr:
lastcall = fields.Datetime.to_datetime(job['lastcall'])
interval = _intervalTypes[job['interval_type']](job['interval_number'])
env = api.Environment(job_cr, job['user_id'], {'lastcall': lastcall})
ir_cron = env[cls._name]
env = api.Environment(job_cr, job['user_id'], {
'lastcall': job['lastcall'],
'cron_id': job['id'],
})
cron = env[cls._name].browse(job['id'])
# Use the user's timezone to compare and compute datetimes,
# otherwise unexpected results may appear. For instance, adding
# 1 month in UTC to July 1st at midnight in GMT+2 gives July 30
# instead of August 1st!
now = fields.Datetime.context_timestamp(ir_cron, datetime.utcnow())
past_nextcall = fields.Datetime.context_timestamp(
ir_cron, fields.Datetime.to_datetime(job['nextcall']))
status = None
for i in range(MAX_BATCH_PER_CRON_JOB):
cron, progress = cron._add_progress(timed_out_counter=timed_out_counter)
job_cr.commit()
# Compute how many call were missed
missed_call = past_nextcall
missed_call_count = 0
while missed_call <= now:
missed_call += interval
missed_call_count += 1
future_nextcall = missed_call
try:
cron._callback(job['cron_name'], job['ir_actions_server_id'])
except Exception: # noqa: BLE001
if progress.done and progress.remaining:
# we do not consider it a failure if some progress has
# been committed
status = CompletionStatus.PARTIALLY_DONE
else:
status = CompletionStatus.FAILED
else:
if not progress.remaining:
status = CompletionStatus.FULLY_DONE
elif not progress.done:
# assume the server action doesn't use the progress API
# and that there is nothing left to process
status = CompletionStatus.FULLY_DONE
else:
status = CompletionStatus.PARTIALLY_DONE
# Compute how many time we should run the cron
effective_call_count = (
1 if not missed_call_count # run at least once
else 1 if not job['doall'] # run once for all
else missed_call_count if job['numbercall'] == -1 # run them all
else min(missed_call_count, job['numbercall']) # run maximum numbercall times
)
call_count_left = max(job['numbercall'] - effective_call_count, -1)
if status == CompletionStatus.FULLY_DONE and progress.deactivate:
job['active'] = False
finally:
progress.timed_out_counter = 0
timed_out_counter = 0
job_cr.commit()
_logger.info('Job %r (%s) processed %s records, %s records remaining',
job['cron_name'], job['id'], progress.done, progress.remaining)
if status in (CompletionStatus.FULLY_DONE, CompletionStatus.FAILED):
break
# The actual cron execution
for call in range(effective_call_count):
ir_cron._callback(job['cron_name'], job['ir_actions_server_id'], job['id'])
return status
# Update the cron with the information computed above
cron_cr.execute("""
def _update_failure_count(self, job, status):
"""
Update cron ``failure_count`` and ``first_failure_date`` given
the job's completion status. Deactivate the cron when BOTH the
counter reaches ``MIN_FAILURE_COUNT_BEFORE_DEACTIVATION`` AND
the time delta reaches ``MIN_DELTA_BEFORE_DEACTIVATION``.
On ``'fully done'`` and ``'partially done'``, the counter and
failure date are reset.
On ``'failed'`` the counter is increased and the first failure
date is set if the counter was 0. In case both thresholds are
reached, ``active`` is set to ``False`` and both values are
reset.
"""
now = fields.Datetime.context_timestamp(self, datetime.utcnow())
if status == CompletionStatus.FAILED:
failure_count = job['failure_count'] + 1
first_failure_date = job['first_failure_date'] or now
active = job['active']
if (
failure_count >= MIN_FAILURE_COUNT_BEFORE_DEACTIVATION
and fields.Datetime.context_timestamp(self, first_failure_date) + MIN_DELTA_BEFORE_DEACTIVATION < now
):
failure_count = 0
first_failure_date = None
active = False
self._notify_admin(_(
"Cron job %(name)s (%(id)s) has been deactivated after failing %(count)s times. "
"More information can be found in the server logs around %(time)s.",
name=repr(job['cron_name']),
id=job['id'],
count=MIN_FAILURE_COUNT_BEFORE_DEACTIVATION,
time=datetime.replace(datetime.utcnow(), microsecond=0),
))
else:
failure_count = 0
first_failure_date = None
active = job['active']
self.env.cr.execute("""
UPDATE ir_cron
SET nextcall=%s,
numbercall=%s,
lastcall=%s,
active=%s
WHERE id=%s
SET failure_count = %s,
first_failure_date = %s,
active = %s
WHERE id = %s
""", [
fields.Datetime.to_string(future_nextcall.astimezone(pytz.UTC)),
call_count_left,
fields.Datetime.to_string(now.astimezone(pytz.UTC)),
job['active'] and bool(call_count_left),
failure_count,
first_failure_date,
active,
job['id'],
])
cron_cr.execute("""
def _reschedule_later(self, job):
"""
Reschedule the job to be executed later, after its regular
interval or upon a trigger.
"""
# Use the user's timezone to compare and compute datetimes, otherwise unexpected results may appear.
# For instance, adding 1 month in UTC to July 1st at midnight in GMT+2 gives July 30 instead of August 1st!
now = fields.Datetime.context_timestamp(self, datetime.utcnow())
nextcall = fields.Datetime.context_timestamp(self, job['nextcall'])
interval = _intervalTypes[job['interval_type']](job['interval_number'])
while nextcall <= now:
nextcall += interval
_logger.info('Job %r (%s) completed', job['cron_name'], job['id'])
self.env.cr.execute("""
UPDATE ir_cron
SET nextcall = %s,
lastcall = %s
WHERE id = %s
""", [
fields.Datetime.to_string(nextcall.astimezone(pytz.UTC)),
fields.Datetime.to_string(now.astimezone(pytz.UTC)),
job['id'],
])
self.env.cr.execute("""
DELETE FROM ir_cron_trigger
WHERE cron_id = %s
AND call_at < (now() at time zone 'UTC')
""", [job['id']])
@api.model
def _callback(self, cron_name, server_action_id, job_id):
def _reschedule_asap(self, job):
"""
Reschedule the job to be executed ASAP, after the other cron
jobs had a chance to run.
"""
# leave the existing nextcall and triggers, this leaves the job "ready"
pass
def _callback(self, cron_name, server_action_id):
""" Run the method associated to a given job. It takes care of logging
and exception handling. Note that the user running the server action
is the user calling this method. """
self.ensure_one()
try:
if self.pool != self.pool.check_signaling():
# the registry has changed, reload self in the new registry
self.env.reset()
self = self.env()[self._name]
log_depth = (None if _logger.isEnabledFor(logging.DEBUG) else 1)
odoo.netsvc.log(_logger, logging.DEBUG, 'cron.object.execute', (self._cr.dbname, self._uid, '*', cron_name, server_action_id), depth=log_depth)
_logger.info('Starting job `%s`.', cron_name)
_logger.debug(
"cron.object.execute(%r, %d, '*', %r, %d)",
self.env.cr.dbname,
self._uid,
cron_name,
server_action_id,
)
_logger.info('Job %r (%s) starting', cron_name, self.id)
start_time = time.time()
self.env['ir.actions.server'].browse(server_action_id).run()
self.env.flush_all()
end_time = time.time()
_logger.info('Job done: `%s` (%.3fs).', cron_name, end_time - start_time)
_logger.info('Job %r (%s) done in %.3fs', cron_name, self.id, end_time - start_time)
if start_time and _logger.isEnabledFor(logging.DEBUG):
_logger.debug('%.3fs (cron %s, server action %d with uid %d)', end_time - start_time, cron_name, server_action_id, self.env.uid)
_logger.debug('Job %r (%s) server action #%s with uid %s executed in %.3fs',
cron_name, self.id, server_action_id, self.env.uid, end_time - start_time)
self.pool.signal_changes()
except Exception as e:
except Exception:
self.pool.reset_changes()
_logger.exception("Call from cron %s for server action #%s failed in Job #%s",
cron_name, server_action_id, job_id)
self._handle_callback_exception(cron_name, server_action_id, job_id, e)
@api.model
def _handle_callback_exception(self, cron_name, server_action_id, job_id, job_exception):
""" Method called when an exception is raised by a job.
Simply logs the exception and rollback the transaction. """
self._cr.rollback()
_logger.exception('Job %r (%s) server action #%s failed', cron_name, self.id, server_action_id)
self.env.cr.rollback()
raise
def _try_lock(self, lockfk=False):
"""Try to grab a dummy exclusive write-lock to the rows with the given ids,
@ -416,7 +587,7 @@ class ir_cron(models.Model):
to a process currently executing those cron tasks.
:param lockfk: acquire a strong row lock which conflicts with
the lock aquired by foreign keys when they
the lock acquired by foreign keys when they
reference this row.
"""
if not self:
@ -475,17 +646,18 @@ class ir_cron(models.Model):
Schedule a cron job to be executed soon independently of its
``nextcall`` field value.
By default the cron is scheduled to be executed in the next batch but
the optional `at` argument may be given to delay the execution later
with a precision down to 1 minute.
By default, the cron is scheduled to be executed the next time
the cron worker wakes up, but the optional `at` argument may be
given to delay the execution later, with a precision down to 1
minute.
The method may be called with a datetime or an iterable of datetime.
The actual implementation is in :meth:`~._trigger_list`, which is the
recommended method for overrides.
The method may be called with a datetime or an iterable of
datetime. The actual implementation is in :meth:`~._trigger_list`,
which is the recommended method for overrides.
:param Optional[Union[datetime.datetime, list[datetime.datetime]]] at:
When to execute the cron, at one or several moments in time instead
of as soon as possible.
When to execute the cron, at one or several moments in time
instead of as soon as possible.
:return: the created triggers records
:rtype: recordset
"""
@ -524,7 +696,7 @@ class ir_cron(models.Model):
])
if _logger.isEnabledFor(logging.DEBUG):
ats = ', '.join(map(str, at_list))
_logger.debug("will execute '%s' at %s", self.sudo().name, ats)
_logger.debug('Job %r (%s) will execute at %s', self.sudo().name, self.id, ats)
if min(at_list) <= now or os.getenv('ODOO_NOTIFY_CRON_CHANGES'):
self._cr.postcommit.add(self._notifydb)
@ -536,10 +708,47 @@ class ir_cron(models.Model):
ir_cron modification and on trigger creation (regardless of call_at)
"""
with odoo.sql_db.db_connect('postgres').cursor() as cr:
query = sql.SQL("SELECT {}('cron_trigger', %s)").format(sql.Identifier(ODOO_NOTIFY_FUNCTION))
cr.execute(query, [self.env.cr.dbname])
cr.execute(SQL("SELECT %s('cron_trigger', %s)", SQL.identifier(ODOO_NOTIFY_FUNCTION), self.env.cr.dbname))
_logger.debug("cron workers notified")
def _add_progress(self, *, timed_out_counter=None):
"""
Create a progress record for the given cron and add it to its
context.
:param int timed_out_counter: the number of times the cron has
consecutively timed out
:return: a pair ``(cron, progress)``, where the progress has
been injected inside the cron's context
"""
progress = self.env['ir.cron.progress'].sudo().create([{
'cron_id': self.id,
'remaining': 0,
'done': 0,
# we use timed_out_counter + 1 so that if the current execution
# times out, the counter already takes it into account
'timed_out_counter': 0 if timed_out_counter is None else timed_out_counter + 1,
}])
return self.with_context(ir_cron_progress_id=progress.id), progress
def _notify_progress(self, *, done, remaining, deactivate=False):
"""
Log the progress of the cron job.
:param int done: the number of tasks already processed
:param int remaining: the number of tasks left to process
:param bool deactivate: whether the cron will be deactivated
"""
if not (progress_id := self.env.context.get('ir_cron_progress_id')):
return
if done < 0 or remaining < 0:
raise ValueError("`done` and `remaining` must be positive integers.")
self.env['ir.cron.progress'].sudo().browse(progress_id).write({
'remaining': remaining,
'done': done,
'deactivate': deactivate,
})
class ir_cron_trigger(models.Model):
_name = 'ir.cron.trigger'
@ -557,3 +766,19 @@ class ir_cron_trigger(models.Model):
if len(records) >= models.GC_UNLINK_LIMIT:
self.env.ref('base.autovacuum_job')._trigger()
return records.unlink()
class ir_cron_progress(models.Model):
_name = 'ir.cron.progress'
_description = 'Progress of Scheduled Actions'
_rec_name = 'cron_id'
cron_id = fields.Many2one("ir.cron", required=True, index=True, ondelete='cascade')
remaining = fields.Integer(default=0)
done = fields.Integer(default=0)
deactivate = fields.Boolean()
timed_out_counter = fields.Integer(default=0)
@api.autovacuum
def _gc_cron_progress(self):
self.search([('create_date', '<', datetime.now() - relativedelta(weeks=1))]).unlink()

View file

@ -2,9 +2,11 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
from datetime import date
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools, _, SUPERUSER_ID
from odoo.exceptions import ValidationError
from odoo.tools import SQL
class IrDefault(models.Model):
@ -33,18 +35,24 @@ class IrDefault(models.Model):
@api.model_create_multi
def create(self, vals_list):
# invalidate all company dependent fields since their fallback value in cache may be changed
self.env.invalidate_all()
self.env.registry.clear_cache()
return super(IrDefault, self).create(vals_list)
def write(self, vals):
if self:
# invalidate all company dependent fields since their fallback value in cache may be changed
self.env.invalidate_all()
self.env.registry.clear_cache()
new_default = super().write(vals)
self.check_access_rule('write')
self.check_access('write')
return new_default
def unlink(self):
if self:
# invalidate all company dependent fields since their fallback value in cache may be changed
self.env.invalidate_all()
self.env.registry.clear_cache()
return super(IrDefault, self).unlink()
@ -76,13 +84,15 @@ class IrDefault(models.Model):
model = self.env[model_name]
field = model._fields[field_name]
parsed = field.convert_to_cache(value, model)
if field.type in ('date', 'datetime') and isinstance(value, date):
value = field.to_string(value)
json_value = json.dumps(value, ensure_ascii=False)
except KeyError:
raise ValidationError(_("Invalid field %s.%s", model_name, field_name))
raise ValidationError(_("Invalid field %(model)s.%(field)s", model=model_name, field=field_name))
except Exception:
raise ValidationError(_("Invalid value for %s.%s: %s", model_name, field_name, value))
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s", model=model_name, field=field_name, value=value))
if field.type == 'integer' and not (-2**31 < parsed < 2**31-1):
raise ValidationError(_("Invalid value for %s.%s: %s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model_name, field_name, value))
raise ValidationError(_("Invalid value for %(model)s.%(field)s: %(value)s is out of bounds (integers should be between -2,147,483,648 and 2,147,483,647)", model=model_name, field=field_name, value=value))
# update existing default for the same scope, or create one
field = self.env['ir.model.fields']._get(model_name, field_name)
@ -146,6 +156,7 @@ class IrDefault(models.Model):
current user), as a dict mapping field names to values.
"""
cr = self.env.cr
self.flush_model()
query = """ SELECT f.name, d.json_value
FROM ir_default d
JOIN ir_model_fields f ON d.field_id=f.id
@ -188,3 +199,30 @@ class IrDefault(models.Model):
json_vals = [json.dumps(value, ensure_ascii=False) for value in values]
domain = [('field_id', '=', field.id), ('json_value', 'in', json_vals)]
return self.search(domain).unlink()
@tools.ormcache('model_name', 'field_name')
def _get_field_column_fallbacks(self, model_name, field_name):
company_ids = self.env.execute_query(SQL('SELECT ARRAY_AGG(id) FROM res_company'))[0][0]
field = self.env[model_name]._fields[field_name]
self_super = self.with_user(SUPERUSER_ID)
return json.dumps({
id_: field.convert_to_column(
self_super.with_company(id_)._get_model_defaults(model_name).get(field_name),
self_super.with_company(id_)
)
for id_ in company_ids
})
def _evaluate_condition_with_fallback(self, model_name, condition):
"""
When the field value of the condition is company_dependent without
customization, evaluate whether its fallback value will be kept by
the condition.
Return True / False / None (for unknown).
"""
field_name = condition[0].split('.', 1)[0]
model = self.env[model_name]
field = model._fields[field_name]
fallback = field.get_company_dependent_fallback(model)
record = model.new({field_name: field.convert_to_write(fallback, model)})
return bool(record.filtered_domain([condition]))

View file

@ -17,5 +17,5 @@ class IrDemo(models.TransientModel):
return {
'type': 'ir.actions.act_url',
'target': 'self',
'url': '/web',
'url': '/odoo',
}

View file

@ -0,0 +1,106 @@
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from ast import literal_eval
class IrEmbeddedActions(models.Model):
_name = 'ir.embedded.actions'
_description = 'Embedded Actions'
_order = 'sequence, id'
name = fields.Char(translate=True)
sequence = fields.Integer()
parent_action_id = fields.Many2one('ir.actions.act_window', required=True, string='Parent Action', ondelete="cascade")
parent_res_id = fields.Integer(string="Active Parent Id")
parent_res_model = fields.Char(string='Active Parent Model', required=True)
# It is required to have either action_id or python_method
action_id = fields.Many2one('ir.actions.actions', string="Action", ondelete="cascade")
python_method = fields.Char(help="Python method returning an action")
user_id = fields.Many2one('res.users', string="User", help="User specific embedded action. If empty, shared embedded action", ondelete="cascade")
is_deletable = fields.Boolean(compute="_compute_is_deletable")
default_view_mode = fields.Char(string="Default View", help="Default view (if none, default view of the action is taken)")
filter_ids = fields.One2many("ir.filters", "embedded_action_id", help="Default filter of the embedded action (if none, no filters)")
is_visible = fields.Boolean(string="Embedded visibility", help="Computed field to check if the record should be visible according to the domain", compute="_compute_is_visible")
domain = fields.Char(default="[]", help="Domain applied to the active id of the parent model")
context = fields.Char(default="{}", help="Context dictionary as Python expression, empty by default (Default: {})")
groups_ids = fields.Many2many('res.groups', help='Groups that can execute the embedded action. Leave empty to allow everybody.')
_sql_constraints = [
(
'check_only_one_action_defined',
"""CHECK(
(action_id IS NOT NULL AND python_method IS NULL) OR
(action_id IS NULL AND python_method IS NOT NULL)
)""",
'Constraint to ensure that either an XML action or a python_method is defined, but not both.'
), (
'check_python_method_requires_name',
"""CHECK(
NOT (python_method IS NOT NULL AND name IS NULL)
)""",
'Constraint to ensure that if a python_method is defined, then the name must also be defined.'
)
]
@api.model_create_multi
def create(self, vals_list):
# The name by default is computed based on the triggered action if an action_id is defined.
for vals in vals_list:
if "name" not in vals:
vals["name"] = self.env["ir.actions.actions"].browse(vals["action_id"]).name
if "python_method" in vals and "action_id" in vals:
if vals.get("python_method"):
# then remove the action_id since the action is surely given by the python method.
del vals["action_id"]
else: # remove python_method in the vals since the vals is falsy.
del vals["python_method"]
return super().create(vals_list)
# The record is deletable if it hasn't been created from an xml record (i.e. is not a default embedded action)
def _compute_is_deletable(self):
external_ids = self._get_external_ids()
for record in self:
record_external_ids = external_ids[record.id]
record.is_deletable = all(
ex_id.startswith(("__export__", "__custom__")) for ex_id in record_external_ids
)
# Compute if the record should be visible to the user based on the domain applied to the active id of the parent
# model and based on the groups allowed to access the record.
def _compute_is_visible(self):
active_id = self.env.context.get("active_id", False)
if not active_id:
self.is_visible = False
return
domain_id = [("id", "=", active_id)]
for parent_res_model, records in self.grouped('parent_res_model').items():
active_model_record = self.env[parent_res_model].search(domain_id, order='id')
for record in records:
action_groups = record.groups_ids
if not action_groups or (action_groups & self.env.user.groups_id):
domain_model = literal_eval(record.domain or '[]')
record.is_visible = (
record.parent_res_id in (False, self.env.context.get('active_id', False))
and record.user_id.id in (False, self.env.uid)
and active_model_record.filtered_domain(domain_model)
)
else:
record.is_visible = False
# Delete the filters linked to an embedded action.
@api.ondelete(at_uninstall=False)
def _unlink_if_action_deletable(self):
for record in self:
if not record.is_deletable:
raise UserError(_('You cannot delete a default embedded action'))
def _get_readable_fields(self):
""" return the list of fields that are safe to read
"""
return {
"name", "parent_action_id", "parent_res_id", "parent_res_model", "action_id", "python_method", "user_id",
"is_deletable", "default_view_mode", "filter_ids", "domain", "context", "groups_ids"
}

View file

@ -4,13 +4,16 @@
import json
import functools
import itertools
from typing import NamedTuple
import psycopg2
import pytz
from odoo import api, Command, fields, models, _
from odoo.tools import ustr, OrderedSet
from odoo.tools.translate import code_translations, _lt
from odoo import api, Command, fields, models
from odoo.tools import OrderedSet
from odoo.tools.translate import _, code_translations, LazyTranslate
_lt = LazyTranslate(__name__)
REFERENCING_FIELDS = {None, 'id', '.id'}
def only_ref_fields(record):
@ -26,6 +29,12 @@ BOOLEAN_TRANSLATIONS = (
_lt('false')
)
class FakeField(NamedTuple):
comodel_name: str
name: str
class ImportWarning(Warning):
""" Used to send warnings upwards the stack during the import process """
pass
@ -201,8 +210,99 @@ class IrFieldsConverter(models.AbstractModel):
raise self._format_import_error(ValueError, msg, value)
def _str_to_properties(self, model, field, value):
msg = _("Unable to import field type '%s' ", field.type)
raise self._format_import_error(ValueError, msg)
# If we want to import the all properties at once (with the technical value)
if isinstance(value, str):
try:
value = json.loads(value)
except ValueError:
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg)
if not isinstance(value, list):
msg = _("Unable to import'%%(field)s' Properties field as a whole, target individual property instead.")
raise self._format_import_error(ValueError, msg, {'value': value})
warnings = []
for property_dict in value:
if not (property_dict.keys() >= {'name', 'type', 'string'}):
msg = _("'%(value)s' does not seem to be a valid Property value for field '%%(field)s'. Each property need at least 'name', 'type' and 'string' attribute.")
raise self._format_import_error(ValueError, msg, {'value': property_dict})
val = property_dict.get('value')
if not val:
property_dict.pop('value', None)
continue
property_type = property_dict['type']
if property_type == 'selection':
# either label or the technical value
new_val = next(iter(
sel_val for sel_val, sel_label in property_dict['selection']
if val in (sel_val, sel_label)
), None)
if not new_val:
msg = _("'%(value)s' does not seem to be a valid Selection value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
property_dict['value'] = new_val
elif property_type == 'tags':
tags = val.split(',')
new_val = []
for tag in tags:
val_tag = next(iter(
tag_val for tag_val, tag_label, _color in property_dict['tags']
if tag in (tag_val, tag_label)
), None)
if not val_tag:
msg = _("'%(value)s' does not seem to be a valid Tag value for '%(label_property)s' (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': tag, 'label_property': property_dict['string']})
new_val.append(val_tag)
property_dict['value'] = new_val
elif property_type == 'boolean':
new_val, warnings = self._str_to_boolean(model, field, val)
if not warnings:
property_dict['value'] = new_val
else:
msg = _("Unknown value '%(value)s' for boolean '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type in ('many2one', 'many2many'):
[record] = property_dict['value']
subfield, w1 = self._referencing_subfield(record)
if w1:
warnings.append(w1)
values = record[subfield]
references = values.split(',') if property_type == 'many2many' else [values]
ids = []
fake_field = FakeField(comodel_name=property_dict['comodel'], name=property_dict['string'])
for reference in references:
id_, __, ws = self.db_id_for(model, fake_field, subfield, reference)
ids.append(id_)
warnings.extend(ws)
property_dict['value'] = ids if property_type == 'many2many' else ids[0]
elif property_type == 'integer':
try:
property_dict['value'] = int(val)
except ValueError:
msg = _("'%(value)s' does not seem to be an integer for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
elif property_type == 'float':
try:
property_dict['value'] = float(val)
except ValueError:
msg = _("'%(value)s' does not seem to be an float for field '%(label_property)s' property (subfield of '%%(field)s' field).")
raise self._format_import_error(ValueError, msg, {'value': val, 'label_property': property_dict['string']})
return value, warnings
@api.model
def _str_to_boolean(self, model, field, value):
@ -365,7 +465,6 @@ class IrFieldsConverter(models.AbstractModel):
selection = field.get_description(env)['selection']
for item, label in selection:
label = ustr(label)
if callable(field.selection):
labels = [label]
for item2, label2 in field._description_selection(self.env):
@ -417,7 +516,7 @@ class IrFieldsConverter(models.AbstractModel):
action = {
'name': 'Possible Values',
'type': 'ir.actions.act_window', 'target': 'new',
'view_mode': 'tree,form',
'view_mode': 'list,form',
'views': [(False, 'list'), (False, 'form')],
'context': {'create': False},
'help': _(u"See all possible values")}
@ -461,9 +560,9 @@ class IrFieldsConverter(models.AbstractModel):
if ids:
if len(ids) > 1:
warnings.append(ImportWarning(_(
"Found multiple matches for value %r in field %%(field)r (%d matches)",
str(value).replace('%', '%%'),
len(ids),
'Found multiple matches for value "%(value)s" in field "%%(field)s" (%(match_count)s matches)',
value=str(value).replace('%', '%%'),
match_count=len(ids),
)))
id, _name = ids[0]
else:
@ -477,7 +576,7 @@ class IrFieldsConverter(models.AbstractModel):
else:
raise self._format_import_error(
Exception,
_("Unknown sub-field %r", subfield)
_("Unknown sub-field %s", subfield),
)
set_empty = False

View file

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools
from odoo.exceptions import UserError
from odoo.tools.safe_eval import safe_eval, datetime
@ -17,13 +17,15 @@ class IrFilters(models.Model):
"and available to all users.")
domain = fields.Text(default='[]', required=True)
context = fields.Text(default='{}', required=True)
sort = fields.Text(default='[]', required=True)
sort = fields.Char(default='[]', required=True)
model_id = fields.Selection(selection='_list_all_models', string='Model', required=True)
is_default = fields.Boolean(string='Default Filter')
action_id = fields.Many2one('ir.actions.actions', string='Action', ondelete='cascade',
help="The menu action this filter applies to. "
"When left empty the filter applies to all menus "
"for this model.")
embedded_action_id = fields.Many2one('ir.embedded.actions', help="The embedded action this filter is applied to", ondelete="cascade")
embedded_parent_res_id = fields.Integer(help="id of the record the filter should be applied to. Only used in combination with embedded actions")
active = fields.Boolean(default=True)
@api.model
@ -35,16 +37,20 @@ class IrFilters(models.Model):
)
return self._cr.fetchall()
def copy_data(self, default=None):
vals_list = super().copy_data(default=default)
# NULL Integer field value read as 0, wouldn't matter except in this case will trigger
# check_res_id_only_when_embedded_action
for vals in vals_list:
if vals.get('embedded_parent_res_id') == 0:
del vals['embedded_parent_res_id']
return [dict(vals, name=self.env._("%s (copy)", ir_filter.name)) for ir_filter, vals in zip(self, vals_list)]
def write(self, vals):
new_filter = super().write(vals)
self.check_access_rule('write')
self.check_access('write')
return new_filter
def copy(self, default=None):
self.ensure_one()
default = dict(default or {}, name=_('%s (copy)', self.name))
return super(IrFilters, self).copy(default)
def _get_eval_domain(self):
self.ensure_one()
return safe_eval(self.domain, {
@ -53,17 +59,17 @@ class IrFilters(models.Model):
})
@api.model
def _get_action_domain(self, action_id=None):
def _get_action_domain(self, action_id=None, embedded_action_id=None, embedded_parent_res_id=None):
"""Return a domain component for matching filters that are visible in the
same context (menu/view) as the given action."""
if action_id:
# filters specific to this menu + global ones
return [('action_id', 'in', [action_id, False])]
# only global ones
return [('action_id', '=', False)]
action_condition = ('action_id', 'in', [action_id, False]) if action_id else ('action_id', '=', False)
embedded_condition = ('embedded_action_id', '=', embedded_action_id) if embedded_action_id else ('embedded_action_id', '=', False)
embedded_parent_res_id_condition = ('embedded_parent_res_id', '=', embedded_parent_res_id) if embedded_action_id and embedded_parent_res_id else ('embedded_parent_res_id', 'in', [0, False])
return [action_condition, embedded_condition, embedded_parent_res_id_condition]
@api.model
def get_filters(self, model, action_id=None):
def get_filters(self, model, action_id=None, embedded_action_id=None, embedded_parent_res_id=None):
"""Obtain the list of filters available for the user on the given model.
:param int model: id of model to find filters for
@ -73,15 +79,16 @@ class IrFilters(models.Model):
a contextual action.
:return: list of :meth:`~osv.read`-like dicts containing the
``name``, ``is_default``, ``domain``, ``user_id`` (m2o tuple),
``action_id`` (m2o tuple) and ``context`` of the matching ``ir.filters``.
``action_id`` (m2o tuple), ``embedded_action_id`` (m2o tuple), ``embedded_parent_res_id``
and ``context`` of the matching ``ir.filters``.
"""
# available filters: private filters (user_id=uid) and public filters (uid=NULL),
# and filters for the action (action_id=action_id) or global (action_id=NULL)
user_context = self.env['res.users'].context_get()
action_domain = self._get_action_domain(action_id)
action_domain = self._get_action_domain(action_id, embedded_action_id, embedded_parent_res_id)
return self.with_context(user_context).search_read(
action_domain + [('model_id', '=', model), ('user_id', 'in', [self._uid, False])],
['name', 'is_default', 'domain', 'context', 'user_id', 'sort'],
['name', 'is_default', 'domain', 'context', 'user_id', 'sort', 'embedded_action_id', 'embedded_parent_res_id'],
)
@api.model
@ -101,7 +108,7 @@ class IrFilters(models.Model):
:raises odoo.exceptions.UserError: if there is an existing default and
we're not updating it
"""
domain = self._get_action_domain(vals.get('action_id'))
domain = self._get_action_domain(vals.get('action_id'), vals.get('embedded_action_id'), vals.get('embedded_parent_res_id'))
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', False),
@ -113,13 +120,17 @@ class IrFilters(models.Model):
if matching_filters and (matching_filters[0]['id'] == defaults.id):
return
raise UserError(_("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default", model=vals.get('model_id')))
raise UserError(self.env._("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default", model=vals.get('model_id')))
@api.model
@api.returns('self', lambda value: value.id)
def create_or_replace(self, vals):
action_id = vals.get('action_id')
current_filters = self.get_filters(vals['model_id'], action_id)
embedded_action_id = vals.get('embedded_action_id')
if not embedded_action_id and 'embedded_parent_res_id' in vals:
del vals['embedded_parent_res_id']
embedded_parent_res_id = vals.get('embedded_parent_res_id')
current_filters = self.get_filters(vals['model_id'], action_id, embedded_action_id, embedded_parent_res_id)
matching_filters = [f for f in current_filters
if f['name'].lower() == vals['name'].lower()
# next line looks for matching user_ids (specific or global), i.e.
@ -131,7 +142,7 @@ class IrFilters(models.Model):
if vals.get('user_id'):
# Setting new default: any other default that belongs to the user
# should be turned off
domain = self._get_action_domain(action_id)
domain = self._get_action_domain(action_id, embedded_action_id, embedded_parent_res_id)
defaults = self.search(domain + [
('model_id', '=', vals['model_id']),
('user_id', '=', vals['user_id']),
@ -155,12 +166,26 @@ class IrFilters(models.Model):
# Partial constraint, complemented by unique index (see below). Still
# useful to keep because it provides a proper error message when a
# violation occurs, as it shares the same prefix as the unique index.
('name_model_uid_unique', 'unique (model_id, user_id, action_id, name)', 'Filter names must be unique'),
('name_model_uid_unique', 'unique (model_id, user_id, action_id, embedded_action_id, embedded_parent_res_id, name)',
'Filter names must be unique'),
# The embedded_parent_res_id can only be defined when the embedded_action_id field is set.
# As the embedded model is linked to only one res_model, It ensure the unicity of the filter regarding the
# embedded_parent_res_model and the embedded_parent_res_id
(
'check_res_id_only_when_embedded_action',
"""CHECK(
NOT (embedded_parent_res_id IS NOT NULL AND embedded_action_id IS NULL)
)""",
'Constraint to ensure that the embedded_parent_res_id is only defined when a top_action_id is defined.'
),
('check_sort_json', "CHECK(sort IS NULL OR jsonb_typeof(sort::jsonb) = 'array')", 'Invalid sort definition'),
]
def _auto_init(self):
result = super(IrFilters, self)._auto_init()
# Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
tools.create_unique_index(self._cr, 'ir_filters_name_model_uid_unique_action_index',
self._table, ['model_id', 'COALESCE(user_id,-1)', 'COALESCE(action_id,-1)', 'lower(name)'])
self._table, ['model_id', 'COALESCE(user_id,-1)', 'COALESCE(action_id,-1)',
'lower(name)', 'embedded_parent_res_id', 'COALESCE(embedded_action_id,-1)'])
return result

View file

@ -2,16 +2,13 @@
#----------------------------------------------------------
# ir_http modular http routing
#----------------------------------------------------------
import base64
import hashlib
import json
import logging
import mimetypes
import os
import re
import sys
import traceback
import threading
import unicodedata
import werkzeug
import werkzeug.exceptions
@ -23,17 +20,35 @@ try:
except ImportError:
from werkzeug.routing.converters import NumberConverter # moved in werkzeug 2.2.2
# optional python-slugify import (https://github.com/un33k/python-slugify)
try:
import slugify as slugify_lib
except ImportError:
slugify_lib = None
import odoo
from odoo import api, http, models, tools, SUPERUSER_ID
from odoo.exceptions import AccessDenied, AccessError, MissingError
from odoo.http import request, Response, ROUTING_KEYS, Stream
from odoo.exceptions import AccessDenied
from odoo.http import request, Response, ROUTING_KEYS
from odoo.modules.registry import Registry
from odoo.service import security
from odoo.tools import get_lang, submap
from odoo.tools.json import json_default
from odoo.tools.misc import get_lang, submap
from odoo.tools.translate import code_translations
_logger = logging.getLogger(__name__)
# see also mimetypes module: https://docs.python.org/3/library/mimetypes.html and odoo.tools.mimetypes
EXTENSION_TO_WEB_MIMETYPES = {
'.css': 'text/css',
'.less': 'text/less',
'.scss': 'text/scss',
'.js': 'text/javascript',
'.xml': 'text/xml',
'.csv': 'text/csv',
'.html': 'text/html',
}
class RequestUID(object):
def __init__(self, **kw):
@ -47,13 +62,17 @@ class ModelConverter(werkzeug.routing.BaseConverter):
super().__init__(url_map)
self.model = model
def to_python(self, value):
IrHttp = Registry(threading.current_thread().dbname)['ir.http']
self.slug = IrHttp._slug
self.unslug = IrHttp._unslug
def to_python(self, value: str) -> models.BaseModel:
_uid = RequestUID(value=value, converter=self)
env = api.Environment(request.cr, _uid, request.context)
return env[self.model].browse(int(value))
return env[self.model].browse(self.unslug(value)[1])
def to_url(self, value):
return value.id
def to_url(self, value: models.BaseModel) -> str:
return self.slug(value)
class ModelsConverter(werkzeug.routing.BaseConverter):
@ -63,12 +82,12 @@ class ModelsConverter(werkzeug.routing.BaseConverter):
super().__init__(url_map)
self.model = model
def to_python(self, value):
def to_python(self, value: str) -> models.BaseModel:
_uid = RequestUID(value=value, converter=self)
env = api.Environment(request.cr, _uid, request.context)
return env[self.model].browse(int(v) for v in value.split(','))
def to_url(self, value):
def to_url(self, value: models.BaseModel) -> str:
return ",".join(value.ids)
@ -117,12 +136,59 @@ class IrHttp(models.AbstractModel):
_name = 'ir.http'
_description = "HTTP Routing"
@classmethod
def _slugify_one(cls, value: str, max_length: int = 0) -> str:
""" Transform a string to a slug that can be used in a url path.
This method will first try to do the job with python-slugify if present.
Otherwise it will process string by stripping leading and ending spaces,
converting unicode chars to ascii, lowering all chars and replacing spaces
and underscore with hyphen "-".
"""
if slugify_lib:
# There are 2 different libraries only python-slugify is supported
try:
return slugify_lib.slugify(value, max_length=max_length)
except TypeError:
pass
uni = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
slug_str = re.sub(r'[\W_]+', '-', uni).strip('-').lower()
return slug_str[:max_length] if max_length > 0 else slug_str
@classmethod
def _slugify(cls, value: str, max_length: int = 0, path: bool = False) -> str:
if not path:
return cls._slugify_one(value, max_length=max_length)
else:
res = []
for u in value.split('/'):
s = cls._slugify_one(u, max_length=max_length)
if s:
res.append(s)
# check if supported extension
path_no_ext, ext = os.path.splitext(value)
if ext in EXTENSION_TO_WEB_MIMETYPES:
res[-1] = cls._slugify_one(path_no_ext) + ext
return '/'.join(res)
@classmethod
def _slug(cls, value: models.BaseModel | tuple[int, str]) -> str:
if isinstance(value, tuple):
return str(value[0])
return str(value.id)
@classmethod
def _unslug(cls, value: str) -> tuple[str | None, int] | tuple[None, None]:
try:
return None, int(value)
except ValueError:
return None, None
#------------------------------------------------------
# Routing map
#------------------------------------------------------
@classmethod
def _get_converters(cls):
def _get_converters(cls) -> dict[str, type]:
return {'model': ModelConverter, 'models': ModelsConverter, 'int': SignedIntConverter}
@classmethod
@ -134,6 +200,48 @@ class IrHttp(models.AbstractModel):
def _get_public_users(cls):
return [request.env['ir.model.data']._xmlid_to_res_model_res_id('base.public_user')[1]]
@classmethod
def _auth_method_bearer(cls):
headers = request.httprequest.headers
def get_http_authorization_bearer_token():
# werkzeug<2.3 doesn't expose `authorization.token` (for bearer authentication)
# check header directly
header = headers.get("Authorization")
if header and (m := re.match(r"^bearer\s+(.+)$", header, re.IGNORECASE)):
return m.group(1)
return None
def check_sec_headers():
"""Protection against CSRF attacks.
Modern browsers automatically add Sec- headers that we can check to protect against CSRF.
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Sec-Fetch-User
"""
return (
headers.get("Sec-Fetch-Dest") == "document"
and headers.get("Sec-Fetch-Mode") == "navigate"
and headers.get("Sec-Fetch-Site") in ('none', 'same-origin')
and headers.get("Sec-Fetch-User") == "?1"
)
if token := get_http_authorization_bearer_token():
# 'rpc' scope does not really exist, we basically require a global key (scope NULL)
uid = request.env['res.users.apikeys']._check_credentials(scope='rpc', key=token)
if not uid:
raise werkzeug.exceptions.Unauthorized(
"Invalid apikey",
www_authenticate=werkzeug.datastructures.WWWAuthenticate('bearer'))
if request.env.uid and request.env.uid != uid:
raise AccessDenied("Session user does not match the used apikey")
request.update_env(user=uid)
elif not request.env.uid:
raise werkzeug.exceptions.Unauthorized(
'User not authenticated, use the "Authorization" header',
www_authenticate=werkzeug.datastructures.WWWAuthenticate('bearer'))
elif not check_sec_headers():
raise AccessDenied("Missing \"Authorization\" or Sec-headers for interactive usage")
cls._auth_method_user()
@classmethod
def _auth_method_user(cls):
if request.env.uid in [None] + cls._get_public_users():
@ -152,10 +260,13 @@ class IrHttp(models.AbstractModel):
@classmethod
def _authenticate(cls, endpoint):
auth = 'none' if http.is_cors_preflight(request, endpoint) else endpoint.routing['auth']
cls._authenticate_explicit(auth)
@classmethod
def _authenticate_explicit(cls, auth):
try:
if request.session.uid is not None:
if not security.check_session(request.session, request.env):
if not security.check_session(request.session, request.env, request):
request.session.logout(keep_db=True)
request.env = api.Environment(request.env.cr, None, request.session.context)
getattr(cls, f'_auth_method_{auth}')()
@ -169,6 +280,10 @@ class IrHttp(models.AbstractModel):
def _geoip_resolve(cls):
return request._geoip_resolve()
@classmethod
def _sanitize_cookies(cls, cookies):
pass
@classmethod
def _pre_dispatch(cls, rule, args):
ICP = request.env['ir.config_parameter'].with_user(SUPERUSER_ID)
@ -188,25 +303,22 @@ class IrHttp(models.AbstractModel):
request.dispatcher.pre_dispatch(rule, args)
# Replace uid placeholder by the current request.env.uid
for key, val in list(args.items()):
if isinstance(val, models.BaseModel) and isinstance(val._uid, RequestUID):
args[key] = val.with_user(request.env.uid)
# verify the default language set in the context is valid,
# otherwise fallback on the company lang, english or the first
# lang installed
env = request.env if request.env.uid else request.env['base'].with_user(SUPERUSER_ID).env
request.update_context(lang=get_lang(env)._get_cached('code'))
request.update_context(lang=get_lang(env).code)
for key, val in list(args.items()):
if not isinstance(val, models.BaseModel):
continue
# Replace uid and lang placeholder by the current request.env.uid and request.env.lang
args[key] = val.with_env(request.env)
try:
# explicitly crash now, instead of crashing later
args[key].check_access_rights('read')
args[key].check_access_rule('read')
args[key].check_access('read')
except (odoo.exceptions.AccessError, odoo.exceptions.MissingError) as e:
# custom behavior in case a record is not accessible / has been removed
if handle_error := rule.endpoint.routing.get('handle_params_access_error'):
@ -240,7 +352,7 @@ class IrHttp(models.AbstractModel):
model = request.env['ir.attachment']
attach = model.sudo()._get_serve_attachment(request.httprequest.path)
if attach and (attach.store_fname or attach.db_datas):
return Stream.from_attachment(attach).get_response()
return attach._to_http_stream().get_response()
@classmethod
def _redirect(cls, location, code=303):
@ -273,9 +385,7 @@ class IrHttp(models.AbstractModel):
def _gc_sessions(self):
if os.getenv("ODOO_SKIP_GC_SESSIONS"):
return
ICP = self.env["ir.config_parameter"]
max_lifetime = int(ICP.get_param('sessions.max_inactivity_seconds', http.SESSION_LIFETIME))
http.root.session_store.vacuum(max_lifetime=max_lifetime)
http.root.session_store.vacuum(max_lifetime=http.get_session_max_inactivity(self.env))
@api.model
def get_translations_for_webclient(self, modules, lang):
@ -283,21 +393,19 @@ class IrHttp(models.AbstractModel):
modules = self.pool._init_modules
if not lang:
lang = self._context.get("lang")
langs = self.env['res.lang']._lang_get(lang)
lang_params = None
if langs:
lang_params = {
"name": langs.name,
"direction": langs.direction,
"date_format": langs.date_format,
"time_format": langs.time_format,
"grouping": langs.grouping,
"decimal_point": langs.decimal_point,
"thousands_sep": langs.thousands_sep,
"week_start": langs.week_start,
}
lang_params['week_start'] = int(lang_params['week_start'])
lang_params['code'] = lang
lang_data = self.env['res.lang']._get_data(code=lang)
lang_params = {
"name": lang_data.name,
"code": lang_data.code,
"direction": lang_data.direction,
"date_format": lang_data.date_format,
"time_format": lang_data.time_format,
"short_time_format": lang_data.short_time_format,
"grouping": lang_data.grouping,
"decimal_point": lang_data.decimal_point,
"thousands_sep": lang_data.thousands_sep,
"week_start": int(lang_data.week_start),
} if lang_data else None
# Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
# done server-side when the language is loaded, so we only need to load the user's lang.
@ -317,11 +425,11 @@ class IrHttp(models.AbstractModel):
'lang': lang,
'multi_lang': len(self.env['res.lang'].sudo().get_installed()) > 1,
}
return hashlib.sha1(json.dumps(translation_cache, sort_keys=True).encode()).hexdigest()
return hashlib.sha1(json.dumps(translation_cache, sort_keys=True, default=json_default).encode()).hexdigest()
@classmethod
def _is_allowed_cookie(cls, cookie_type):
return True
return True if cookie_type == 'required' else bool(request.env.user)
@api.model
def _verify_request_recaptcha_token(self, action):

View file

@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from email.message import EmailMessage
from email.utils import make_msgid
import base64
import datetime
import email
@ -12,19 +9,18 @@ import logging
import re
import smtplib
import ssl
import sys
import threading
from email.message import EmailMessage
from email.utils import make_msgid
from socket import gaierror, timeout
from OpenSSL import crypto as SSLCrypto
from OpenSSL.crypto import Error as SSLCryptoError, FILETYPE_PEM
from OpenSSL.SSL import Error as SSLError
from urllib3.contrib.pyopenssl import PyOpenSSLContext
from odoo import api, fields, models, tools, _
from odoo import api, fields, models, tools, _, modules
from odoo.exceptions import UserError
from odoo.tools import ustr, pycompat, formataddr, email_normalize, encapsulate_email, email_domain_extract, email_domain_normalize
from odoo.tools import formataddr, email_normalize, encapsulate_email, email_domain_extract, email_domain_normalize, human_size
_logger = logging.getLogger(__name__)
_test_logger = logging.getLogger('odoo.tests')
@ -55,7 +51,7 @@ class SMTPConnection:
SMTP_ATTRIBUTES = [
'auth', 'auth_cram_md5', 'auth_login', 'auth_plain', 'close', 'data', 'docmd', 'ehlo', 'ehlo_or_helo_if_needed',
'expn', 'from_filter', 'getreply', 'has_extn', 'login', 'mail', 'noop', 'putcmd', 'quit', 'rcpt', 'rset',
'send_message', 'sendmail', 'set_debuglevel', 'smtp_from', 'starttls', 'user', 'verify', '_host',
'send_message', 'sendmail', 'set_debuglevel', 'smtp_from', 'starttls', 'user', 'verify', '_host', 'esmtp_features',
]
for name in SMTP_ATTRIBUTES:
setattr(SMTPConnection, name, make_wrap_property(name))
@ -99,7 +95,7 @@ def extract_rfc2822_addresses(text):
"""
if not text:
return []
candidates = address_pattern.findall(ustr(text))
candidates = address_pattern.findall(text)
valid_addresses = []
for c in candidates:
try:
@ -157,10 +153,19 @@ class IrMailServer(models.Model):
smtp_debug = fields.Boolean(string='Debugging', help="If enabled, the full output of SMTP sessions will "
"be written to the server log at DEBUG level "
"(this is very verbose and may include confidential info!)")
max_email_size = fields.Float(string="Max Email Size")
sequence = fields.Integer(string='Priority', default=10, help="When no specific mail server is requested for a mail, the highest priority one "
"is used. Default priority is 10 (smaller number = higher priority)")
active = fields.Boolean(default=True)
_sql_constraints = [
(
'certificate_requires_tls',
"CHECK(smtp_encryption != 'none' OR smtp_authentication != 'certificate')",
"Certificate-based authentication requires a TLS transport"
),
]
@api.depends('smtp_authentication')
def _compute_smtp_authentication_info(self):
for server in self:
@ -217,11 +222,11 @@ class IrMailServer(models.Model):
for line in usage_details_per_server[server])
if is_multiple_server_usage:
raise UserError(
_('You cannot archive these Outgoing Mail Servers (%s) because they are still used in the following case(s):\n%s',
error_server_usage, error_usage_details))
_('You cannot archive these Outgoing Mail Servers (%(server_usage)s) because they are still used in the following case(s):\n%(usage_details)s',
server_usage=error_server_usage, usage_details=error_usage_details))
raise UserError(
_('You cannot archive this Outgoing Mail Server (%s) because it is still used in the following case(s):\n%s',
error_server_usage, error_usage_details))
_('You cannot archive this Outgoing Mail Server (%(server_usage)s) because it is still used in the following case(s):\n%(usage_details)s',
server_usage=error_server_usage, usage_details=error_usage_details))
def _active_usages_compute(self):
"""Compute a dict server id to list of user-friendly outgoing mail servers usage of this record set.
@ -232,6 +237,11 @@ class IrMailServer(models.Model):
"""
return dict()
def _get_max_email_size(self):
if self.max_email_size:
return self.max_email_size
return float(self.env['ir.config_parameter'].sudo().get_param('base.default_max_email_size', '10'))
def _get_test_email_from(self):
self.ensure_one()
email_from = False
@ -252,7 +262,16 @@ class IrMailServer(models.Model):
def _get_test_email_to(self):
return "noreply@odoo.com"
def test_smtp_connection(self):
def test_smtp_connection(self, autodetect_max_email_size=False):
"""Test the connection and if autodetect_max_email_size, set auto-detected max email size.
:param bool autodetect_max_email_size: whether to autodetect the max email size
:return (dict): client action to notify the user of the result of the operation (connection test or
auto-detection successful depending on the autodetect_max_email_size parameter)
:raises UserError: if the connection fails and if autodetect_max_email_size and
the server doesn't support the auto-detection of email max size
"""
for server in self:
smtp = False
try:
@ -274,6 +293,12 @@ class IrMailServer(models.Model):
(code, repl) = smtp.getreply()
if code != 354:
raise UserError(_('The server refused the test connection with error %(repl)s', repl=repl)) # noqa: TRY301
if autodetect_max_email_size:
max_size = smtp.esmtp_features.get('size')
if not max_size:
raise UserError(_('The server "%(server_name)s" doesn\'t return the maximum email size.',
server_name=server.name))
server.max_email_size = float(max_size) / (1024 ** 2)
except (UnicodeError, idna.core.InvalidCodepoint) as e:
raise UserError(_("Invalid server name!\n %s", e)) from e
except (gaierror, timeout) as e:
@ -301,7 +326,12 @@ class IrMailServer(models.Model):
# ignored, just a consequence of the previous exception
pass
message = _("Connection Test Successful!")
if autodetect_max_email_size:
message = _(
'Email maximum size updated (%(details)s).',
details=', '.join(f'{server.name}: {human_size(server.max_email_size * 1024 ** 2)}' for server in self))
else:
message = _('Connection Test Successful!')
return {
'type': 'ir.actions.client',
'tag': 'display_notification',
@ -309,9 +339,14 @@ class IrMailServer(models.Model):
'message': message,
'type': 'success',
'sticky': False,
}
'next': {'type': 'ir.actions.act_window_close'}, # force a form reload
},
}
def action_retrieve_max_email_size(self):
self.ensure_one()
return self.test_smtp_connection(autodetect_max_email_size=True)
def connect(self, host=None, port=None, user=None, password=None, encryption=None,
smtp_from=None, ssl_certificate=None, ssl_private_key=None, smtp_debug=False, mail_server_id=None,
allow_archived=False):
@ -336,9 +371,8 @@ class IrMailServer(models.Model):
longer raised.
"""
# Do not actually connect while running in test mode
if self._is_test_mode():
if modules.module.current_test:
return
mail_server = smtp_encryption = None
if mail_server_id:
mail_server = self.sudo().browse(mail_server_id)
@ -408,10 +442,11 @@ class IrMailServer(models.Model):
raise UserError(_('Could not load your certificate / private key. \n%s', str(e)))
if not smtp_server:
raise UserError(
(_("Missing SMTP Server") + "\n" +
_("Please define at least one SMTP server, "
"or provide the SMTP parameters explicitly.")))
raise UserError(_(
"Missing SMTP Server\n"
"Please define at least one SMTP server, "
"or provide the SMTP parameters explicitly.",
))
if smtp_encryption == 'ssl':
if 'SMTP_SSL' not in smtplib.__all__:
@ -500,12 +535,11 @@ class IrMailServer(models.Model):
headers = headers or {} # need valid dict later
email_cc = email_cc or []
email_bcc = email_bcc or []
body = body or u''
msg = EmailMessage(policy=email.policy.SMTP)
if not message_id:
if object_id:
message_id = tools.generate_tracking_message_id(object_id)
message_id = tools.mail.generate_tracking_message_id(object_id)
else:
message_id = make_msgid()
msg['Message-Id'] = message_id
@ -522,16 +556,16 @@ class IrMailServer(models.Model):
msg['Bcc'] = email_bcc
msg['Date'] = datetime.datetime.utcnow()
for key, value in headers.items():
msg[pycompat.to_text(ustr(key))] = value
msg[key] = value
email_body = ustr(body)
email_body = body or ''
if subtype == 'html' and not body_alternative:
msg['MIME-Version'] = '1.0'
msg.add_alternative(tools.html2plaintext(email_body), subtype='plain', charset='utf-8')
msg.add_alternative(email_body, subtype=subtype, charset='utf-8')
elif body_alternative:
msg['MIME-Version'] = '1.0'
msg.add_alternative(ustr(body_alternative), subtype=subtype_alternative, charset='utf-8')
msg.add_alternative(body_alternative, subtype=subtype_alternative, charset='utf-8')
msg.add_alternative(email_body, subtype=subtype, charset='utf-8')
else:
msg.set_content(email_body, subtype=subtype, charset='utf-8')
@ -703,27 +737,14 @@ class IrMailServer(models.Model):
smtp_from, smtp_to_list, message = self._prepare_email_message(message, smtp)
# Do not actually send emails in testing mode!
if self._is_test_mode():
_test_logger.info("skip sending email in test mode")
if modules.module.current_test:
_test_logger.debug("skip sending email in test mode")
return message['Message-Id']
try:
message_id = message['Message-Id']
if sys.version_info < (3, 7, 4):
# header folding code is buggy and adds redundant carriage
# returns, it got fixed in 3.7.4 thanks to bpo-34424
message_str = message.as_string()
message_str = re.sub('\r+(?!\n)', '', message_str)
mail_options = []
if any((not is_ascii(addr) for addr in smtp_to_list + [smtp_from])):
# non ascii email found, require SMTPUTF8 extension,
# the relay may reject it
mail_options.append("SMTPUTF8")
smtp.sendmail(smtp_from, smtp_to_list, message_str, mail_options=mail_options)
else:
smtp.send_message(message, smtp_from, smtp_to_list)
smtp.send_message(message, smtp_from, smtp_to_list)
# do not quit() a pre-established smtp_session
if not smtp_session:
@ -731,8 +752,12 @@ class IrMailServer(models.Model):
except smtplib.SMTPServerDisconnected:
raise
except Exception as e:
params = (ustr(smtp_server), e.__class__.__name__, e)
msg = _("Mail delivery failed via SMTP server '%s'.\n%s: %s", *params)
msg = _(
"Mail delivery failed via SMTP server '%(server)s'.\n%(exception_name)s: %(message)s",
server=smtp_server,
exception_name=e.__class__.__name__,
message=e,
)
_logger.info(msg)
raise MailDeliveryException(_("Mail Delivery Failed"), msg)
return message_id
@ -842,11 +867,3 @@ class IrMailServer(models.Model):
else:
self.smtp_port = 25
return result
def _is_test_mode(self):
"""Return True if we are running the tests, so we do not send real emails.
Can be overridden in tests after mocking the SMTP lib to test in depth the
outgoing mail server.
"""
return getattr(threading.current_thread(), 'testing', False) or self.env.registry.in_test_mode()

View file

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import inspect
import itertools
import logging
import random
@ -10,18 +11,29 @@ from collections import defaultdict
from collections.abc import Mapping
from operator import itemgetter
from psycopg2 import sql
from psycopg2.extras import Json
from psycopg2.sql import Identifier, SQL, Placeholder
from odoo import api, fields, models, tools, _, _lt, Command
from odoo import api, fields, models, tools, Command
from odoo.exceptions import AccessError, UserError, ValidationError
from odoo.osv import expression
from odoo.tools import pycompat, unique, OrderedSet, lazy_property
from odoo.tools import format_list, lazy_property, sql, unique, OrderedSet, SQL
from odoo.tools.safe_eval import safe_eval, datetime, dateutil, time
from odoo.tools.translate import _, LazyTranslate
_lt = LazyTranslate(__name__)
_logger = logging.getLogger(__name__)
# Messages are declared in extenso so they are properly exported in translation terms
ACCESS_ERROR_HEADER = {
'read': _lt("You are not allowed to access '%(document_kind)s' (%(document_model)s) records."),
'write': _lt("You are not allowed to modify '%(document_kind)s' (%(document_model)s) records."),
'create': _lt("You are not allowed to create '%(document_kind)s' (%(document_model)s) records."),
'unlink': _lt("You are not allowed to delete '%(document_kind)s' (%(document_model)s) records."),
}
ACCESS_ERROR_GROUPS = _lt("This operation is allowed for the following groups:\n%(groups_list)s")
ACCESS_ERROR_NOGROUP = _lt("No group currently allows this operation.")
ACCESS_ERROR_RESOLUTION = _lt("Contact your administrator to request access if necessary.")
MODULE_UNINSTALL_FLAG = '_force_unlink'
RE_ORDER_FIELDS = re.compile(r'"?(\w+)"?\s*(?:asc|desc)?', flags=re.I)
@ -65,13 +77,6 @@ def selection_xmlid(module, model_name, field_name, value):
return '%s.selection__%s__%s__%s' % (module, xmodel, field_name, xvalue)
# generic INSERT and UPDATE queries
INSERT_QUERY = SQL("INSERT INTO {table} ({cols}) VALUES %s RETURNING id")
UPDATE_QUERY = SQL("UPDATE {table} SET {assignment} WHERE {condition} RETURNING id")
quote = '"{}"'.format
def query_insert(cr, table, rows):
""" Insert rows in a table. ``rows`` is a list of dicts, all with the same
set of keys. Return the ids of the new rows.
@ -79,12 +84,15 @@ def query_insert(cr, table, rows):
if isinstance(rows, Mapping):
rows = [rows]
cols = list(rows[0])
query = INSERT_QUERY.format(
table=Identifier(table),
cols=SQL(",").join(map(Identifier, cols)),
query = SQL(
"INSERT INTO %s (%s)",
SQL.identifier(table),
SQL(",").join(map(SQL.identifier, cols)),
)
assert not query.params
str_query = query.code + " VALUES %s RETURNING id"
params = [tuple(row[col] for col in cols) for row in rows]
cr.execute_values(query, params)
cr.execute_values(str_query, params)
return [row[0] for row in cr.fetchall()]
@ -92,34 +100,40 @@ def query_update(cr, table, values, selectors):
""" Update the table with the given values (dict), and use the columns in
``selectors`` to select the rows to update.
"""
setters = set(values) - set(selectors)
query = UPDATE_QUERY.format(
table=Identifier(table),
assignment=SQL(",").join(
SQL("{} = {}").format(Identifier(s), Placeholder(s))
for s in setters
query = SQL(
"UPDATE %s SET %s WHERE %s RETURNING id",
SQL.identifier(table),
SQL(",").join(
SQL("%s = %s", SQL.identifier(key), val)
for key, val in values.items()
if key not in selectors
),
condition=SQL(" AND ").join(
SQL("{} = {}").format(Identifier(s), Placeholder(s))
for s in selectors
SQL(" AND ").join(
SQL("%s = %s", SQL.identifier(key), values[key])
for key in selectors
),
)
cr.execute(query, values)
cr.execute(query)
return [row[0] for row in cr.fetchall()]
def select_en(model, fnames, model_names):
    """ Select the given columns from ``model``'s table, for the rows whose
    ``model`` column is one of ``model_names``.

    Translated fields are returned in 'en_US' (their JSONB value is projected
    with ``->>'en_US'``).

    :param model: recordset whose underlying table is queried
    :param fnames: list of column names to select
    :param model_names: model names used to filter rows; empty means no rows
    :return: list of result row tuples
    """
    if not model_names:
        # an empty IN () clause is invalid SQL; short-circuit instead
        return []
    cols = SQL(", ").join(
        SQL("%s->>'en_US'", SQL.identifier(fname)) if model._fields[fname].translate else SQL.identifier(fname)
        for fname in fnames
    )
    query = SQL(
        "SELECT %s FROM %s WHERE model IN %s",
        cols,
        SQL.identifier(model._table),
        tuple(model_names),
    )
    return model.env.execute_query(query)
def upsert_en(model, fnames, rows, conflict):
@ -131,24 +145,6 @@ def upsert_en(model, fnames, rows, conflict):
:param conflict: list of column names to put into the ON CONFLICT clause
:return: the ids of the inserted or updated rows
"""
table = quote(model._table)
cols = ", ".join(quote(fname) for fname in fnames)
values = ", ".join("%s" for row in rows)
conf = ", ".join(conflict)
excluded = ", ".join(
(
f"COALESCE({table}.{quote(fname)}, '{{}}'::jsonb) || EXCLUDED.{quote(fname)}"
if model._fields[fname].translate is True
else f"EXCLUDED.{quote(fname)}"
)
for fname in fnames
)
query = f"""
INSERT INTO {table} ({cols}) VALUES {values}
ON CONFLICT ({conf}) DO UPDATE SET ({cols}) = ({excluded})
RETURNING id
"""
# for translated fields, we can actually erase the json value, as
# translations will be reloaded after this
def identity(val):
@ -158,12 +154,34 @@ def upsert_en(model, fnames, rows, conflict):
return Json({'en_US': val}) if val is not None else val
wrappers = [(jsonify if model._fields[fname].translate else identity) for fname in fnames]
params = [
values = [
tuple(func(val) for func, val in zip(wrappers, row))
for row in rows
]
model.env.cr.execute(query, params)
return [row[0] for row in model.env.cr.fetchall()]
comma = SQL(", ").join
query = SQL("""
INSERT INTO %(table)s (%(cols)s) VALUES %(values)s
ON CONFLICT (%(conflict)s) DO UPDATE SET (%(cols)s) = (%(excluded)s)
RETURNING id
""",
table=SQL.identifier(model._table),
cols=comma(SQL.identifier(fname) for fname in fnames),
values=comma(values),
conflict=comma(SQL.identifier(fname) for fname in conflict),
excluded=comma(
(
SQL(
"COALESCE(%s, '{}'::jsonb) || EXCLUDED.%s",
SQL.identifier(model._table, fname),
SQL.identifier(fname),
)
if model._fields[fname].translate is True
else SQL("EXCLUDED.%s", SQL.identifier(fname))
)
for fname in fnames
),
)
return [id_ for id_, in model.env.execute_query(query)]
#
@ -240,13 +258,12 @@ class IrModel(models.Model):
@api.depends()
def _compute_count(self):
    """Compute the number of database rows backing each model.

    Abstract models and models without an automatically-managed table keep
    the default of 0, since they have no dedicated table to count.
    """
    self.count = 0
    for model in self:
        records = self.env[model.model]
        if not records._abstract and records._auto:
            [[count]] = self.env.execute_query(SQL("SELECT COUNT(*) FROM %s", SQL.identifier(records._table)))
            model.count = count
@api.constrains('model')
def _check_model_name(self):
@ -306,11 +323,11 @@ class IrModel(models.Model):
continue
table = current_model._table
kind = tools.table_kind(self._cr, table)
if kind == tools.TableKind.View:
self._cr.execute(sql.SQL('DROP VIEW {}').format(sql.Identifier(table)))
elif kind == tools.TableKind.Regular:
self._cr.execute(sql.SQL('DROP TABLE {} CASCADE').format(sql.Identifier(table)))
kind = sql.table_kind(self._cr, table)
if kind == sql.TableKind.View:
self._cr.execute(SQL('DROP VIEW %s', SQL.identifier(table)))
elif kind == sql.TableKind.Regular:
self._cr.execute(SQL('DROP TABLE %s CASCADE', SQL.identifier(table)))
elif kind is not None:
_logger.warning(
"Unable to drop table %r of model %r: unmanaged or unknown tabe type %r",
@ -325,7 +342,7 @@ class IrModel(models.Model):
# Prevent manual deletion of module tables
for model in self:
if model.state != 'manual':
raise UserError(_("Model %r contains module data and cannot be removed.", model.name))
raise UserError(_("Model %s contains module data and cannot be removed.", model.name))
def unlink(self):
# prevent screwing up fields that depend on these models' fields
@ -417,7 +434,7 @@ class IrModel(models.Model):
model_ids = {}
existing = {}
for row in select_en(self, ['id'] + cols, "model IN %s", [tuple(model_names)]):
for row in select_en(self, ['id'] + cols, model_names):
model_ids[row[1]] = row[0]
existing[row[1]] = row[1:]
@ -450,7 +467,7 @@ class IrModel(models.Model):
models.check_pg_name(model_data["model"].replace(".", "_"))
class CustomModel(models.Model):
_name = pycompat.to_text(model_data['model'])
_name = model_data['model']
_description = model_data['name']
_module = False
_custom = True
@ -486,8 +503,8 @@ class IrModel(models.Model):
for model_data in cr.dictfetchall():
model_class = self._instanciate(model_data)
Model = model_class._build_model(self.pool, cr)
kind = tools.table_kind(cr, Model._table)
if kind not in (tools.TableKind.Regular, None):
kind = sql.table_kind(cr, Model._table)
if kind not in (sql.TableKind.Regular, None):
_logger.info(
"Model %r is backed by table %r which is not a regular table (%r), disabling automatic schema management",
Model._name, Model._table, kind,
@ -540,13 +557,14 @@ class IrModelFields(models.Model):
copied = fields.Boolean(string='Copied',
compute='_compute_copied', store=True, readonly=False,
help="Whether the value is copied when duplicating a record.")
related = fields.Char(string='Related Field', help="The corresponding related field, if any. This must be a dot-separated list of field names.")
related = fields.Char(string='Related Field Definition', help="The corresponding related field, if any. This must be a dot-separated list of field names.")
related_field_id = fields.Many2one('ir.model.fields', compute='_compute_related_field_id',
store=True, string="Related field", ondelete='cascade')
store=True, string="Related Field", ondelete='cascade')
required = fields.Boolean()
readonly = fields.Boolean()
index = fields.Boolean(string='Indexed')
translate = fields.Boolean(string='Translatable', help="Whether values for this field can be translated (enables the translation mechanism for that field)")
company_dependent = fields.Boolean(string='Company Dependent', help="Whether values for this field is company dependent", readonly=True)
size = fields.Integer()
state = fields.Selection([('manual', 'Custom Field'), ('base', 'Base Field')], string='Type', default='manual', required=True, readonly=True, index=True)
on_delete = fields.Selection([('cascade', 'Cascade'), ('set null', 'Set NULL'), ('restrict', 'Restrict')],
@ -662,10 +680,18 @@ class IrModelFields(models.Model):
for index, name in enumerate(names):
field = self._get(model_name, name)
if not field:
raise UserError(_("Unknown field name %r in related field %r", name, self.related))
raise UserError(_(
'Unknown field name "%(field_name)s" in related field "%(related_field)s"',
field_name=name,
related_field=self.related,
))
model_name = field.relation
if index < last and not field.relation:
raise UserError(_("Non-relational field name %r in related field %r", name, self.related))
raise UserError(_(
'Non-relational field name "%(field_name)s" in related field "%(related_field)s"',
field_name=name,
related_field=self.related,
))
return field
@api.constrains('related')
@ -674,9 +700,17 @@ class IrModelFields(models.Model):
if rec.state == 'manual' and rec.related:
field = rec._related_field()
if field.ttype != rec.ttype:
raise ValidationError(_("Related field %r does not have type %r", rec.related, rec.ttype))
raise ValidationError(_(
'Related field "%(related_field)s" does not have type "%(type)s"',
related_field=rec.related,
type=rec.ttype,
))
if field.relation != rec.relation:
raise ValidationError(_("Related field %r does not have comodel %r", rec.related, rec.relation))
raise ValidationError(_(
'Related field "%(related_field)s" does not have comodel "%(comodel)s"',
related_field=rec.related,
comodel=rec.relation,
))
@api.onchange('related')
def _onchange_related(self):
@ -710,7 +744,7 @@ class IrModelFields(models.Model):
continue
for seq in record.depends.split(","):
if not seq.strip():
raise UserError(_("Empty dependency in %r", record.depends))
raise UserError(_("Empty dependency in %s", record.depends))
model = self.env[record.model]
names = seq.strip().split(".")
last = len(names) - 1
@ -719,9 +753,17 @@ class IrModelFields(models.Model):
raise UserError(_("Compute method cannot depend on field 'id'"))
field = model._fields.get(name)
if field is None:
raise UserError(_("Unknown field %r in dependency %r", name, seq.strip()))
raise UserError(_(
'Unknown field “%(field)s” in dependency “%(dependency)s',
field=name,
dependency=seq.strip(),
))
if index < last and not field.relational:
raise UserError(_("Non-relational field %r in dependency %r", name, seq.strip()))
raise UserError(_(
'Non-relational field “%(field)s” in dependency “%(dependency)s',
field=name,
dependency=seq.strip(),
))
model = model[name]
@api.onchange('compute')
@ -746,7 +788,7 @@ class IrModelFields(models.Model):
else:
currency_field = self._get(rec.model, rec.currency_field)
if not currency_field:
raise ValidationError(_("Unknown field name %r in currency_field", rec.currency_field))
raise ValidationError(_("Unknown field specified “%s in currency_field", rec.currency_field))
if currency_field.ttype != 'many2one':
raise ValidationError(_("Currency field does not have type many2one"))
@ -792,7 +834,7 @@ class IrModelFields(models.Model):
return
return {'warning': {
'title': _("Warning"),
'message': _("The table %r if used for other, possibly incompatible fields.", self.relation_table),
'message': _("The table %s” is used by another, possibly incompatible field(s).", self.relation_table),
}}
@api.constrains('required', 'ttype', 'on_delete')
@ -827,10 +869,10 @@ class IrModelFields(models.Model):
is_model = model is not None
if field.store:
# TODO: Refactor this brol in master
if is_model and tools.column_exists(self._cr, model._table, field.name) and \
tools.table_kind(self._cr, model._table) == tools.TableKind.Regular:
self._cr.execute(sql.SQL('ALTER TABLE {} DROP COLUMN {} CASCADE').format(
sql.Identifier(model._table), sql.Identifier(field.name),
if is_model and sql.column_exists(self._cr, model._table, field.name) and \
sql.table_kind(self._cr, model._table) == sql.TableKind.Regular:
self._cr.execute(SQL('ALTER TABLE %s DROP COLUMN %s CASCADE',
SQL.identifier(model._table), SQL.identifier(field.name),
))
if field.state == 'manual' and field.ttype == 'many2many':
rel_name = field.relation_table or (is_model and model._fields[field.name].relation)
@ -845,7 +887,7 @@ class IrModelFields(models.Model):
(tuple(tables_to_drop), tuple(self.ids)))
tables_to_keep = set(row[0] for row in self._cr.fetchall())
for rel_name in tables_to_drop - tables_to_keep:
self._cr.execute(sql.SQL('DROP TABLE {}').format(sql.Identifier(rel_name)))
self._cr.execute(SQL('DROP TABLE %s', SQL.identifier(rel_name)))
return True
@ -888,8 +930,8 @@ class IrModelFields(models.Model):
if not uninstalling:
field, dep = failed_dependencies[0]
raise UserError(_(
"The field '%s' cannot be removed because the field '%s' depends on it.",
field, dep,
"The field '%(field)s' cannot be removed because the field '%(other_field)s' depends on it.",
field=field, other_field=dep,
))
else:
self = self.union(*[
@ -919,9 +961,9 @@ class IrModelFields(models.Model):
except Exception:
if not uninstalling:
raise UserError(_(
"Cannot rename/delete fields that are still present in views:\nFields: %s\nView: %s",
", ".join(str(f) for f in fields),
view.name,
"Cannot rename/delete fields that are still present in views:\nFields: %(fields)s\nView: %(view)s",
fields=format_list(self.env, [str(f) for f in fields]),
view=view.name,
))
else:
# uninstall mode
@ -957,7 +999,7 @@ class IrModelFields(models.Model):
# discard the removed fields from fields to compute
for field in fields:
self.env.all.tocompute.pop(field, None)
self.env.transaction.tocompute.pop(field, None)
model_names = self.mapped('model')
self._drop_column()
@ -1000,7 +1042,7 @@ class IrModelFields(models.Model):
('model', '=', vals['relation']),
('name', '=', vals['relation_field']),
]):
raise UserError(_("Many2one %s on model %s does not exist!", vals['relation_field'], vals['relation']))
raise UserError(_("Many2one %(field)s on model %(model)s does not exist!", field=vals['relation_field'], model=vals['relation']))
if any(model in self.pool for model in models):
# setup models; this re-initializes model in registry
@ -1068,18 +1110,18 @@ class IrModelFields(models.Model):
# rename column in database, and its corresponding index if present
table, oldname, newname, index, stored = column_rename
if stored:
self._cr.execute(
sql.SQL('ALTER TABLE {} RENAME COLUMN {} TO {}').format(
sql.Identifier(table),
sql.Identifier(oldname),
sql.Identifier(newname)
))
self._cr.execute(SQL(
'ALTER TABLE %s RENAME COLUMN %s TO %s',
SQL.identifier(table),
SQL.identifier(oldname),
SQL.identifier(newname)
))
if index:
self._cr.execute(
sql.SQL('ALTER INDEX {} RENAME TO {}').format(
sql.Identifier(f'{table}_{oldname}_index'),
sql.Identifier(f'{table}_{newname}_index'),
))
self._cr.execute(SQL(
'ALTER INDEX %s RENAME TO %s',
SQL.identifier(f'{table}_{oldname}_index'),
SQL.identifier(f'{table}_{newname}_index'),
))
if column_rename or patched_models or translate_only:
# setup models, this will reload all manual fields in registry
@ -1124,6 +1166,7 @@ class IrModelFields(models.Model):
'selectable': bool(field.search or field.store),
'size': getattr(field, 'size', None),
'translate': bool(field.translate),
'company_dependent': bool(field.company_dependent),
'relation_field': field.inverse_name if field.type == 'one2many' else None,
'relation_table': field.relation if field.type == 'many2many' else None,
'column1': field.column1 if field.type == 'many2many' else None,
@ -1168,7 +1211,7 @@ class IrModelFields(models.Model):
field_ids = {}
existing = {}
for row in select_en(self, ['id'] + cols, "model IN %s", [tuple(model_names)]):
for row in select_en(self, ['id'] + cols, model_names):
field_ids[row[1:3]] = row[0]
existing[row[1:3]] = row[1:]
@ -1234,6 +1277,7 @@ class IrModelFields(models.Model):
'required': bool(field_data['required']),
'readonly': bool(field_data['readonly']),
'store': bool(field_data['store']),
'company_dependent': bool(field_data['company_dependent']),
}
if field_data['ttype'] in ('char', 'text', 'html'):
attrs['translate'] = bool(field_data['translate'])
@ -1367,7 +1411,7 @@ class ModelInherit(models.Model):
IrModel = self.env["ir.model"]
get_model_id = IrModel._get_id
module_mapping = defaultdict(list)
module_mapping = defaultdict(OrderedSet)
for model_name in model_names:
get_field_id = self.env["ir.model.fields"]._get_ids(model_name).get
model_id = get_model_id(model_name)
@ -1384,10 +1428,16 @@ class ModelInherit(models.Model):
] + [
(model_id, get_model_id(parent_name), get_field_id(field))
for parent_name, field in cls._inherits.items()
] + [
(model_id, get_model_id(field.comodel_name), get_field_id(field_name))
for (field_name, field) in inspect.getmembers(cls)
if isinstance(field, fields.Many2one)
if field.type == 'many2one' and not field.related and field.delegate
if field_name not in cls._inherits.values()
]
for item in items:
module_mapping[item].append(cls._module)
module_mapping[item].add(cls._module)
if not module_mapping:
return
@ -1480,6 +1530,13 @@ class IrModelSelection(models.Model):
]
if not fields:
return
if invalid_fields := OrderedSet(
field for field in fields
for selection in field.selection
for value_label in selection
if not isinstance(value_label, str)
):
raise ValidationError(_("Fields %s contain a non-str value/label in selection", invalid_fields))
# determine expected and existing rows
IMF = self.env['ir.model.fields']
@ -1682,14 +1739,15 @@ class IrModelSelection(models.Model):
"Could not fulfill ondelete action for field %s.%s, "
"attempting ORM bypass...", records._name, fname,
)
query = sql.SQL("UPDATE {} SET {}=%s WHERE id IN %s").format(
sql.Identifier(records._table),
sql.Identifier(fname),
)
# if this fails then we're shit out of luck and there's nothing
# we can do except fix on a case-by-case basis
value = field.convert_to_column(value, records)
self.env.cr.execute(query, [value, records._ids])
self.env.execute_query(SQL(
"UPDATE %s SET %s=%s WHERE id IN %s",
SQL.identifier(records._table),
SQL.identifier(fname),
field.convert_to_column_insert(value, records),
records._ids,
))
records.invalidate_recordset([fname])
for selection in self:
@ -1727,8 +1785,8 @@ class IrModelSelection(models.Model):
else:
# this shouldn't happen... simply a sanity check
raise ValueError(_(
"The ondelete policy %r is not valid for field %r",
ondelete, selection
'The ondelete policy "%(policy)s" is not valid for field "%(field)s"',
policy=ondelete, field=selection,
))
def _get_records(self):
@ -1770,39 +1828,38 @@ class IrModelConstraint(models.Model):
]
def unlink(self):
self.check_access_rights('unlink')
self.check_access_rule('unlink')
self.check_access('unlink')
ids_set = set(self.ids)
for data in self.sorted(key='id', reverse=True):
name = tools.ustr(data.name)
name = data.name
if data.model.model in self.env:
table = self.env[data.model.model]._table
else:
table = data.model.model.replace('.', '_')
typ = data.type
# double-check we are really going to delete all the owners of this schema element
self._cr.execute("""SELECT id from ir_model_constraint where name=%s""", (data.name,))
self._cr.execute("""SELECT id from ir_model_constraint where name=%s""", [name])
external_ids = set(x[0] for x in self._cr.fetchall())
if external_ids - ids_set:
# as installed modules have defined this element we must not delete it!
continue
typ = data.type
if typ == 'f':
# test if FK exists on this table (it could be on a related m2m table, in which case we ignore it)
self._cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid)
WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""",
('f', name, table))
if self._cr.fetchone():
self._cr.execute(
sql.SQL('ALTER TABLE {} DROP CONSTRAINT {}').format(
sql.Identifier(table),
sql.Identifier(name[:63])
))
self._cr.execute(SQL(
'ALTER TABLE %s DROP CONSTRAINT %s',
SQL.identifier(table),
SQL.identifier(name[:63]),
))
_logger.info('Dropped FK CONSTRAINT %s@%s', name, data.model.model)
if typ == 'u':
hname = tools.make_identifier(name)
hname = sql.make_identifier(name)
# test if constraint exists
# Since type='u' means any "other" constraint, to avoid issues we limit to
# 'c' -> check, 'u' -> unique, 'x' -> exclude constraints, effective leaving
@ -1812,16 +1869,18 @@ class IrModelConstraint(models.Model):
WHERE cs.contype in ('c', 'u', 'x') and cs.conname=%s and cl.relname=%s""",
(hname, table))
if self._cr.fetchone():
self._cr.execute(sql.SQL('ALTER TABLE {} DROP CONSTRAINT {}').format(
sql.Identifier(table), sql.Identifier(hname)))
self._cr.execute(SQL(
'ALTER TABLE %s DROP CONSTRAINT %s',
SQL.identifier(table),
SQL.identifier(hname),
))
_logger.info('Dropped CONSTRAINT %s@%s', name, data.model.model)
return super().unlink()
def copy(self, default=None):
default = dict(default or {})
default['name'] = self.name + '_copy'
return super(IrModelConstraint, self).copy(default)
def copy_data(self, default=None):
    """Return the copy values of each constraint, suffixing the name with
    '_copy' so the duplicate does not clash with the original constraint."""
    vals_list = super().copy_data(default=default)
    copies = []
    for constraint, vals in zip(self, vals_list):
        copies.append({**vals, 'name': constraint.name + '_copy'})
    return copies
def _reflect_constraint(self, model, conname, type, definition, module, message=None):
""" Reflect the given constraint, and return its corresponding record
@ -1922,23 +1981,23 @@ class IrModelRelation(models.Model):
ids_set = set(self.ids)
to_drop = tools.OrderedSet()
for data in self.sorted(key='id', reverse=True):
name = tools.ustr(data.name)
name = data.name
# double-check we are really going to delete all the owners of this schema element
self._cr.execute("""SELECT id from ir_model_relation where name = %s""", (data.name,))
external_ids = set(x[0] for x in self._cr.fetchall())
if external_ids - ids_set:
self._cr.execute("""SELECT id from ir_model_relation where name = %s""", [name])
external_ids = {x[0] for x in self._cr.fetchall()}
if not external_ids.issubset(ids_set):
# as installed modules have defined this element we must not delete it!
continue
if tools.table_exists(self._cr, name):
if sql.table_exists(self._cr, name):
to_drop.add(name)
self.unlink()
# drop m2m relation tables
for table in to_drop:
self._cr.execute(sql.SQL('DROP TABLE {} CASCADE').format(sql.Identifier(table)))
self._cr.execute(SQL('DROP TABLE %s CASCADE', SQL.identifier(table)))
_logger.info('Dropped table %s', table)
def _reflect_relation(self, model, table, module):
@ -1981,6 +2040,7 @@ class IrModelAccess(models.Model):
def group_names_with_access(self, model_name, access_mode):
""" Return the names of visible groups which have been granted
``access_mode`` on the model ``model_name``.
:rtype: list
"""
assert access_mode in ('read', 'write', 'create', 'unlink'), 'Invalid access mode'
@ -1998,6 +2058,25 @@ class IrModelAccess(models.Model):
""", [lang, lang, model_name])
return [('%s/%s' % x) if x[0] else x[1] for x in self._cr.fetchall()]
@api.model
@tools.ormcache('model_name', 'access_mode')
def _get_access_groups(self, model_name, access_mode='read'):
    """ Return the group expression object that represents the users who
    have ``access_mode`` to the model ``model_name``.
    """
    assert access_mode in ('read', 'write', 'create', 'unlink'), 'Invalid access mode'
    model = self.env['ir.model']._get(model_name)
    domain = [(f'perm_{access_mode}', '=', True), ('model_id', '=', model.id)]
    accesses = self.sudo().search(domain)
    group_definitions = self.env['res.groups']._get_group_definitions()
    if not accesses:
        # no ACL grants this mode: nobody has access
        return group_definitions.empty
    if any(not access.group_id for access in accesses):
        # a global (group-less) ACL grants this mode to everyone
        return group_definitions.universe
    return group_definitions.from_ids(accesses.group_id.ids)
# The context parameter is useful when the method translates error messages.
# But as the method raises an exception in that case, the key 'lang' might
# not be really necessary as a cache key, unless the `ormcache_context`
@ -2007,26 +2086,22 @@ class IrModelAccess(models.Model):
def _get_allowed_models(self, mode='read'):
assert mode in ('read', 'write', 'create', 'unlink'), 'Invalid access mode'
group_ids = self.env.user._get_group_ids()
self.flush_model()
self.env.cr.execute(f"""
rows = self.env.execute_query(SQL("""
SELECT m.model
FROM ir_model_access a
JOIN ir_model m ON (m.id = a.model_id)
WHERE a.perm_{mode}
WHERE a.perm_%s
AND a.active
AND (
a.group_id IS NULL OR
-- use subselect fo force a better query plan. See #99695 --
a.group_id IN (
SELECT gu.gid
FROM res_groups_users_rel gu
WHERE gu.uid = %s
)
a.group_id IN %s
)
GROUP BY m.model
""", (self.env.uid,))
""", SQL(mode), tuple(group_ids) or (None,)))
return frozenset(v[0] for v in self.env.cr.fetchall())
return frozenset(v[0] for v in rows)
@api.model
def check(self, model, mode='read', raise_exception=True):
@ -2036,61 +2111,32 @@ class IrModelAccess(models.Model):
assert isinstance(model, str), 'Not a model name: %s' % (model,)
# TransientModel records have no access rights, only an implicit access rule
if model not in self.env:
_logger.error('Missing model %s', model)
has_access = model in self._get_allowed_models(mode)
if not has_access and raise_exception:
groups = '\n'.join('\t- %s' % g for g in self.group_names_with_access(model, mode))
document_kind = self.env['ir.model']._get(model).name or model
msg_heads = {
# Messages are declared in extenso so they are properly exported in translation terms
'read': _lt(
"You are not allowed to access '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
'write': _lt(
"You are not allowed to modify '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
'create': _lt(
"You are not allowed to create '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
'unlink': _lt(
"You are not allowed to delete '%(document_kind)s' (%(document_model)s) records.",
document_kind=document_kind,
document_model=model,
),
}
operation_error = msg_heads[mode]
if groups:
group_info = _("This operation is allowed for the following groups:\n%(groups_list)s", groups_list=groups)
else:
group_info = _("No group currently allows this operation.")
resolution_info = _("Contact your administrator to request access if necessary.")
_logger.info('Access Denied by ACLs for operation: %s, uid: %s, model: %s', mode, self._uid, model)
msg = """{operation_error}
{group_info}
{resolution_info}""".format(
operation_error=operation_error,
group_info=group_info,
resolution_info=resolution_info)
raise AccessError(msg) from None
raise self._make_access_error(model, mode) from None
return has_access
def _make_access_error(self, model: str, mode: str):
    """ Return the exception corresponding to an access error. """
    _logger.info('Access Denied by ACLs for operation: %s, uid: %s, model: %s', mode, self._uid, model)
    document_kind = self.env['ir.model']._get(model).name or model
    header = str(ACCESS_ERROR_HEADER[mode]) % {
        'document_kind': document_kind,
        'document_model': model,
    }
    group_lines = [f"\t- {g}" for g in self.group_names_with_access(model, mode)]
    if group_lines:
        groups_part = str(ACCESS_ERROR_GROUPS) % {'groups_list': "\n".join(group_lines)}
    else:
        groups_part = str(ACCESS_ERROR_NOGROUP)
    message = "\n\n".join([header, groups_part, str(ACCESS_ERROR_RESOLUTION)])
    return AccessError(message)
@api.model
def call_cache_clearing_methods(self):
@ -2164,10 +2210,12 @@ class IrModelData(models.Model):
def _auto_init(self):
    """Initialize the table as usual, then add the indexes used by xml id
    lookups.

    :return: the result of the parent ``_auto_init``
    """
    res = super(IrModelData, self)._auto_init()
    # enforce unicity of (module, name) pairs at the database level
    sql.create_unique_index(
        self._cr, 'ir_model_data_module_name_uniq_index',
        self._table, ['module', 'name'])
    # speed up reverse lookups by (model, res_id)
    sql.create_index(
        self._cr, 'ir_model_data_model_res_id_index',
        self._table, ['model', 'res_id'])
    return res
@api.depends('res_id', 'model', 'complete_name')
@ -2222,23 +2270,35 @@ class IrModelData(models.Model):
if self.env[model].search([('id', '=', res_id)]):
return model, res_id
if raise_on_access_error:
raise AccessError(_('Not enough access rights on the external ID %r', '%s.%s', (module, xml_id)))
raise AccessError(_('Not enough access rights on the external ID "%(module)s.%(xml_id)s"', module=module, xml_id=xml_id))
return model, False
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
self.ensure_one()
rand = "%04x" % random.getrandbits(16)
default = dict(default or {}, name="%s_%s" % (self.name, rand))
return super().copy(default)
def copy_data(self, default=None):
    """Return copy values for each xml id, making the duplicated name unique
    by appending a random 16-bit hex suffix."""
    vals_list = super().copy_data(default=default)
    for record, vals in zip(self, vals_list):
        suffix = "%04x" % random.getrandbits(16)
        vals['name'] = f"{record.name}_{suffix}"
    return vals_list
@api.model_create_multi
def create(self, vals_list):
    """Create ir.model.data records; invalidate the registry 'groups' cache
    as soon as any new xml id targets res.groups."""
    records = super().create(vals_list)
    for vals in vals_list:
        if vals.get('model') == 'res.groups':
            self.env.registry.clear_cache('groups')
            break
    return records
def write(self, values):
    """Write on xml id records and invalidate the dependent registry caches.

    :param values: field values to write
    :return: the result of the parent ``write``
    """
    # any change to an xml id invalidates the registry-wide lookup cache
    self.env.registry.clear_cache()  # _xmlid_lookup
    res = super().write(values)
    if values.get('model') == 'res.groups':
        self.env.registry.clear_cache('groups')
    return res
def unlink(self):
    """ Regular unlink method, but make sure to clear the caches.

    Deleting an xml id invalidates the registry-wide xmlid lookup cache;
    when any deleted entry points at res.groups, the cached group
    definitions are invalidated as well.
    """
    self.env.registry.clear_cache()  # _xmlid_lookup
    # .exists() filters out already-deleted rows before reading .model
    if self and any(data.model == 'res.groups' for data in self.exists()):
        self.env.registry.clear_cache('groups')
    return super(IrModelData, self).unlink()
def _lookup_xmlids(self, xml_ids, model):
@ -2311,6 +2371,9 @@ class IrModelData(models.Model):
# update loaded_xmlids
self.pool.loaded_xmlids.update("%s.%s" % row[:2] for row in rows)
if any(row[2] == 'res.groups' for row in rows):
self.env.registry.clear_cache('groups')
# NOTE: this method is overriden in web_studio; if you need to make another
# override, make sure it is compatible with the one that is there.
def _build_insert_xmlids_values(self):
@ -2424,6 +2487,8 @@ class IrModelData(models.Model):
('model', '=', records._name),
('res_id', 'in', records.ids),
])
cloc_exclude_data = ref_data.filtered(lambda imd: imd.module == '__cloc_exclude__')
ref_data -= cloc_exclude_data
records -= records.browse((ref_data - module_data).mapped('res_id'))
if not records:
return
@ -2452,6 +2517,7 @@ class IrModelData(models.Model):
_logger.info('Deleting %s', records)
try:
with self._cr.savepoint():
cloc_exclude_data.unlink()
records.unlink()
except Exception:
if len(records) <= 1:
@ -2599,10 +2665,9 @@ class IrModelData(models.Model):
@api.model
def toggle_noupdate(self, model, res_id):
    """ Toggle the noupdate flag on the external id of the record.

    :param model: model name of the target record
    :param res_id: database id of the target record
    :raises AccessError: when the user may not write the target record
    """
    # the flag may only be flipped by users allowed to write the record
    self.env[model].browse(res_id).check_access('write')
    for xid in self.search([('model', '=', model), ('res_id', '=', res_id)]):
        xid.noupdate = not xid.noupdate
class WizardModelMenu(models.TransientModel):
@ -2618,7 +2683,7 @@ class WizardModelMenu(models.TransientModel):
vals = {
'name': menu.name,
'res_model': model.model,
'view_mode': 'tree,form',
'view_mode': 'list,form',
}
action_id = self.env['ir.actions.act_window'].create(vals)
self.env['ir.ui.menu'].create({

View file

@ -101,7 +101,7 @@ class ModuleCategory(models.Model):
@api.constrains('parent_id')
def _check_parent_not_circular(self):
    """Reject any parent assignment that would make the category hierarchy
    cyclic.

    :raises ValidationError: when a cycle is detected
    """
    if self._has_cycle():
        raise ValidationError(_("Error ! You cannot create recursive categories."))
@ -178,7 +178,7 @@ class Module(models.Model):
for element, _attribute, _link, _pos in html.iterlinks():
if element.get('src') and not '//' in element.get('src') and not 'static/' in element.get('src'):
element.set('src', "/%s/static/description/%s" % (module.name, element.get('src')))
return tools.html_sanitize(lxml.html.tostring(html))
return tools.html_sanitize(lxml.html.tostring(html, encoding='unicode'))
for module in self:
if not module.name:
@ -299,6 +299,7 @@ class Module(models.Model):
sequence = fields.Integer('Sequence', default=100)
dependencies_id = fields.One2many('ir.module.module.dependency', 'module_id',
string='Dependencies', readonly=True)
country_ids = fields.Many2many('res.country', 'module_country', 'module_id', 'country_id')
exclusion_ids = fields.One2many('ir.module.module.exclusion', 'module_id',
string='Exclusions', readonly=True)
auto_install = fields.Boolean('Automatic Installation',
@ -351,18 +352,17 @@ class Module(models.Model):
""" Domain to retrieve the modules that should be loaded by the registry. """
return [('state', '=', 'installed')]
@classmethod
def check_external_dependencies(cls, module_name, newstate='to install'):
terp = cls.get_module_info(module_name)
def check_external_dependencies(self, module_name, newstate='to install'):
terp = self.get_module_info(module_name)
try:
modules.check_manifest_dependencies(terp)
except Exception as e:
if newstate == 'to install':
msg = _('Unable to install module "%s" because an external dependency is not met: %s', module_name, e.args[0])
msg = _('Unable to install module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
elif newstate == 'to upgrade':
msg = _('Unable to upgrade module "%s" because an external dependency is not met: %s', module_name, e.args[0])
msg = _('Unable to upgrade module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
else:
msg = _('Unable to process module "%s" because an external dependency is not met: %s', module_name, e.args[0])
msg = _('Unable to process module "%(module)s" because an external dependency is not met: %(dependency)s', module=module_name, dependency=e.args[0])
raise UserError(msg)
def _state_update(self, newstate, states_to_update, level=100):
@ -381,7 +381,10 @@ class Module(models.Model):
update_mods, ready_mods = self.browse(), self.browse()
for dep in module.dependencies_id:
if dep.state == 'unknown':
raise UserError(_("You try to install module %r that depends on module %r.\nBut the latter module is not available in your system.", module.name, dep.name))
raise UserError(_(
'You try to install module "%(module)s" that depends on module "%(dependency)s".\nBut the latter module is not available in your system.',
module=module.name, dependency=dep.name,
))
if dep.depend_id.state == newstate:
ready_mods += dep.depend_id
else:
@ -401,16 +404,20 @@ class Module(models.Model):
@assert_log_admin_access
def button_install(self):
company_countries = self.env['res.company'].search([]).country_id
# domain to select auto-installable (but not yet installed) modules
auto_domain = [('state', '=', 'uninstalled'), ('auto_install', '=', True)]
# determine whether an auto-install module must be installed:
# - all its dependencies are installed or to be installed,
# - at least one dependency is 'to install'
# - if the module is country specific, at least one company is in one of the countries
install_states = frozenset(('installed', 'to install', 'to upgrade'))
def must_install(module):
states = {dep.state for dep in module.dependencies_id if dep.auto_install_required}
return states <= install_states and 'to install' in states
return states <= install_states and 'to install' in states and (
not module.country_ids or module.country_ids & company_countries
)
modules = self
while modules:
@ -428,7 +435,11 @@ class Module(models.Model):
for module in install_mods:
for exclusion in module.exclusion_ids:
if exclusion.name in install_names:
raise UserError(_('Modules %r and %r are incompatible.', module.shortdesc, exclusion.exclusion_id.shortdesc))
raise UserError(_(
'Modules "%(module)s" and "%(incompatible_module)s" are incompatible.',
module=module.shortdesc,
incompatible_module=exclusion.exclusion_id.shortdesc,
))
# check category exclusions
def closure(module):
@ -448,7 +459,7 @@ class Module(models.Model):
if modules and not any(modules <= closure(module) for module in modules):
labels = dict(self.fields_get(['state'])['state']['selection'])
raise UserError(
_('You are trying to install incompatible modules in category %r:%s', category.name, ''.join(
_('You are trying to install incompatible modules in category "%(category)s":%(module_list)s', category=category.name, module_list=''.join(
f"\n- {module.shortdesc} ({labels[module.state]})"
for module in modules
))
@ -571,7 +582,7 @@ class Module(models.Model):
return {
'type': 'ir.actions.act_url',
'target': 'self',
'url': '/web',
'url': '/odoo',
}
def _button_immediate_function(self, function):
@ -689,7 +700,7 @@ class Module(models.Model):
module = todo[i]
i += 1
if module.state not in ('installed', 'to upgrade'):
raise UserError(_("Can not upgrade module %r. It is not installed.", module.name))
raise UserError(_("Cannot upgrade module “%s. It is not installed.", module.name))
if self.get_module_info(module.name).get("installable", True):
self.check_external_dependencies(module.name, 'to upgrade')
for dep in Dependency.search([('name', '=', module.name)]):
@ -708,7 +719,7 @@ class Module(models.Model):
continue
for dep in module.dependencies_id:
if dep.state == 'unknown':
raise UserError(_('You try to upgrade the module %s that depends on the module: %s.\nBut this module is not available in your system.', module.name, dep.name))
raise UserError(_('You try to upgrade the module %(module)s that depends on the module: %(dependency)s.\nBut this module is not available in your system.', module=module.name, dependency=dep.name))
if dep.state == 'uninstalled':
to_install += self.search([('name', '=', dep.name)]).ids
@ -794,6 +805,7 @@ class Module(models.Model):
def _update_from_terp(self, terp):
self._update_dependencies(terp.get('depends', []), terp.get('auto_install'))
self._update_countries(terp.get('countries', []))
self._update_exclusions(terp.get('excludes', []))
self._update_category(terp.get('category', 'Uncategorized'))
@ -810,6 +822,16 @@ class Module(models.Model):
self.env['ir.module.module.dependency'].invalidate_model(['auto_install_required'])
self.invalidate_recordset(['dependencies_id'])
def _update_countries(self, countries=()):
existing = set(self.country_ids.ids)
needed = set(self.env['res.country'].search([('code', 'in', [c.upper() for c in countries])]).ids)
for dep in (needed - existing):
self._cr.execute('INSERT INTO module_country (module_id, country_id) values (%s, %s)', (self.id, dep))
for dep in (existing - needed):
self._cr.execute('DELETE FROM module_country WHERE module_id = %s and country_id = %s', (self.id, dep))
self.invalidate_recordset(['country_ids'])
self.env['res.company'].invalidate_model(['uninstalled_l10n_module_ids'])
def _update_exclusions(self, excludes=None):
self.env['ir.module.module.exclusion'].flush_model()
existing = set(excl.name for excl in self.exclusion_ids)
@ -883,13 +905,18 @@ class Module(models.Model):
def search_panel_select_range(self, field_name, **kwargs):
if field_name == 'category_id':
enable_counters = kwargs.get('enable_counters', False)
domain = [('parent_id', '=', False), ('child_ids.module_ids', '!=', False)]
domain = [
('parent_id', '=', False),
'|',
('module_ids.application', '!=', False),
('child_ids.module_ids', '!=', False),
]
excluded_xmlids = [
'base.module_category_website_theme',
'base.module_category_theme',
]
if not self.user_has_groups('base.group_no_one'):
if not self.env.user.has_group('base.group_no_one'):
excluded_xmlids.append('base.module_category_hidden')
excluded_category_ids = []
@ -928,18 +955,19 @@ class Module(models.Model):
return super(Module, self).search_panel_select_range(field_name, **kwargs)
@api.model
def _load_module_terms(self, modules, langs, overwrite=False):
def _load_module_terms(self, modules, langs, overwrite=False, imported_module=False):
""" Load PO files of the given modules for the given languages. """
# load i18n files
translation_importer = TranslationImporter(self.env.cr, verbose=False)
for module_name in modules:
modpath = get_module_path(module_name)
modpath = get_module_path(module_name, downloaded=imported_module)
if not modpath:
continue
for lang in langs:
is_lang_imported = False
for po_path in get_po_paths(module_name, lang):
env = self.env if imported_module else None
for po_path in get_po_paths(module_name, lang, env=env):
_logger.info('module %s: loading translation file %s for language %s', module_name, po_path, lang)
translation_importer.load_file(po_path, lang)
is_lang_imported = True
@ -994,6 +1022,27 @@ class ModuleDependency(models.Model):
for dependency in self:
dependency.state = dependency.depend_id.state or 'unknown'
@api.model
def all_dependencies(self, module_names):
to_search = {key: True for key in module_names}
res = {}
def search_direct_deps(to_search, res):
to_search_list = list(to_search.keys())
dependencies = self.web_search_read(domain=[("module_id.name", "in", to_search_list)], specification={"module_id":{"fields":{"name":{}}}, "name": {}, })["records"]
to_search.clear()
for dependency in dependencies:
dep_name = dependency["name"]
mod_name = dependency["module_id"]["name"]
if dep_name not in res and dep_name not in to_search and dep_name not in to_search_list:
to_search[dep_name] = True
if mod_name not in res:
res[mod_name] = list()
res[mod_name].append(dep_name)
search_direct_deps(to_search, res)
while to_search:
search_direct_deps(to_search, res)
return res
class ModuleExclusion(models.Model):
_name = "ir.module.module.exclusion"

Some files were not shown because too many files have changed in this diff Show more