mirror of
https://github.com/bringout/oca-ocb-core.git
synced 2026-04-18 17:32:03 +02:00
Initial commit: Core packages
This commit is contained in:
commit
12c29a983b
9512 changed files with 8379910 additions and 0 deletions
|
|
@ -0,0 +1,67 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from . import common
|
||||
from . import test_acl
|
||||
from . import test_api
|
||||
from . import test_barcode
|
||||
from . import test_base
|
||||
from . import test_basecase
|
||||
from . import test_cache
|
||||
from . import test_date_utils
|
||||
from . import test_deprecation
|
||||
from . import test_db_cursor
|
||||
from . import test_expression
|
||||
from . import test_float
|
||||
from . import test_format_address_mixin
|
||||
from . import test_func
|
||||
from . import test_http_case
|
||||
from . import test_image
|
||||
from . import test_avatar_mixin
|
||||
from . import test_ir_actions
|
||||
from . import test_ir_attachment
|
||||
from . import test_ir_cron
|
||||
from . import test_ir_filters
|
||||
from . import test_ir_http
|
||||
from . import test_ir_mail_server
|
||||
from . import test_ir_model
|
||||
from . import test_ir_module
|
||||
from . import test_ir_sequence
|
||||
from . import test_ir_sequence_date_range
|
||||
from . import test_ir_default
|
||||
from . import test_mail
|
||||
from . import test_menu
|
||||
from . import test_mimetypes
|
||||
from . import test_misc
|
||||
from . import test_module
|
||||
from . import test_orm
|
||||
from . import test_ormcache
|
||||
from . import test_osv
|
||||
from . import test_qweb_field
|
||||
from . import test_qweb
|
||||
from . import test_res_config
|
||||
from . import test_res_lang
|
||||
from . import test_search
|
||||
from . import test_translate
|
||||
from . import test_tz
|
||||
# from . import test_uninstall # loop
|
||||
from . import test_user_has_group
|
||||
from . import test_views
|
||||
from . import test_xmlrpc
|
||||
from . import test_res_company
|
||||
from . import test_res_currency
|
||||
from . import test_res_country
|
||||
from . import test_res_partner
|
||||
from . import test_res_partner_bank
|
||||
from . import test_res_users
|
||||
from . import test_reports
|
||||
from . import test_test_retry
|
||||
from . import test_test_suite
|
||||
from . import test_tests_tags
|
||||
from . import test_form_create
|
||||
from . import test_cloc
|
||||
from . import test_profiler
|
||||
from . import test_pdf
|
||||
from . import test_neutralize
|
||||
from . import test_config_parameter
|
||||
from . import test_ir_module_category
|
||||
from . import test_num2words_ar
|
||||
465
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/common.py
Normal file
465
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/common.py
Normal file
|
|
@ -0,0 +1,465 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from contextlib import contextmanager
|
||||
from unittest.mock import patch
|
||||
|
||||
from odoo.tests.common import TransactionCase, HttpCase
|
||||
from odoo import Command
|
||||
|
||||
# Context keys that switch off the mail machinery (message tracking, auto
# subscription, creation logging, password-reset emails) so that tests focused
# on other features neither pay the cost of, nor depend on, the mail stack.
DISABLED_MAIL_CONTEXT = {
    'tracking_disable': True,
    'mail_create_nolog': True,
    'mail_create_nosubscribe': True,
    'mail_notrack': True,
    'no_reset_password': True,
}
|
||||
|
||||
|
||||
class BaseCommon(TransactionCase):
    """Base test class: forces USD as the company currency, disables the mail
    machinery via :data:`DISABLED_MAIL_CONTEXT`, and provides a simple
    ``cls.partner`` record for tests to use.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        # Enforce the use of USD as main currency unless modified in inherited class(es)
        cls._use_currency('USD')

        # Mail logic won't be tested by default in other modules.
        # Mail API overrides should be tested with dedicated tests on purpose.
        # Hack: go through with_context() and grab the resulting env to avoid
        # manual context dict modification.
        cls.env = cls.env['base'].with_context(**DISABLED_MAIL_CONTEXT).env

        cls.partner = cls.env['res.partner'].create({
            'name': 'Test Partner',
        })

    @classmethod
    def _use_currency(cls, currency_code):
        """Force the current company's currency to ``currency_code``.

        :param currency_code: ISO currency name, e.g. ``'USD'``
        """
        # Enforce constant currency
        currency = cls._enable_currency(currency_code)
        if not cls.env.company.currency_id == currency:
            cls.env.transaction.cache.set(cls.env.company, type(cls.env.company).currency_id, currency.id, dirty=True)
            # This is equivalent to ``cls.env.company.currency_id = currency`` but
            # without triggering business code checks.
            # The value is added in cache, and the cache value is set as dirty so
            # that the value will be written to the database on next flush.
            # This was needed because some journal entries may exist when running
            # tests, especially l10n demo data.

    @classmethod
    def _enable_currency(cls, currency_code):
        """Look up (including archived records) and unarchive the currency
        matching ``currency_code``; return the currency record.
        """
        # active_test=False so an archived currency is found and can be revived.
        currency = cls.env['res.currency'].with_context(active_test=False).search(
            [('name', '=', currency_code.upper())]
        )
        currency.action_unarchive()
        return currency
|
||||
|
||||
|
||||
class BaseUsersCommon(BaseCommon):
    """Extend :class:`BaseCommon` with one portal user and one internal user,
    available as ``cls.user_portal`` and ``cls.user_internal``.
    """

    @classmethod
    def _create_user_in_group(cls, name, login, email, group):
        # One dedicated test user per security group; the login doubles as the
        # password to keep credentials predictable in tests.
        return cls.env['res.users'].create({
            'name': name,
            'login': login,
            'password': login,
            'email': email,
            'groups_id': [Command.set([group.id])],
        })

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.group_portal = cls.env.ref('base.group_portal')
        cls.group_user = cls.env.ref('base.group_user')

        cls.user_portal = cls._create_user_in_group(
            'Test Portal User', 'portal_user', 'portal_user@gladys.portal', cls.group_portal,
        )
        cls.user_internal = cls._create_user_in_group(
            'Test Internal User', 'internal_user', 'mark.brown23@example.com', cls.group_user,
        )
|
||||
|
||||
|
||||
class TransactionCaseWithUserDemo(TransactionCase):
    """TransactionCase guaranteeing that ``cls.user_demo`` and
    ``cls.partner_demo`` exist, creating them when the database was
    initialized without demo data.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.env.ref('base.partner_admin').write({'name': 'Mitchell Admin'})
        cls.user_demo = cls.env['res.users'].search([('login', '=', 'demo')])
        cls.partner_demo = cls.user_demo.partner_id

        if not cls.user_demo:
            # No demo data: relax the password policy so the short 'demo'
            # password is accepted, then create the records manually.
            cls.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            # YTI TODO: This could be factorized between the different classes
            cls.partner_demo = cls.env['res.partner'].create({
                'name': 'Marc Demo',
                'email': 'mark.brown23@example.com',
            })
            cls.user_demo = cls.env['res.users'].create({
                'login': 'demo',
                'password': 'demo',
                'partner_id': cls.partner_demo.id,
                'groups_id': [Command.set([cls.env.ref('base.group_user').id, cls.env.ref('base.group_partner_manager').id])],
            })
|
||||
|
||||
|
||||
class HttpCaseWithUserDemo(HttpCase):
    """HttpCase variant guaranteeing demo user/partner records plus
    ``cls.user_admin`` / ``cls.partner_admin`` attributes; the demo user is
    created on the fly when the database has no demo data.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.user_admin = cls.env.ref('base.user_admin')
        cls.user_admin.write({'name': 'Mitchell Admin'})
        cls.partner_admin = cls.user_admin.partner_id
        cls.user_demo = cls.env['res.users'].search([('login', '=', 'demo')])
        cls.partner_demo = cls.user_demo.partner_id

        if not cls.user_demo:
            # No demo data: relax the password policy so the short 'demo'
            # password is accepted, then create the records manually.
            cls.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            cls.partner_demo = cls.env['res.partner'].create({
                'name': 'Marc Demo',
                'email': 'mark.brown23@example.com',
                'tz': 'UTC'
            })
            cls.user_demo = cls.env['res.users'].create({
                'login': 'demo',
                'password': 'demo',
                'partner_id': cls.partner_demo.id,
                'groups_id': [Command.set([cls.env.ref('base.group_user').id, cls.env.ref('base.group_partner_manager').id])],
            })
|
||||
|
||||
|
||||
class SavepointCaseWithUserDemo(TransactionCase):
    """TransactionCase guaranteeing demo user/partner records and offering
    :meth:`_load_partners_set` to create a deterministic partner fixture
    (categories, companies with children) independent of demo data.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        cls.user_demo = cls.env['res.users'].search([('login', '=', 'demo')])
        cls.partner_demo = cls.user_demo.partner_id

        if not cls.user_demo:
            # No demo data: relax the password policy so the short 'demo'
            # password is accepted, then create the records manually.
            cls.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            cls.partner_demo = cls.env['res.partner'].create({
                'name': 'Marc Demo',
                'email': 'mark.brown23@example.com',
            })
            cls.user_demo = cls.env['res.users'].create({
                'login': 'demo',
                'password': 'demo',
                'partner_id': cls.partner_demo.id,
                'groups_id': [Command.set([cls.env.ref('base.group_user').id, cls.env.ref('base.group_partner_manager').id])],
            })

    @classmethod
    def _load_partners_set(cls):
        """Create a small hierarchy of partner categories and a set of company
        partners with child contacts, mirroring the shape of the demo data
        (original demo names are kept as trailing comments).
        """
        cls.partner_category = cls.env['res.partner.category'].create({
            'name': 'Sellers',
            'color': 2,
        })
        cls.partner_category_child_1 = cls.env['res.partner.category'].create({
            'name': 'Office Supplies',
            'parent_id': cls.partner_category.id,
        })
        cls.partner_category_child_2 = cls.env['res.partner.category'].create({
            'name': 'Desk Manufacturers',
            'parent_id': cls.partner_category.id,
        })

        # Load all the demo partners
        cls.partners = cls.env['res.partner'].create([
            {
                'name': 'Inner Works',  # Wood Corner
                'state_id': cls.env.ref('base.state_us_1').id,
                'category_id': [Command.set([cls.partner_category_child_1.id, cls.partner_category_child_2.id,])],
                'child_ids': [Command.create({
                    'name': 'Sheila Ruiz',  # 'Willie Burke',
                }), Command.create({
                    'name': 'Wyatt Howard',  # 'Ron Gibson',
                }), Command.create({
                    'name': 'Austin Kennedy',  # Tom Ruiz
                })],
            }, {
                'name': 'Pepper Street',  # 'Deco Addict',
                'state_id': cls.env.ref('base.state_us_2').id,
                'child_ids': [Command.create({
                    'name': 'Liam King',  # 'Douglas Fletcher',
                }), Command.create({
                    'name': 'Craig Richardson',  # 'Floyd Steward',
                }), Command.create({
                    'name': 'Adam Cox',  # 'Addison Olson',
                })],
            }, {
                'name': 'AnalytIQ',  # 'Gemini Furniture',
                'state_id': cls.env.ref('base.state_us_3').id,
                'child_ids': [Command.create({
                    'name': 'Pedro Boyd',  # Edwin Hansen
                }), Command.create({
                    'name': 'Landon Roberts',  # 'Jesse Brown',
                    'company_id': cls.env.ref('base.main_company').id,
                }), Command.create({
                    'name': 'Leona Shelton',  # 'Soham Palmer',
                }), Command.create({
                    'name': 'Scott Kim',  # 'Oscar Morgan',
                })],
            }, {
                'name': 'Urban Trends',  # 'Ready Mat',
                'state_id': cls.env.ref('base.state_us_4').id,
                'category_id': [Command.set([cls.partner_category_child_1.id, cls.partner_category_child_2.id,])],
                'child_ids': [Command.create({
                    'name': 'Louella Jacobs',  # 'Billy Fox',
                }), Command.create({
                    'name': 'Albert Alexander',  # 'Kim Snyder',
                }), Command.create({
                    'name': 'Brad Castillo',  # 'Edith Sanchez',
                }), Command.create({
                    'name': 'Sophie Montgomery',  # 'Sandra Neal',
                }), Command.create({
                    'name': 'Chloe Bates',  # 'Julie Richards',
                }), Command.create({
                    'name': 'Mason Crawford',  # 'Travis Mendoza',
                }), Command.create({
                    'name': 'Elsie Kennedy',  # 'Theodore Gardner',
                })],
            }, {
                'name': 'Ctrl-Alt-Fix',  # 'The Jackson Group',
                'state_id': cls.env.ref('base.state_us_5').id,
                'child_ids': [Command.create({
                    'name': 'carole miller',  # 'Toni Rhodes',
                }), Command.create({
                    'name': 'Cecil Holmes',  # 'Gordon Owens',
                })],
            }, {
                'name': 'Ignitive Labs',  # 'Azure Interior',
                'state_id': cls.env.ref('base.state_us_6').id,
                'child_ids': [Command.create({
                    'name': 'Jonathan Webb',  # 'Brandon Freeman',
                }), Command.create({
                    'name': 'Clinton Clark',  # 'Nicole Ford',
                }), Command.create({
                    'name': 'Howard Bryant',  # 'Colleen Diaz',
                })],
            }, {
                'name': 'Amber & Forge',  # 'Lumber Inc',
                'state_id': cls.env.ref('base.state_us_7').id,
                'child_ids': [Command.create({
                    'name': 'Mark Webb',  # 'Lorraine Douglas',
                })],
            }, {
                'name': 'Rebecca Day',  # 'Chester Reed',
                'parent_id': cls.env.ref('base.main_partner').id,
            }, {
                'name': 'Gabriella Jennings',  # 'Dwayne Newman',
                'parent_id': cls.env.ref('base.main_partner').id,
            }
        ])
|
||||
|
||||
|
||||
class TransactionCaseWithUserPortal(TransactionCase):
    """TransactionCase guaranteeing that ``cls.user_portal`` and
    ``cls.partner_portal`` exist, creating them when the database was
    initialized without demo data.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.user_portal = cls.env['res.users'].sudo().search([('login', '=', 'portal')])
        cls.partner_portal = cls.user_portal.partner_id

        if cls.user_portal:
            return

        # Portal user missing (no demo data): relax the password policy so the
        # short 'portal' password is accepted, then create the records manually.
        cls.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
        cls.partner_portal = cls.env['res.partner'].create({
            'name': 'Joel Willis',
            'email': 'joel.willis63@example.com',
        })
        cls.user_portal = cls.env['res.users'].with_context(no_reset_password=True).create({
            'login': 'portal',
            'password': 'portal',
            'partner_id': cls.partner_portal.id,
            'groups_id': [Command.set([cls.env.ref('base.group_portal').id])],
        })
|
||||
|
||||
|
||||
class HttpCaseWithUserPortal(HttpCase):
    """HttpCase variant guaranteeing that ``cls.user_portal`` and
    ``cls.partner_portal`` exist, creating them when the database was
    initialized without demo data.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.user_portal = cls.env['res.users'].sudo().search([('login', '=', 'portal')])
        cls.partner_portal = cls.user_portal.partner_id

        if not cls.user_portal:
            # No demo data: relax the password policy so the short 'portal'
            # password is accepted, then create the records manually.
            cls.env['ir.config_parameter'].sudo().set_param('auth_password_policy.minlength', 4)
            cls.partner_portal = cls.env['res.partner'].create({
                'name': 'Joel Willis',
                'email': 'joel.willis63@example.com',
            })
            cls.user_portal = cls.env['res.users'].with_context(no_reset_password=True).create({
                'login': 'portal',
                'password': 'portal',
                'partner_id': cls.partner_portal.id,
                'groups_id': [Command.set([cls.env.ref('base.group_portal').id])],
            })
|
||||
|
||||
|
||||
class MockSmtplibCase:
    """Mixin which allows you to mock the smtplib feature, to be able to test in
    depth the sending of emails. Unlike "MockEmail" which mocks mainly the
    <ir.mail_server> methods, here we mainly mock the smtplib to be able to
    test the <ir.mail_server> model itself.
    """

    @contextmanager
    def mock_smtplib_connection(self):
        """Patch smtplib so no real SMTP connection is opened; every outgoing
        email is recorded as a dict in ``self.emails`` instead.

        Also exposes ``self.connect_mocked`` and ``self.find_mail_server_mocked``
        so tests can inspect how <ir.mail_server> picked a server.
        """
        self.emails = []

        # Keep a reference to the test case so the nested session class can
        # record outgoing emails on it.
        origin = self

        class TestingSMTPSession:
            """SMTP session object returned during the testing.

            So we do not connect to real SMTP server. Store the mail
            server id used for the SMTP connection and other information.

            Can be mocked for testing to know with which arguments the email was sent.
            """

            def quit(self):
                pass

            def send_message(self, message, smtp_from, smtp_to_list):
                origin.emails.append({
                    'smtp_from': smtp_from,
                    'smtp_to_list': smtp_to_list,
                    'message': message.as_string(),
                    'msg_from': message['From'],
                    'from_filter': self.from_filter,
                })

            def sendmail(self, smtp_from, smtp_to_list, message_str, mail_options):
                origin.emails.append({
                    'smtp_from': smtp_from,
                    'smtp_to_list': smtp_to_list,
                    'message': message_str,
                    'msg_from': None,  # to fix if necessary
                    'from_filter': self.from_filter,
                })

            def set_debuglevel(self, smtp_debug):
                pass

            def ehlo_or_helo_if_needed(self):
                pass

            def login(self, user, password):
                pass

            def starttls(self, keyfile=None, certfile=None, context=None):
                pass

        self.testing_smtp_session = TestingSMTPSession()

        IrMailServer = self.env['ir.mail_server']
        connect_origin = IrMailServer.connect
        find_mail_server_origin = IrMailServer._find_mail_server

        # NOTE(review): ``wraps=IrMailServer`` on the connect patch looks odd
        # (one would expect the bound method) -- confirm it is intended.
        with patch('smtplib.SMTP_SSL', side_effect=lambda *args, **kwargs: self.testing_smtp_session), \
             patch('smtplib.SMTP', side_effect=lambda *args, **kwargs: self.testing_smtp_session), \
             patch.object(type(IrMailServer), '_is_test_mode', lambda self: False), \
             patch.object(type(IrMailServer), 'connect', wraps=IrMailServer, side_effect=connect_origin) as connect_mocked, \
             patch.object(type(IrMailServer), '_find_mail_server', side_effect=find_mail_server_origin) as find_mail_server_mocked:
            self.connect_mocked = connect_mocked
            self.find_mail_server_mocked = find_mail_server_mocked
            yield

    def assert_email_sent_smtp(self, smtp_from=None, smtp_to_list=None, message_from=None,
                               mail_server=None, from_filter=None,
                               emails_count=1):
        """Check that the given email has been sent.

        If one of the parameter is None, it's just ignored and not used to retrieve the email.

        :param smtp_from: FROM used for the authentication to the mail server.
            Can be a callable receiving the actual value and returning a bool.
        :param smtp_to_list: List of destination email address
        :param message_from: FROM used in the SMTP headers
        :param mail_server: used to compare the 'from_filter' as an alternative
            to using the from_filter parameter
        :param from_filter: from_filter of the <ir.mail_server> used to send the email
        :param emails_count: the number of emails which should match the condition
        :raise ValueError: if both ``mail_server`` and ``from_filter`` are given
        :raise AssertionError: if the number of recorded emails matching the
            conditions differs from ``emails_count``
        """
        if from_filter is not None and mail_server:
            raise ValueError('Invalid usage: use either from_filter either mail_server')
        if from_filter is None and mail_server is not None:
            from_filter = mail_server.from_filter
        matching_emails = filter(
            lambda email:
                (smtp_from is None or (
                    smtp_from(email['smtp_from'])
                    if callable(smtp_from)
                    else smtp_from == email['smtp_from'])
                )
                and (smtp_to_list is None or smtp_to_list == email['smtp_to_list'])
                and (message_from is None or 'From: %s' % message_from in email['message'])
                and (from_filter is None or from_filter == email['from_filter']),
            self.emails,
        )

        debug_info = ''
        matching_emails_count = len(list(matching_emails))
        if matching_emails_count != emails_count:
            # Build a readable dump of every recorded email for the failure message.
            emails_from = []
            for email in self.emails:
                from_found = next((
                    line.split('From:')[1].strip() for line in email['message'].splitlines()
                    if line.startswith('From:')), '')
                emails_from.append(from_found)
            # BUGFIX: removed the stray closing parenthesis that used to
            # terminate each debug line.
            debug_info = '\n'.join(
                f"SMTP-From: {email['smtp_from']}, SMTP-To: {email['smtp_to_list']}, Msg-From: {email_msg_from}, From_filter: {email['from_filter']}"
                for email, email_msg_from in zip(self.emails, emails_from)
            )
        self.assertEqual(
            matching_emails_count, emails_count,
            msg=f'Incorrect emails sent: {matching_emails_count} found, {emails_count} expected'
                f'\nConditions\nSMTP-From: {smtp_from}, SMTP-To: {smtp_to_list}, Msg-From: {message_from}, From_filter: {from_filter}'
                f'\nNot found in\n{debug_info}'
        )

    @classmethod
    def _init_mail_config(cls):
        # Default alias / catchall / bounce configuration shared by mail tests.
        cls.alias_bounce = 'bounce.test'
        cls.alias_domain = 'test.com'
        cls.default_from = 'notifications'
        cls.env['ir.config_parameter'].sudo().set_param('mail.catchall.domain', cls.alias_domain)
        cls.env['ir.config_parameter'].sudo().set_param('mail.default.from', cls.default_from)
        cls.env['ir.config_parameter'].sudo().set_param('mail.bounce.alias', cls.alias_bounce)

    @classmethod
    def _init_mail_servers(cls):
        # Replace all existing outgoing mail servers with a deterministic set
        # covering the different from_filter configurations.
        cls.env['ir.mail_server'].search([]).unlink()

        ir_mail_server_values = {
            'smtp_host': 'smtp_host',
            'smtp_encryption': 'none',
        }
        (
            cls.server_domain,
            cls.server_user,
            cls.server_notification,
            cls.server_default,
        ) = cls.env['ir.mail_server'].create([
            {
                'name': 'Domain based server',
                'from_filter': 'test.com',
                **ir_mail_server_values,
            }, {
                'name': 'User specific server',
                'from_filter': 'specific_user@test.com',
                **ir_mail_server_values,
            }, {
                'name': 'Server Notifications',
                'from_filter': 'notifications@test.com',
                **ir_mail_server_values,
            }, {
                'name': 'Server No From Filter',
                'from_filter': False,
                **ir_mail_server_values,
            },
        ])
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 2 KiB |
BIN
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/minimal.pdf
Normal file
BIN
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/minimal.pdf
Normal file
Binary file not shown.
BIN
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/odoo.jpg
Normal file
BIN
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/odoo.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 8 KiB |
296
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_acl.py
Normal file
296
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_acl.py
Normal file
|
|
@ -0,0 +1,296 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from odoo.exceptions import AccessError
|
||||
from odoo.addons.base.tests.common import TransactionCaseWithUserDemo
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools.misc import mute_logger
|
||||
from odoo import Command
|
||||
|
||||
# Test group that the demo user must NOT belong to: used to verify that
# field-level ``groups`` restrictions are enforced against non-members.
GROUP_SYSTEM = 'base.group_system'
|
||||
|
||||
|
||||
class TestACL(TransactionCaseWithUserDemo):
    """Check that field-level ``groups`` restrictions are enforced in
    fields_get, view archs, get_views and read/write RPC access, both before
    and after the user joins the restricted group (exercising the view cache).
    """

    def setUp(self):
        super(TestACL, self).setUp()
        self.erp_system_group = self.env.ref(GROUP_SYSTEM)

    def _set_field_groups(self, model, field_name, groups):
        # Restrict ``field_name`` to ``groups`` for the duration of the current
        # test only (self.patch restores the original value on cleanup).
        field = model._fields[field_name]
        self.patch(field, 'groups', groups)

    def test_field_visibility_restriction(self):
        """Check that model-level ``groups`` parameter effectively restricts access to that
        field for users who do not belong to one of the explicitly allowed groups"""
        currency = self.env['res.currency'].with_user(self.user_demo)

        # Add a view that adds a label for the field we are going to check
        extension = self.env["ir.ui.view"].create({
            "name": "Add separate label for decimal_places",
            "model": "res.currency",
            "inherit_id": self.env.ref("base.view_currency_form").id,
            "arch": """
                <data>
                    <field name="decimal_places" position="attributes">
                        <attribute name="nolabel">1</attribute>
                    </field>
                    <field name="decimal_places" position="before">
                        <label for="decimal_places"/>
                    </field>
                </data>
            """,
        })
        currency = currency.with_context(check_view_ids=extension.ids)

        # Verify the test environment first
        original_fields = currency.fields_get([])
        with self.debug_mode():
            # debug mode needed because the field sits in a group_no_one group:
            # <group groups="base.group_no_one">
            #     <group string="Price Accuracy">
            #         <field name="rounding"/>
            #         <field name="decimal_places"/>
            #     </group>
            form_view = currency.get_view(False, 'form')
        view_arch = etree.fromstring(form_view.get('arch'))
        has_group_system = self.user_demo.has_group(GROUP_SYSTEM)
        self.assertFalse(has_group_system, "`demo` user should not belong to the restricted group before the test")
        self.assertIn('decimal_places', original_fields, "'decimal_places' field must be properly visible before the test")
        self.assertNotEqual(view_arch.xpath("//field[@name='decimal_places'][@nolabel='1']"), [],
                            "Field 'decimal_places' must be found in view definition before the test")
        self.assertNotEqual(view_arch.xpath("//label[@for='decimal_places']"), [],
                            "Label for 'decimal_places' must be found in view definition before the test")

        # restrict access to the field and check it's gone
        self._set_field_groups(currency, 'decimal_places', GROUP_SYSTEM)

        fields = currency.fields_get([])
        form_view = currency.get_view(False, 'form')
        view_arch = etree.fromstring(form_view.get('arch'))
        self.assertNotIn('decimal_places', fields, "'decimal_places' field should be gone")
        self.assertEqual(view_arch.xpath("//field[@name='decimal_places']"), [],
                         "Field 'decimal_places' must not be found in view definition")
        self.assertEqual(view_arch.xpath("//label[@for='decimal_places']"), [],
                         "Label for 'decimal_places' must not be found in view definition")

        # Make demo user a member of the restricted group and check that the field is back
        self.erp_system_group.users += self.user_demo
        has_group_system = self.user_demo.has_group(GROUP_SYSTEM)
        fields = currency.fields_get([])
        with self.debug_mode():
            form_view = currency.get_view(False, 'form')
        view_arch = etree.fromstring(form_view.get('arch'))
        self.assertTrue(has_group_system, "`demo` user should now belong to the restricted group")
        self.assertIn('decimal_places', fields, "'decimal_places' field must be properly visible again")
        self.assertNotEqual(view_arch.xpath("//field[@name='decimal_places']"), [],
                            "Field 'decimal_places' must be found in view definition again")
        self.assertNotEqual(view_arch.xpath("//label[@for='decimal_places']"), [],
                            "Label for 'decimal_places' must be found in view definition again")

    @mute_logger('odoo.models')
    def test_field_crud_restriction(self):
        "Read/Write RPC access to restricted field should be forbidden"
        partner = self.env['res.partner'].browse(1).with_user(self.user_demo)

        # Verify the test environment first
        has_group_system = self.user_demo.has_group(GROUP_SYSTEM)
        self.assertFalse(has_group_system, "`demo` user should not belong to the restricted group")
        self.assertTrue(partner.read(['bank_ids']))
        self.assertTrue(partner.write({'bank_ids': []}))

        # Now restrict access to the field and check it's forbidden
        self._set_field_groups(partner, 'bank_ids', GROUP_SYSTEM)

        with self.assertRaises(AccessError):
            partner.read(['bank_ids'])
        with self.assertRaises(AccessError):
            partner.write({'bank_ids': []})

        # Add the restricted group, and check that it works again
        self.erp_system_group.users += self.user_demo
        has_group_system = self.user_demo.has_group(GROUP_SYSTEM)
        self.assertTrue(has_group_system, "`demo` user should now belong to the restricted group")
        self.assertTrue(partner.read(['bank_ids']))
        self.assertTrue(partner.write({'bank_ids': []}))

    @mute_logger('odoo.models')
    def test_fields_browse_restriction(self):
        """Test access to records having restricted fields"""
        # Invalidate cache to avoid restricted value to be available
        # in the cache
        self.env.invalidate_all()
        partner = self.env['res.partner'].with_user(self.user_demo)
        self._set_field_groups(partner, 'email', GROUP_SYSTEM)

        # accessing fields must not raise exceptions...
        partner = partner.search([], limit=1)
        partner.name
        # ... except if they are restricted
        with self.assertRaises(AccessError):
            with mute_logger('odoo.models'):
                partner.email

    def test_view_create_edit_button(self):
        """ Test form view Create, Edit, Delete button visibility based on access right of model.
        Test the user with and without access in the same unit test / transaction
        to test the views cache is properly working """
        methods = ['create', 'edit', 'delete']
        company = self.env['res.company'].with_user(self.user_demo)
        company_view = company.get_view(False, 'form')
        view_arch = etree.fromstring(company_view['arch'])

        # demo not part of the group_system, create edit and delete must be False
        for method in methods:
            self.assertEqual(view_arch.get(method), 'false')

        # demo part of the group_system, create edit and delete must not be specified
        company = self.env['res.company'].with_user(self.env.ref("base.user_admin"))
        company_view = company.get_view(False, 'form')
        view_arch = etree.fromstring(company_view['arch'])
        for method in methods:
            self.assertIsNone(view_arch.get(method))

    def test_m2o_field_create_edit(self):
        """ Test many2one field Create and Edit option visibility based on access rights of relation field
        Test the user with and without access in the same unit test / transaction
        to test the views cache is properly working """
        methods = ['create', 'write']
        company = self.env['res.company'].with_user(self.user_demo)
        company_view = company.get_view(False, 'form')
        view_arch = etree.fromstring(company_view['arch'])
        field_node = view_arch.xpath("//field[@name='currency_id']")
        self.assertTrue(len(field_node), "currency_id field should be in company from view")
        for method in methods:
            self.assertEqual(field_node[0].get('can_' + method), 'false')

        company = self.env['res.company'].with_user(self.env.ref("base.user_admin"))
        company_view = company.get_view(False, 'form')
        view_arch = etree.fromstring(company_view['arch'])
        field_node = view_arch.xpath("//field[@name='currency_id']")
        for method in methods:
            self.assertEqual(field_node[0].get('can_' + method), 'true')

    def test_get_views_fields(self):
        """ Tests fields restricted to group_system are not passed when calling `get_views` as demo
        but the same fields are well passed when calling `get_views` as admin"""
        Partner = self.env['res.partner']
        self._set_field_groups(Partner, 'email', GROUP_SYSTEM)
        views = Partner.with_user(self.user_demo).get_views([(False, 'form')])
        self.assertFalse('email' in views['models']['res.partner'])
        views = Partner.with_user(self.env.ref("base.user_admin")).get_views([(False, 'form')])
        self.assertTrue('email' in views['models']['res.partner'])
|
||||
|
||||
|
||||
class TestIrRule(TransactionCaseWithUserDemo):
    """ Record rules (ir.rule) behavior: blank/always-true domains, several
    rules on the same group (which are OR-combined), global rules, and
    normalization of domains written with an implicit AND operator. """

    def test_ir_rule(self):
        model_res_partner = self.env.ref('base.model_res_partner')
        group_user = self.env.ref('base.group_user')

        # create an ir_rule for the Employee group with a blank domain
        rule1 = self.env['ir.rule'].create({
            'name': 'test_rule1',
            'model_id': model_res_partner.id,
            'domain_force': False,
            'groups': [Command.set(group_user.ids)],
        })

        # read as demo user the partners (one blank domain)
        partners_demo = self.env['res.partner'].with_user(self.user_demo)
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # same with domain 1=1
        rule1.domain_force = "[(1,'=',1)]"
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # same with domain []
        rule1.domain_force = "[]"
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # create another ir_rule for the Employee group (to test multiple rules)
        rule2 = self.env['ir.rule'].create({
            'name': 'test_rule2',
            'model_id': model_res_partner.id,
            'domain_force': False,
            'groups': [Command.set(group_user.ids)],
        })

        # read as demo user with domains [] and blank
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # same with domains 1=1 and blank
        rule1.domain_force = "[(1,'=',1)]"
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # same with domains 1=1 and 1=1
        rule2.domain_force = "[(1,'=',1)]"
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # create another ir_rule for the Employee group (to test multiple rules)
        rule3 = self.env['ir.rule'].create({
            'name': 'test_rule3',
            'model_id': model_res_partner.id,
            'domain_force': False,
            'groups': [Command.set(group_user.ids)],
        })

        # read the partners as demo user
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # same with domains 1=1, 1=1 and 1=1
        rule3.domain_force = "[(1,'=',1)]"
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # modify the global rule on res_company which triggers a recursive check
        # of the rules on company
        global_rule = self.env.ref('base.res_company_rule_employee')
        global_rule.domain_force = "[('id','in', company_ids)]"

        # read as demo user (exercising the global company rule)
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # Modify the ir_rule for employee to have a rule that forbids seeing any
        # record. We use a domain with implicit AND operator for later tests on
        # normalization.
        rule2.domain_force = "[('id','=',False),('name','=',False)]"

        # check that demo user still sees partners, because group-rules are OR'ed
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partner.")

        # create a new group with demo user in it, and a complex rule
        group_test = self.env['res.groups'].create({
            'name': 'Test Group',
            'users': [Command.set(self.user_demo.ids)],
        })

        # add the rule to the new group, with a domain containing an implicit
        # AND operator, which is more tricky because it will have to be
        # normalized before combining it
        rule3.write({
            'domain_force': "[('name','!=',False),('id','!=',False)]",
            'groups': [Command.set(group_test.ids)],
        })

        # read the partners again as demo user, which should give results
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see partners even with the combined rules.")

        # delete global domains (to combine only group domains)
        self.env['ir.rule'].search([('groups', '=', False)]).unlink()

        # read the partners as demo user (several group domains, no global domain)
        partners = partners_demo.search([])
        self.assertTrue(partners, "Demo user should see some partners.")
714
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_api.py
Normal file
714
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_api.py
Normal file
|
|
@ -0,0 +1,714 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, models, Command
|
||||
from odoo.addons.base.tests.common import SavepointCaseWithUserDemo
|
||||
from odoo.tools import mute_logger, unique, lazy
|
||||
from odoo.exceptions import AccessError
|
||||
|
||||
|
||||
class TestAPI(SavepointCaseWithUserDemo):
|
||||
""" test the new API of the ORM """
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(TestAPI, cls).setUpClass()
|
||||
cls._load_partners_set()
|
||||
|
||||
def assertIsRecordset(self, value, model):
|
||||
self.assertIsInstance(value, models.BaseModel)
|
||||
self.assertEqual(value._name, model)
|
||||
|
||||
def assertIsRecord(self, value, model):
|
||||
self.assertIsRecordset(value, model)
|
||||
self.assertTrue(len(value) <= 1)
|
||||
|
||||
def assertIsNull(self, value, model):
|
||||
self.assertIsRecordset(value, model)
|
||||
self.assertFalse(value)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_00_query(self):
|
||||
""" Build a recordset, and check its contents. """
|
||||
domain = [('name', 'ilike', 'j'), ('id', 'in', self.partners.ids)]
|
||||
partners = self.env['res.partner'].search(domain)
|
||||
|
||||
# partners is a collection of browse records
|
||||
self.assertTrue(partners)
|
||||
|
||||
# partners and its contents are instance of the model
|
||||
self.assertIsRecordset(partners, 'res.partner')
|
||||
for p in partners:
|
||||
self.assertIsRecord(p, 'res.partner')
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_01_query_offset(self):
|
||||
""" Build a recordset with offset, and check equivalence. """
|
||||
partners1 = self.env['res.partner'].search([('id', 'in', self.partners.ids)], offset=5)
|
||||
partners2 = self.env['res.partner'].search([('id', 'in', self.partners.ids)])[5:]
|
||||
self.assertIsRecordset(partners1, 'res.partner')
|
||||
self.assertIsRecordset(partners2, 'res.partner')
|
||||
self.assertEqual(list(partners1), list(partners2))
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_02_query_limit(self):
|
||||
""" Build a recordset with offset, and check equivalence. """
|
||||
partners1 = self.env['res.partner'].search([('id', 'in', self.partners.ids)], order='id asc', limit=5)
|
||||
partners2 = self.env['res.partner'].search([('id', 'in', self.partners.ids)], order='id asc')[:5]
|
||||
self.assertIsRecordset(partners1, 'res.partner')
|
||||
self.assertIsRecordset(partners2, 'res.partner')
|
||||
self.assertEqual(list(partners1), list(partners2))
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_03_query_offset_limit(self):
|
||||
""" Build a recordset with offset and limit, and check equivalence. """
|
||||
partners1 = self.env['res.partner'].search([('id', 'in', self.partners.ids)], order='id asc', offset=3, limit=7)
|
||||
partners2 = self.env['res.partner'].search([('id', 'in', self.partners.ids)], order='id asc')[3:10]
|
||||
self.assertIsRecordset(partners1, 'res.partner')
|
||||
self.assertIsRecordset(partners2, 'res.partner')
|
||||
self.assertEqual(list(partners1), list(partners2))
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_04_query_count(self):
|
||||
""" Test the search method with count=True. """
|
||||
self.cr.execute("SELECT COUNT(*) FROM res_partner WHERE active")
|
||||
count1 = self.cr.fetchone()[0]
|
||||
count2 = self.env['res.partner'].search([], count=True)
|
||||
self.assertIsInstance(count1, int)
|
||||
self.assertIsInstance(count2, int)
|
||||
self.assertEqual(count1, count2)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_05_immutable(self):
|
||||
""" Check that a recordset remains the same, even after updates. """
|
||||
domain = [('name', 'ilike', 'g'), ('id', 'in', self.partners.ids)]
|
||||
partners = self.env['res.partner'].search(domain)
|
||||
self.assertTrue(partners)
|
||||
ids = partners.ids
|
||||
|
||||
# modify those partners, and check that partners has not changed
|
||||
partners.write({'active': False})
|
||||
self.assertEqual(ids, partners.ids)
|
||||
|
||||
# redo the search, and check that the result is now empty
|
||||
partners2 = self.env['res.partner'].search(domain)
|
||||
self.assertFalse(partners2)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_06_fields(self):
|
||||
""" Check that relation fields return records, recordsets or nulls. """
|
||||
user = self.env.user
|
||||
self.assertIsRecord(user, 'res.users')
|
||||
self.assertIsRecord(user.partner_id, 'res.partner')
|
||||
self.assertIsRecordset(user.groups_id, 'res.groups')
|
||||
|
||||
for name, field in self.partners._fields.items():
|
||||
if field.type == 'many2one':
|
||||
for p in self.partners:
|
||||
self.assertIsRecord(p[name], field.comodel_name)
|
||||
elif field.type == 'reference':
|
||||
for p in self.partners:
|
||||
if p[name]:
|
||||
self.assertIsRecord(p[name], field.comodel_name)
|
||||
elif field.type in ('one2many', 'many2many'):
|
||||
for p in self.partners:
|
||||
self.assertIsRecordset(p[name], field.comodel_name)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_07_null(self):
|
||||
""" Check behavior of null instances. """
|
||||
# select a partner without a parent
|
||||
partner = self.env['res.partner'].search([('parent_id', '=', False), ('id', 'in', self.partners.ids)])[0]
|
||||
|
||||
# check partner and related null instances
|
||||
self.assertTrue(partner)
|
||||
self.assertIsRecord(partner, 'res.partner')
|
||||
|
||||
self.assertFalse(partner.parent_id)
|
||||
self.assertIsNull(partner.parent_id, 'res.partner')
|
||||
|
||||
self.assertIs(partner.parent_id.id, False)
|
||||
|
||||
self.assertFalse(partner.parent_id.user_id)
|
||||
self.assertIsNull(partner.parent_id.user_id, 'res.users')
|
||||
|
||||
self.assertIs(partner.parent_id.user_id.name, False)
|
||||
|
||||
self.assertFalse(partner.parent_id.user_id.groups_id)
|
||||
self.assertIsRecordset(partner.parent_id.user_id.groups_id, 'res.groups')
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_40_new_new(self):
|
||||
""" Call new-style methods in the new API style. """
|
||||
partners = self.env['res.partner'].search([('name', 'ilike', 'g'), ('id', 'in', self.partners.ids)])
|
||||
self.assertTrue(partners)
|
||||
|
||||
# call method write on partners itself, and check its effect
|
||||
partners.write({'active': False})
|
||||
for p in partners:
|
||||
self.assertFalse(p.active)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_45_new_new(self):
|
||||
""" Call new-style methods on records (new API style). """
|
||||
partners = self.env['res.partner'].search([('name', 'ilike', 'g'), ('id', 'in', self.partners.ids)])
|
||||
self.assertTrue(partners)
|
||||
|
||||
# call method write on partner records, and check its effects
|
||||
for p in partners:
|
||||
p.write({'active': False})
|
||||
for p in partners:
|
||||
self.assertFalse(p.active)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
@mute_logger('odoo.addons.base.models.ir_model')
|
||||
def test_50_environment(self):
|
||||
""" Test environment on records. """
|
||||
# partners and reachable records are attached to self.env
|
||||
partners = self.env['res.partner'].search([('name', 'ilike', 'j'), ('id', 'in', self.partners.ids)])
|
||||
self.assertEqual(partners.env, self.env)
|
||||
for x in (partners, partners[0], partners[0].company_id):
|
||||
self.assertEqual(x.env, self.env)
|
||||
for p in partners:
|
||||
self.assertEqual(p.env, self.env)
|
||||
|
||||
# check that the current user can read and modify company data
|
||||
partners[0].company_id.name
|
||||
partners[0].company_id.write({'name': 'Fools'})
|
||||
|
||||
# create an environment with a demo user
|
||||
demo = self.env['res.users'].create({
|
||||
'name': 'test_environment_demo',
|
||||
'login': 'test_environment_demo',
|
||||
'password': 'test_environment_demo',
|
||||
})
|
||||
demo_env = self.env(user=demo)
|
||||
self.assertNotEqual(demo_env, self.env)
|
||||
|
||||
# partners and related records are still attached to self.env
|
||||
self.assertEqual(partners.env, self.env)
|
||||
for x in (partners, partners[0], partners[0].company_id):
|
||||
self.assertEqual(x.env, self.env)
|
||||
for p in partners:
|
||||
self.assertEqual(p.env, self.env)
|
||||
|
||||
# create record instances attached to demo_env
|
||||
demo_partners = partners.with_user(demo)
|
||||
self.assertEqual(demo_partners.env, demo_env)
|
||||
for x in (demo_partners, demo_partners[0], demo_partners[0].company_id):
|
||||
self.assertEqual(x.env, demo_env)
|
||||
for p in demo_partners:
|
||||
self.assertEqual(p.env, demo_env)
|
||||
|
||||
# demo user can read but not modify company data
|
||||
demo_partner = self.env['res.partner'].search([('name', '=', 'Landon Roberts')]).with_user(demo)
|
||||
self.assertTrue(demo_partner.company_id, 'This partner is supposed to be linked to a company')
|
||||
demo_partner.company_id.name
|
||||
with self.assertRaises(AccessError):
|
||||
demo_partner.company_id.write({'name': 'Pricks'})
|
||||
|
||||
# remove demo user from all groups
|
||||
demo.write({'groups_id': [Command.clear()]})
|
||||
|
||||
# demo user can no longer access partner data
|
||||
with self.assertRaises(AccessError):
|
||||
demo_partner.company_id.name
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_55_environment_lang(self):
|
||||
""" Check the record env.lang behavior """
|
||||
partner = self.partner_demo
|
||||
self.env['res.lang']._activate_lang('fr_FR')
|
||||
self.assertEqual(partner.with_context(lang=None).env.lang, None, 'None lang context should have None env.lang')
|
||||
self.assertEqual(partner.with_context(lang='en_US').env.lang, 'en_US', 'en_US active lang context should have en_US env.lang')
|
||||
self.assertEqual(partner.with_context(lang='fr_FR').env.lang, 'fr_FR', 'fr_FR active lang context should have fr_FR env.lang')
|
||||
self.assertEqual(partner.with_context(lang='nl_NL').env.lang, None, 'Inactive lang context lang should have None env.lang')
|
||||
self.assertEqual(partner.with_context(lang='Dummy').env.lang, None, 'Ilegal lang context should have None env.lang')
|
||||
|
||||
def test_56_environment_uid_origin(self):
|
||||
"""Check the expected behavior of `env.uid_origin`"""
|
||||
user_demo = self.user_demo
|
||||
user_admin = self.env.ref('base.user_admin')
|
||||
self.assertEqual(self.env.uid_origin, None)
|
||||
self.assertEqual(self.env['base'].with_user(user_demo).env.uid_origin, user_demo.id)
|
||||
self.assertEqual(self.env['base'].with_user(user_demo).with_user(user_admin).env.uid_origin, user_demo.id)
|
||||
self.assertEqual(self.env['base'].with_user(user_admin).with_user(user_demo).env.uid_origin, user_admin.id)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_60_cache(self):
|
||||
""" Check the record cache behavior """
|
||||
Partners = self.env['res.partner']
|
||||
pids = []
|
||||
data = {
|
||||
'partner One': ['Partner One - One', 'Partner One - Two'],
|
||||
'Partner Two': ['Partner Two - One'],
|
||||
'Partner Three': ['Partner Three - One'],
|
||||
}
|
||||
for p in data:
|
||||
pids.append(Partners.create({
|
||||
'name': p,
|
||||
'child_ids': [Command.create({'name': c}) for c in data[p]],
|
||||
}).id)
|
||||
|
||||
partners = Partners.search([('id', 'in', pids)])
|
||||
partner1, partner2 = partners[0], partners[1]
|
||||
children1, children2 = partner1.child_ids, partner2.child_ids
|
||||
self.assertTrue(children1)
|
||||
self.assertTrue(children2)
|
||||
|
||||
# take a child contact
|
||||
child = children1[0]
|
||||
self.assertEqual(child.parent_id, partner1)
|
||||
self.assertIn(child, partner1.child_ids)
|
||||
self.assertNotIn(child, partner2.child_ids)
|
||||
|
||||
# fetch data in the cache
|
||||
for p in partners:
|
||||
p.name, p.company_id.name, p.user_id.name, p.contact_address
|
||||
self.env.cache.check(self.env)
|
||||
|
||||
# change its parent
|
||||
child.write({'parent_id': partner2.id})
|
||||
self.env.cache.check(self.env)
|
||||
|
||||
# check recordsets
|
||||
self.assertEqual(child.parent_id, partner2)
|
||||
self.assertNotIn(child, partner1.child_ids)
|
||||
self.assertIn(child, partner2.child_ids)
|
||||
self.assertEqual(set(partner1.child_ids + child), set(children1))
|
||||
self.assertEqual(set(partner2.child_ids), set(children2 + child))
|
||||
self.env.cache.check(self.env)
|
||||
|
||||
# delete it
|
||||
child.unlink()
|
||||
self.env.cache.check(self.env)
|
||||
|
||||
# check recordsets
|
||||
self.assertEqual(set(partner1.child_ids), set(children1) - set([child]))
|
||||
self.assertEqual(set(partner2.child_ids), set(children2))
|
||||
self.env.cache.check(self.env)
|
||||
|
||||
# convert from the cache format to the write format
|
||||
partner = partner1
|
||||
partner.country_id, partner.child_ids
|
||||
data = partner._convert_to_write(partner._cache)
|
||||
self.assertEqual(data['country_id'], partner.country_id.id)
|
||||
self.assertEqual(data['child_ids'], [Command.set(partner.child_ids.ids)])
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_60_prefetch(self):
|
||||
""" Check the record cache prefetching """
|
||||
partners = self.env['res.partner'].search([('id', 'in', self.partners.ids)], limit=models.PREFETCH_MAX)
|
||||
self.assertTrue(len(partners) > 1)
|
||||
|
||||
# all the records in partners are ready for prefetching
|
||||
self.assertItemsEqual(partners.ids, partners._prefetch_ids)
|
||||
|
||||
# reading ONE partner should fetch them ALL
|
||||
for partner in partners:
|
||||
state = partner.state_id
|
||||
break
|
||||
partner_ids_with_field = [partner.id
|
||||
for partner in partners
|
||||
if 'state_id' in partner._cache]
|
||||
self.assertItemsEqual(partner_ids_with_field, partners.ids)
|
||||
|
||||
# partners' states are ready for prefetching
|
||||
state_ids = {
|
||||
partner._cache['state_id']
|
||||
for partner in partners
|
||||
if partner._cache['state_id'] is not None
|
||||
}
|
||||
self.assertTrue(len(state_ids) > 1)
|
||||
self.assertItemsEqual(state_ids, state._prefetch_ids)
|
||||
|
||||
# reading ONE partner country should fetch ALL partners' countries
|
||||
for partner in partners:
|
||||
if partner.state_id:
|
||||
partner.state_id.name
|
||||
break
|
||||
state_ids_with_field = [st.id for st in partners.state_id if 'name' in st._cache]
|
||||
self.assertItemsEqual(state_ids_with_field, state_ids)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_60_prefetch_model(self):
|
||||
""" Check the prefetching model. """
|
||||
partners = self.env['res.partner'].search([('id', 'in', self.partners.ids)], limit=models.PREFETCH_MAX)
|
||||
self.assertTrue(partners)
|
||||
|
||||
def same_prefetch(a, b):
|
||||
self.assertEqual(set(a._prefetch_ids), set(b._prefetch_ids))
|
||||
|
||||
def diff_prefetch(a, b):
|
||||
self.assertNotEqual(set(a._prefetch_ids), set(b._prefetch_ids))
|
||||
|
||||
# the recordset operations below use different prefetch sets
|
||||
diff_prefetch(partners, partners.browse())
|
||||
diff_prefetch(partners, partners[0])
|
||||
diff_prefetch(partners, partners[:5])
|
||||
|
||||
# the recordset operations below share the prefetch set
|
||||
same_prefetch(partners, partners.browse(partners.ids))
|
||||
same_prefetch(partners, partners.with_user(self.user_demo))
|
||||
same_prefetch(partners, partners.with_context(active_test=False))
|
||||
same_prefetch(partners, partners[:10].with_prefetch(partners._prefetch_ids))
|
||||
|
||||
# iteration and relational fields should use the same prefetch set
|
||||
self.assertEqual(type(partners).country_id.type, 'many2one')
|
||||
self.assertEqual(type(partners).bank_ids.type, 'one2many')
|
||||
self.assertEqual(type(partners).category_id.type, 'many2many')
|
||||
|
||||
vals0 = {
|
||||
'name': 'Empty relational fields',
|
||||
'country_id': False,
|
||||
'bank_ids': [],
|
||||
'category_id': [],
|
||||
}
|
||||
vals1 = {
|
||||
'name': 'Non-empty relational fields',
|
||||
'country_id': self.ref('base.be'),
|
||||
'bank_ids': [Command.create({'acc_number': 'FOO42'})],
|
||||
'category_id': [Command.link(self.partner_category.id)],
|
||||
}
|
||||
partners = partners.create(vals0) + partners.create(vals1)
|
||||
for partner in partners:
|
||||
same_prefetch(partner, partners)
|
||||
same_prefetch(partner.country_id, partners.country_id)
|
||||
same_prefetch(partner.bank_ids, partners.bank_ids)
|
||||
same_prefetch(partner.category_id, partners.category_id)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_60_prefetch_read(self):
|
||||
""" Check that reading a field computes it on self only. """
|
||||
Partner = self.env['res.partner']
|
||||
field = type(Partner).company_type
|
||||
self.assertTrue(field.compute and not field.store)
|
||||
|
||||
partner1 = Partner.create({'name': 'Foo'})
|
||||
partner2 = Partner.create({'name': 'Bar', 'parent_id': partner1.id})
|
||||
self.assertEqual(partner1.child_ids, partner2)
|
||||
|
||||
# reading partner1 should not prefetch 'company_type' on partner2
|
||||
self.env.clear()
|
||||
partner1 = partner1.with_prefetch()
|
||||
partner1.read(['company_type'])
|
||||
self.assertIn('company_type', partner1._cache)
|
||||
self.assertNotIn('company_type', partner2._cache)
|
||||
|
||||
# reading partner1 should not prefetch 'company_type' on partner2
|
||||
self.env.clear()
|
||||
partner1 = partner1.with_prefetch()
|
||||
partner1.read(['child_ids', 'company_type'])
|
||||
self.assertIn('company_type', partner1._cache)
|
||||
self.assertNotIn('company_type', partner2._cache)
|
||||
|
||||
def test_60_reversed(self):
|
||||
records = self.partners
|
||||
self.assertGreater(len(records), 1)
|
||||
|
||||
# check order
|
||||
self.assertEqual(list(reversed(records)), list(reversed(list(records))))
|
||||
|
||||
first = next(iter(records))
|
||||
last = next(reversed(records))
|
||||
self.assertEqual(first, records[0])
|
||||
self.assertEqual(last, records[-1])
|
||||
|
||||
# check prefetching
|
||||
prefetch_ids = records.ids
|
||||
reversed_ids = [record.id for record in reversed(records)]
|
||||
|
||||
self.assertEqual(list(first._prefetch_ids), prefetch_ids)
|
||||
self.assertEqual(list(last._prefetch_ids), reversed_ids)
|
||||
|
||||
self.assertEqual(list(reversed(first._prefetch_ids)), reversed_ids)
|
||||
self.assertEqual(list(reversed(last._prefetch_ids)), prefetch_ids)
|
||||
|
||||
# check prefetching across many2one field
|
||||
prefetch_ids = records.state_id.ids
|
||||
reversed_ids = list(unique(
|
||||
record.state_id.id
|
||||
for record in reversed(records)
|
||||
if record.state_id
|
||||
))
|
||||
|
||||
self.assertEqual(list(first.state_id._prefetch_ids), prefetch_ids)
|
||||
self.assertEqual(list(last.state_id._prefetch_ids), reversed_ids)
|
||||
|
||||
self.assertEqual(list(reversed(first.state_id._prefetch_ids)), reversed_ids)
|
||||
self.assertEqual(list(reversed(last.state_id._prefetch_ids)), prefetch_ids)
|
||||
|
||||
# check prefetching across x2many field
|
||||
prefetch_ids = records.child_ids.ids
|
||||
reversed_ids = list(unique(
|
||||
child.id
|
||||
for record in reversed(records)
|
||||
for child in record.child_ids
|
||||
))
|
||||
|
||||
self.assertEqual(list(first.child_ids._prefetch_ids), prefetch_ids)
|
||||
self.assertEqual(list(last.child_ids._prefetch_ids), reversed_ids)
|
||||
|
||||
self.assertEqual(list(reversed(first.child_ids._prefetch_ids)), reversed_ids)
|
||||
self.assertEqual(list(reversed(last.child_ids._prefetch_ids)), prefetch_ids)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_70_one(self):
|
||||
""" Check method one(). """
|
||||
# check with many records
|
||||
ps = self.env['res.partner'].search([('name', 'ilike', 'a'), ('id', 'in', self.partners.ids)])
|
||||
self.assertTrue(len(ps) > 1)
|
||||
with self.assertRaises(ValueError):
|
||||
ps.ensure_one()
|
||||
|
||||
p1 = ps[0]
|
||||
self.assertEqual(len(p1), 1)
|
||||
self.assertEqual(p1.ensure_one(), p1)
|
||||
|
||||
p0 = self.env['res.partner'].browse()
|
||||
self.assertEqual(len(p0), 0)
|
||||
with self.assertRaises(ValueError):
|
||||
p0.ensure_one()
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_80_contains(self):
|
||||
""" Test membership on recordset. """
|
||||
p1 = self.partners[0]
|
||||
ps = self.partners
|
||||
self.assertTrue(p1 in ps)
|
||||
|
||||
with self.assertRaisesRegex(TypeError, r"unsupported operand types in: 42 in res\.partner.*"):
|
||||
42 in ps
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: ir\.ui\.menu.* in res\.partner.*"):
|
||||
self.env['ir.ui.menu'] in ps
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_80_lazy_contains(self):
|
||||
""" Test membership on recordset. """
|
||||
p1 = lazy(lambda: self.partners[0])
|
||||
ps = lazy(lambda: self.partners)
|
||||
self.assertTrue(p1 in ps)
|
||||
|
||||
with self.assertRaisesRegex(TypeError, r"unsupported operand types in: 42 in res\.partner.*"):
|
||||
lazy(lambda: 42) in ps
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: ir\.ui\.menu.* in res\.partner.*"):
|
||||
lazy(lambda: self.env['ir.ui.menu']) in ps
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_80_set_operations(self):
|
||||
""" Check set operations on recordsets. """
|
||||
pa = self.env['res.partner'].search([('name', 'ilike', 'a'), ('id', 'in', self.partners.ids)])
|
||||
pb = self.env['res.partner'].search([('name', 'ilike', 'b'), ('id', 'in', self.partners.ids)])
|
||||
|
||||
self.assertTrue(pa)
|
||||
self.assertTrue(pb)
|
||||
self.assertTrue(set(pa) & set(pb))
|
||||
|
||||
concat = pa + pb
|
||||
self.assertEqual(list(concat), list(pa) + list(pb))
|
||||
self.assertEqual(len(concat), len(pa) + len(pb))
|
||||
|
||||
difference = pa - pb
|
||||
self.assertEqual(len(difference), len(set(difference)))
|
||||
self.assertEqual(set(difference), set(pa) - set(pb))
|
||||
self.assertLessEqual(difference, pa)
|
||||
|
||||
intersection = pa & pb
|
||||
self.assertEqual(len(intersection), len(set(intersection)))
|
||||
self.assertEqual(set(intersection), set(pa) & set(pb))
|
||||
self.assertLessEqual(intersection, pa)
|
||||
self.assertLessEqual(intersection, pb)
|
||||
|
||||
union = pa | pb
|
||||
self.assertEqual(len(union), len(set(union)))
|
||||
self.assertEqual(set(union), set(pa) | set(pb))
|
||||
self.assertGreaterEqual(union, pa)
|
||||
self.assertGreaterEqual(union, pb)
|
||||
|
||||
# one cannot mix different models with set operations
|
||||
ps = pa
|
||||
ms = self.env['ir.ui.menu'].search([])
|
||||
self.assertNotEqual(ps._name, ms._name)
|
||||
self.assertNotEqual(ps, ms)
|
||||
|
||||
with self.assertRaisesRegex(TypeError, r"unsupported operand types in: res\.partner.* \+ 'string'"):
|
||||
ps + 'string'
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* \+ ir\.ui\.menu.*"):
|
||||
ps + ms
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* - ir\.ui\.menu.*"):
|
||||
ps - ms
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* & ir\.ui\.menu.*"):
|
||||
ps & ms
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* \| ir\.ui\.menu.*"):
|
||||
ps | ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps < ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps <= ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps > ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps >= ms
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_80_lazy_set_operations(self):
|
||||
""" Check set operations on recordsets. """
|
||||
pa = lazy(lambda: self.env['res.partner'].search([('name', 'ilike', 'a'), ('id', 'in', self.partners.ids)]))
|
||||
pb = lazy(lambda: self.env['res.partner'].search([('name', 'ilike', 'b'), ('id', 'in', self.partners.ids)]))
|
||||
|
||||
self.assertTrue(pa)
|
||||
self.assertTrue(pb)
|
||||
self.assertTrue(set(pa) & set(pb))
|
||||
|
||||
concat = pa + pb
|
||||
self.assertEqual(list(concat), list(pa) + list(pb))
|
||||
self.assertEqual(len(concat), len(pa) + len(pb))
|
||||
|
||||
difference = pa - pb
|
||||
self.assertEqual(len(difference), len(set(difference)))
|
||||
self.assertEqual(set(difference), set(pa) - set(pb))
|
||||
self.assertLessEqual(difference, pa)
|
||||
|
||||
intersection = pa & pb
|
||||
self.assertEqual(len(intersection), len(set(intersection)))
|
||||
self.assertEqual(set(intersection), set(pa) & set(pb))
|
||||
self.assertLessEqual(intersection, pa)
|
||||
self.assertLessEqual(intersection, pb)
|
||||
|
||||
union = pa | pb
|
||||
self.assertEqual(len(union), len(set(union)))
|
||||
self.assertEqual(set(union), set(pa) | set(pb))
|
||||
self.assertGreaterEqual(union, pa)
|
||||
self.assertGreaterEqual(union, pb)
|
||||
|
||||
# one cannot mix different models with set operations
|
||||
ps = pa
|
||||
ms = lazy(lambda: self.env['ir.ui.menu'].search([]))
|
||||
self.assertNotEqual(ps._name, ms._name)
|
||||
self.assertNotEqual(ps, ms)
|
||||
|
||||
with self.assertRaisesRegex(TypeError, r"unsupported operand types in: res\.partner.* \+ 'string'"):
|
||||
ps + 'string'
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* \+ ir\.ui\.menu.*"):
|
||||
ps + ms
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* - ir\.ui\.menu.*"):
|
||||
ps - ms
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* & ir\.ui\.menu.*"):
|
||||
ps & ms
|
||||
with self.assertRaisesRegex(TypeError, r"inconsistent models in: res\.partner.* \| ir\.ui\.menu.*"):
|
||||
ps | ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps < ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps <= ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps > ms
|
||||
with self.assertRaises(TypeError):
|
||||
ps >= ms
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_80_filter(self):
|
||||
""" Check filter on recordsets. """
|
||||
ps = self.partners
|
||||
customers = ps.browse([p.id for p in ps if p.employee])
|
||||
|
||||
# filter on a single field
|
||||
self.assertEqual(ps.filtered(lambda p: p.employee), customers)
|
||||
self.assertEqual(ps.filtered('employee'), customers)
|
||||
|
||||
# filter on a sequence of fields
|
||||
self.assertEqual(
|
||||
ps.filtered(lambda p: p.parent_id.employee),
|
||||
ps.filtered('parent_id.employee')
|
||||
)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_80_map(self):
|
||||
""" Check map on recordsets. """
|
||||
ps = self.partners
|
||||
parents = ps.browse()
|
||||
for p in ps:
|
||||
parents |= p.parent_id
|
||||
|
||||
# map a single field
|
||||
self.assertEqual(ps.mapped(lambda p: p.parent_id), parents)
|
||||
self.assertEqual(ps.mapped('parent_id'), parents)
|
||||
self.assertEqual(ps.parent_id, parents)
|
||||
|
||||
# map a sequence of fields
|
||||
self.assertEqual(
|
||||
ps.mapped(lambda p: p.parent_id.name),
|
||||
[p.parent_id.name for p in ps]
|
||||
)
|
||||
self.assertEqual(
|
||||
ps.mapped('parent_id.name'),
|
||||
[p.name for p in parents]
|
||||
)
|
||||
self.assertEqual(
|
||||
ps.parent_id.mapped('name'),
|
||||
[p.name for p in parents]
|
||||
)
|
||||
|
||||
# map an empty sequence of fields
|
||||
self.assertEqual(ps.mapped(''), ps)
|
||||
|
||||
    @mute_logger('odoo.models')
    def test_80_sorted(self):
        """ Check sorted on recordsets. """
        ps = self.env['res.partner'].search([('id', 'in', self.partners.ids)])

        # sort by model order (no key): rebuilding the recordset from two
        # halves must sort back to the search order
        qs = ps[:len(ps) // 2] + ps[len(ps) // 2:]
        self.assertEqual(qs.sorted().ids, ps.ids)

        # sort by name, with a function or a field name
        by_name_ids = [p.id for p in sorted(ps, key=lambda p: p.name)]
        self.assertEqual(ps.sorted(lambda p: p.name).ids, by_name_ids)
        self.assertEqual(ps.sorted('name').ids, by_name_ids)

        # sort by inverse name, with a field name
        by_name_ids = [p.id for p in sorted(ps, key=lambda p: p.name, reverse=True)]
        self.assertEqual(ps.sorted('name', reverse=True).ids, by_name_ids)

        # sorted doesn't filter out new records but don't sort them either (limitation)
        new_p = self.env['res.partner'].new({
            'child_ids': [
                Command.create({'name': 'z'}),
                Command.create({'name': 'a'}),
            ],
        })
        self.assertEqual(len(new_p.child_ids.sorted()), 2)

        # sorted keeps the _prefetch_ids: sorting one partner's children must
        # not break batched prefetching across sibling partners
        partners_with_children = self.env['res.partner'].create([
            {
                'name': 'required',
                'child_ids': [
                    Command.create({'name': 'z'}),
                    Command.create({'name': 'a'}),
                ],
            },
            {
                'name': 'required',
                'child_ids': [
                    Command.create({'name': 'z'}),
                    Command.create({'name': 'a'}),
                ],
            },
        ])
        partners_with_children.invalidate_model(['name'])
        # Only one query to fetch name of children of each partner
        with self.assertQueryCount(1):
            for partner in partners_with_children:
                partner.child_ids.sorted('id').mapped('name')
|
||||
|
||||
|
||||
class TestExternalAPI(SavepointCaseWithUserDemo):
    """Tests for the external (RPC) API entry points."""

    def test_call_kw(self):
        """kwargs is not modified by the execution of the call"""
        partner = self.env['res.partner'].create({'name': 'MyPartner1'})
        args = (partner.ids, ['name'])
        kwargs = {'context': {'test': True}}
        api.call_kw(self.env['res.partner'], 'read', args, kwargs)
        # call_kw pops/uses the context internally; the caller's dict must
        # come back untouched
        self.assertEqual(kwargs, {'context': {'test': True}})
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from base64 import b64decode
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
class TestAvatarMixin(TransactionCase):

    """ tests the avatar mixin """
    def setUp(self):
        # Three fixtures:
        #  - a user without an image: its avatar must be generated from its name
        #  - a user without a name: its avatar must fall back to the placeholder
        #  - a partner not linked to any user: placeholder as well
        super().setUp()
        self.user_without_image = self.env['res.users'].create({
            'name': 'Marc Demo',
            'email': 'mark.brown23@example.com',
            'image_1920': False,
            'create_date': '2015-11-12 00:00:00',
            'login': 'demo_1',
            'password': 'demo_1'
        })
        # create_date is pinned on the partner too — presumably the generated
        # avatar's background hue depends on it, which keeps the exact-SVG
        # comparison below deterministic; confirm against the avatar mixin
        self.user_without_image.partner_id.create_date = '2015-11-12 00:00:00'

        self.user_without_name = self.env['res.users'].create({
            'name': '',
            'email': 'marc.grey25@example.com',
            'image_1920': False,
            'login': 'marc_1',
            'password': 'marc_1',
        })
        self.external_partner = self.env['res.partner'].create({
            'name': 'Josh Demo',
            'email': 'josh.brown23@example.com',
            'image_1920': False
        })

    def test_partner_has_avatar_even_if_it_has_no_image(self):
        # every avatar_* size field must be populated even without image_1920
        self.assertTrue(self.user_without_image.partner_id.avatar_128)
        self.assertTrue(self.user_without_image.partner_id.avatar_256)
        self.assertTrue(self.user_without_image.partner_id.avatar_512)
        self.assertTrue(self.user_without_image.partner_id.avatar_1024)
        self.assertTrue(self.user_without_image.partner_id.avatar_1920)

    def test_content_of_generated_partner_avatar(self):
        # exact SVG expected for 'Marc Demo' with the pinned create_date:
        # colored square plus the name's initial
        expectedAvatar = (
            "<?xml version='1.0' encoding='UTF-8' ?>"
            "<svg height='180' width='180' xmlns='http://www.w3.org/2000/svg' xmlns:xlink='http://www.w3.org/1999/xlink'>"
            "<rect fill='hsl(184, 40%, 45%)' height='180' width='180'/>"
            "<text fill='#ffffff' font-size='96' text-anchor='middle' x='90' y='125' font-family='sans-serif'>M</text>"
            "</svg>"
        )
        self.assertEqual(expectedAvatar, b64decode(self.user_without_image.partner_id.avatar_1920).decode('utf-8'))

    def test_partner_without_name_has_default_placeholder_image_as_avatar(self):
        self.assertEqual(self.user_without_name.partner_id._avatar_get_placeholder(), b64decode(self.user_without_name.partner_id.avatar_1920))

    def test_external_partner_has_default_placeholder_image_as_avatar(self):
        # NOTE(review): external (non-user) partners get the placeholder, not
        # a generated initial avatar — behavior demonstrated by this assertion
        self.assertEqual(self.external_partner._avatar_get_placeholder(), b64decode(self.external_partner.avatar_1920))

    def test_partner_and_user_have_the_same_avatar(self):
        self.assertEqual(self.user_without_image.partner_id.avatar_1920, self.user_without_image.avatar_1920)
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools import check_barcode_encoding, get_barcode_check_digit
|
||||
|
||||
|
||||
class TestBarcode(TransactionCase):
    """Checks for the barcode helpers in odoo.tools
    (get_barcode_check_digit, check_barcode_encoding)."""

    def test_barcode_check_digit(self):
        # the helper expects an 18-digit, zero-padded input; the computed
        # check digit must equal the barcode's own last digit
        for barcode, pad in (("87111125", 10), ("1234567891231", 5)):
            padded = "0" * pad + barcode
            self.assertEqual(get_barcode_check_digit(padded), int(barcode[-1]))

    def test_barcode_encoding(self):
        # well-formed barcodes for their respective encodings
        valid_cases = (
            ('20220006', 'ean8'),
            ('93855341', 'ean8'),
            ('2022071416014', 'ean13'),
            ('9745213796142', 'ean13'),
        )
        for barcode, encoding in valid_cases:
            self.assertTrue(check_barcode_encoding(barcode, encoding))

        # malformed barcodes: (value, encoding, reason it must be rejected)
        invalid_cases = (
            ('2022a006', 'ean8', 'should contains digits only'),
            ('20220000', 'ean8', 'incorrect check digit'),
            ('93855341', 'ean13', 'ean13 is a 13-digits barcode'),
            ('9745213796142', 'ean8', 'ean8 is a 8-digits barcode'),
            ('9745213796148', 'ean13', 'incorrect check digit'),
            ('2022!71416014', 'ean13', 'should contains digits only'),
            ('0022071416014', 'ean13', 'when starting with one zero, it indicates that a 12-digit UPC-A code follows'),
        )
        for barcode, encoding, reason in invalid_cases:
            self.assertFalse(check_barcode_encoding(barcode, encoding), reason)
|
||||
913
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_base.py
Normal file
913
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_base.py
Normal file
|
|
@ -0,0 +1,913 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import ast
|
||||
|
||||
from textwrap import dedent
|
||||
|
||||
from odoo import SUPERUSER_ID, Command
|
||||
from odoo.exceptions import RedirectWarning, UserError, ValidationError
|
||||
from odoo.tests import tagged
|
||||
from odoo.tests.common import TransactionCase, BaseCase
|
||||
from odoo.tools import mute_logger
|
||||
from odoo.tools.safe_eval import safe_eval, const_eval, expr_eval
|
||||
from odoo.addons.base.tests.common import TransactionCaseWithUserDemo
|
||||
|
||||
|
||||
class TestSafeEval(BaseCase):
    """Unit tests for the restricted evaluators in odoo.tools.safe_eval
    (safe_eval, const_eval, expr_eval), plus sanity checks on
    ast.literal_eval for comparison."""

    def test_const(self):
        # const_eval accepts literal container structures only
        # NB: True and False are names in Python 2 not consts
        expected = (1, {"a": {2.5}}, [None, u"foo"])
        actual = const_eval('(1, {"a": {2.5}}, [None, u"foo"])')
        self.assertEqual(actual, expected)
        # Test RETURN_CONST
        self.assertEqual(const_eval('10'), 10)

    def test_expr(self):
        # expr_eval additionally allows arithmetic expressions
        # NB: True and False are names in Python 2 not consts
        expected = 3 * 4
        actual = expr_eval('3 * 4')
        self.assertEqual(actual, expected)

    def test_expr_eval_opcodes(self):
        # each sample exercises a specific CPython opcode (trailing comment)
        # that expr_eval must keep whitelisted across Python versions
        for expr, expected in [
            ('3', 3), # RETURN_CONST
            ('[1,2,3,4][1:3]', [2, 3]), # BINARY_SLICE
        ]:
            self.assertEqual(expr_eval(expr), expected)

    def test_safe_eval_opcodes(self):
        # opcodes introduced by newer CPython releases must stay allowed
        for expr, locals_dict, expected in [
            ('[x for x in (1,2)]', {}, [1, 2]), # LOAD_FAST_AND_CLEAR
            ('list(x for x in (1,2))', {}, [1, 2]), # END_FOR, CALL_INTRINSIC_1
            ('v if v is None else w', {'v': False, 'w': 'foo'}, 'foo'), # POP_JUMP_IF_NONE
            ('v if v is not None else w', {'v': None, 'w': 'foo'}, 'foo'), # POP_JUMP_IF_NOT_NONE
            ('{a for a in (1, 2)}', {}, {1, 2}), # RERAISE
        ]:
            self.assertEqual(safe_eval(expr, locals_dict=locals_dict), expected)

    def test_safe_eval_exec_opcodes(self):
        # exec mode with a function definition and a conditional assignment
        for expr, locals_dict, expected in [
            ("""
            def f(v):
                if v:
                    x = 1
                return x
            result = f(42)
            """, {}, 1), # LOAD_FAST_CHECK
        ]:
            # nocopy=True keeps the 'result' binding visible in locals_dict
            safe_eval(dedent(expr), locals_dict=locals_dict, mode="exec", nocopy=True)
            self.assertEqual(locals_dict['result'], expected)

    def test_01_safe_eval(self):
        """ Try a few common expressions to verify they work with safe_eval """
        expected = (1, {"a": 9 * 2}, (True, False, None))
        actual = safe_eval('(1, {"a": 9 * 2}, (True, False, None))')
        self.assertEqual(actual, expected, "Simple python expressions are not working with safe_eval")

    def test_02_literal_eval(self):
        """ Try simple literal definition to verify it works with literal_eval """
        expected = (1, {"a": 9}, (True, False, None))
        actual = ast.literal_eval('(1, {"a": 9}, (True, False, None))')
        self.assertEqual(actual, expected, "Simple python expressions are not working with literal_eval")

    def test_03_literal_eval_arithmetic(self):
        """ Try arithmetic expression in literal_eval to verify it does not work """
        with self.assertRaises(ValueError):
            ast.literal_eval('(1, {"a": 2*9}, (True, False, None))')

    def test_04_literal_eval_forbidden(self):
        """ Try forbidden expressions in literal_eval to verify they are not allowed """
        with self.assertRaises(ValueError):
            ast.literal_eval('{"a": True.__class__}')

    @mute_logger('odoo.tools.safe_eval')
    # NOTE(review): 'forbiddon' is a typo for 'forbidden'; kept as-is to avoid
    # renaming a test method discovered by name
    def test_05_safe_eval_forbiddon(self):
        """ Try forbidden expressions in safe_eval to verify they are not allowed"""
        # no forbidden builtin expression
        with self.assertRaises(ValueError):
            safe_eval('open("/etc/passwd","r")')

        # no forbidden opcodes
        with self.assertRaises(ValueError):
            safe_eval("import odoo", mode="exec")

        # no dunder
        with self.assertRaises(NameError):
            safe_eval("self.__name__", {'self': self}, mode="exec")
|
||||
|
||||
|
||||
# samples use effective TLDs from the Mozilla public suffix
# list at http://publicsuffix.org
# Each entry is (raw input string, expected parsed name, expected parsed email).
SAMPLES = [
    ('"Raoul Grosbedon" <raoul@chirurgiens-dentistes.fr> ', 'Raoul Grosbedon', 'raoul@chirurgiens-dentistes.fr'),
    ('ryu+giga-Sushi@aizubange.fukushima.jp', '', 'ryu+giga-Sushi@aizubange.fukushima.jp'),
    ('Raoul chirurgiens-dentistes.fr', 'Raoul chirurgiens-dentistes.fr', ''),
    (" Raoul O'hara <!@historicalsociety.museum>", "Raoul O'hara", '!@historicalsociety.museum'),
    ('Raoul Grosbedon <raoul@CHIRURGIENS-dentistes.fr> ', 'Raoul Grosbedon', 'raoul@CHIRURGIENS-dentistes.fr'),
    ('Raoul megaraoul@chirurgiens-dentistes.fr', 'Raoul', 'megaraoul@chirurgiens-dentistes.fr'),
]
|
||||
|
||||
|
||||
@tagged('res_partner')
|
||||
class TestBase(TransactionCaseWithUserDemo):
|
||||
|
||||
def _check_find_or_create(self, test_string, expected_name, expected_email, check_partner=False, should_create=False):
|
||||
partner = self.env['res.partner'].find_or_create(test_string)
|
||||
if should_create and check_partner:
|
||||
self.assertTrue(partner.id > check_partner.id, 'find_or_create failed - should have found existing')
|
||||
elif check_partner:
|
||||
self.assertEqual(partner, check_partner, 'find_or_create failed - should have found existing')
|
||||
self.assertEqual(partner.name, expected_name)
|
||||
self.assertEqual(partner.email or '', expected_email)
|
||||
return partner
|
||||
|
||||
    def test_00_res_partner_name_create(self):
        """name_create must parse 'Name <email>' inputs into name/email,
        lowercasing the email, and honor the default_email context key."""
        res_partner = self.env['res.partner']
        parse = res_partner._parse_partner_name
        for text, expected_name, expected_mail in SAMPLES:
            with self.subTest(text=text):
                # parsing alone
                self.assertEqual((expected_name, expected_mail.lower()), parse(text))
                # creation: missing name falls back to the email, missing
                # email stays unset (False)
                partner_id, dummy = res_partner.name_create(text)
                partner = res_partner.browse(partner_id)
                self.assertEqual(expected_name or expected_mail.lower(), partner.name)
                self.assertEqual(expected_mail.lower() or False, partner.email)

        # name_create supports default_email fallback
        partner = self.env['res.partner'].browse(
            self.env['res.partner'].with_context(
                default_email='John.Wick@example.com'
            ).name_create('"Raoulette Vachette" <Raoul@Grosbedon.fr>')[0]
        )
        # explicit email in the input wins over the context default
        self.assertEqual(partner.name, 'Raoulette Vachette')
        self.assertEqual(partner.email, 'raoul@grosbedon.fr')

        partner = self.env['res.partner'].browse(
            self.env['res.partner'].with_context(
                default_email='John.Wick@example.com'
            ).name_create('Raoulette Vachette')[0]
        )
        # no email in the input: the context default is used verbatim
        self.assertEqual(partner.name, 'Raoulette Vachette')
        self.assertEqual(partner.email, 'John.Wick@example.com')
|
||||
|
||||
def test_10_res_partner_find_or_create(self):
|
||||
res_partner = self.env['res.partner']
|
||||
|
||||
partner = res_partner.browse(res_partner.name_create(SAMPLES[0][0])[0])
|
||||
self._check_find_or_create(
|
||||
SAMPLES[0][0], SAMPLES[0][1], SAMPLES[0][2],
|
||||
check_partner=partner, should_create=False
|
||||
)
|
||||
|
||||
partner_2 = res_partner.browse(res_partner.name_create('sarah.john@connor.com')[0])
|
||||
found_2 = self._check_find_or_create(
|
||||
'john@connor.com', 'john@connor.com', 'john@connor.com',
|
||||
check_partner=partner_2, should_create=True
|
||||
)
|
||||
|
||||
new = self._check_find_or_create(
|
||||
SAMPLES[1][0], SAMPLES[1][2].lower(), SAMPLES[1][2].lower(),
|
||||
check_partner=found_2, should_create=True
|
||||
)
|
||||
|
||||
new2 = self._check_find_or_create(
|
||||
SAMPLES[2][0], SAMPLES[2][1], SAMPLES[2][2],
|
||||
check_partner=new, should_create=True
|
||||
)
|
||||
|
||||
new3 = self._check_find_or_create(
|
||||
SAMPLES[3][0], SAMPLES[3][1], SAMPLES[3][2],
|
||||
check_partner=new2, should_create=True
|
||||
)
|
||||
|
||||
new4 = self._check_find_or_create(
|
||||
SAMPLES[4][0], SAMPLES[0][1], SAMPLES[0][2],
|
||||
check_partner=partner, should_create=False
|
||||
)
|
||||
|
||||
new5 = self._check_find_or_create(
|
||||
SAMPLES[5][0], SAMPLES[5][1], SAMPLES[5][2],
|
||||
check_partner=new4, should_create=True
|
||||
)
|
||||
|
||||
    def test_15_res_partner_name_search(self):
        """name_search must ignore inactive partners and honor limit,
        returning results in the model's default order."""
        res_partner = self.env['res.partner']
        # (display input, active flag) — one of the 'Raoul' partners is
        # created inactive and must not be returned by name_search
        DATA = [
            ('"A Raoul Grosbedon" <raoul@chirurgiens-dentistes.fr>', False),
            ('B Raoul chirurgiens-dentistes.fr', True),
            ("C Raoul O'hara <!@historicalsociety.museum>", True),
            ('ryu+giga-Sushi@aizubange.fukushima.jp', True),
        ]
        for name, active in DATA:
            partner_id, dummy = res_partner.with_context(default_active=active).name_create(name)
        partners = res_partner.name_search('Raoul')
        self.assertEqual(len(partners), 2, 'Incorrect search number result for name_search')
        partners = res_partner.name_search('Raoul', limit=1)
        self.assertEqual(len(partners), 1, 'Incorrect search number result for name_search with a limit')
        self.assertEqual(partners[0][1], 'B Raoul chirurgiens-dentistes.fr', 'Incorrect partner returned, should be the first active')
|
||||
|
||||
    def test_20_res_partner_address_sync(self):
        """Contacts of type 'contact' mirror their parent company's address;
        switching the type off/on toggles that sync, and writing on the
        parent propagates downward only (never upward)."""
        res_partner = self.env['res.partner']
        ghoststep = res_partner.create({
            'name': 'GhostStep',
            'is_company': True,
            'street': 'Main Street, 10',
            'phone': '123456789',
            'email': 'info@ghoststep.com',
            'vat': 'BE0477472701',
            'type': 'contact',
        })
        p1 = res_partner.browse(res_partner.name_create('Denis Bladesmith <denis.bladesmith@ghoststep.com>')[0])
        self.assertEqual(p1.type, 'contact', 'Default type must be "contact"')
        p1phone = '123456789#34'
        # attaching to a parent while type is 'contact' pulls the address in
        p1.write({'phone': p1phone,
                  'parent_id': ghoststep.id})
        self.assertEqual(p1.street, ghoststep.street, 'Address fields must be synced')
        self.assertEqual(p1.phone, p1phone, 'Phone should be preserved after address sync')
        self.assertEqual(p1.type, 'contact', 'Type should be preserved after address sync')
        self.assertEqual(p1.email, 'denis.bladesmith@ghoststep.com', 'Email should be preserved after sync')

        # turn off sync
        p1street = 'Different street, 42'
        p1.write({'street': p1street,
                  'type': 'invoice'})
        self.assertEqual(p1.street, p1street, 'Address fields must not be synced after turning sync off')
        self.assertNotEqual(ghoststep.street, p1street, 'Parent address must never be touched')

        # turn on sync again
        p1.write({'type': 'contact'})
        self.assertEqual(p1.street, ghoststep.street, 'Address fields must be synced again')
        self.assertEqual(p1.phone, p1phone, 'Phone should be preserved after address sync')
        self.assertEqual(p1.type, 'contact', 'Type should be preserved after address sync')
        self.assertEqual(p1.email, 'denis.bladesmith@ghoststep.com', 'Email should be preserved after sync')

        # Modify parent, sync to children
        ghoststreet = 'South Street, 25'
        ghoststep.write({'street': ghoststreet})
        self.assertEqual(p1.street, ghoststreet, 'Address fields must be synced automatically')
        self.assertEqual(p1.phone, p1phone, 'Phone should not be synced')
        self.assertEqual(p1.email, 'denis.bladesmith@ghoststep.com', 'Email should be preserved after sync')

        # child writes never flow back to the parent
        p1street = 'My Street, 11'
        p1.write({'street': p1street})
        self.assertEqual(ghoststep.street, ghoststreet, 'Touching contact should never alter parent')
|
||||
|
||||
    def test_30_res_partner_first_contact_sync(self):
        """ Test initial creation of company/contact pair where contact address gets copied to
        company """
        res_partner = self.env['res.partner']
        ironshield = res_partner.browse(res_partner.name_create('IronShield')[0])
        self.assertFalse(ironshield.is_company, 'Partners are not companies by default')
        self.assertEqual(ironshield.type, 'contact', 'Default type must be "contact"')
        ironshield.write({'type': 'contact'})

        # first child carries the address; it must flow up to the (still
        # address-less) parent on creation
        p1 = res_partner.create({
            'name': 'Isen Hardearth',
            'street': 'Strongarm Avenue, 12',
            'parent_id': ironshield.id,
        })
        self.assertEqual(p1.type, 'contact', 'Default type must be "contact", not the copied parent type')
        self.assertEqual(ironshield.street, p1.street, 'Address fields should be copied to company')
|
||||
|
||||
    def test_40_res_partner_address_get(self):
        """ Test address_get address resolution mechanism: it should first go down through descendants,
        stopping when encountering another is_company entity, then go up, stopping again at the first
        is_company entity or the root ancestor and if nothing matches, it should use the provided partner
        itself """
        res_partner = self.env['res.partner']
        # Tree under test:
        #   Elmtree
        #   ├── Branch 1 (company)
        #   │   ├── Leaf 10 (invoice)
        #   │   └── Branch 11 (other) ── Leaf 111 (delivery)
        #   └── Branch 2 (company)
        #       ├── Leaf 21 (delivery)
        #       ├── Leaf 22
        #       └── Leaf 23 (contact)
        elmtree = res_partner.browse(res_partner.name_create('Elmtree')[0])
        branch1 = res_partner.create({'name': 'Branch 1',
                                      'parent_id': elmtree.id,
                                      'is_company': True})
        leaf10 = res_partner.create({'name': 'Leaf 10',
                                     'parent_id': branch1.id,
                                     'type': 'invoice'})
        branch11 = res_partner.create({'name': 'Branch 11',
                                       'parent_id': branch1.id,
                                       'type': 'other'})
        leaf111 = res_partner.create({'name': 'Leaf 111',
                                      'parent_id': branch11.id,
                                      'type': 'delivery'})
        branch11.write({'is_company': False})  # force is_company after creating first child
        branch2 = res_partner.create({'name': 'Branch 2',
                                      'parent_id': elmtree.id,
                                      'is_company': True})
        leaf21 = res_partner.create({'name': 'Leaf 21',
                                     'parent_id': branch2.id,
                                     'type': 'delivery'})
        leaf22 = res_partner.create({'name': 'Leaf 22',
                                     'parent_id': branch2.id})
        leaf23 = res_partner.create({'name': 'Leaf 23',
                                     'parent_id': branch2.id,
                                     'type': 'contact'})

        # go up, stop at branch1
        self.assertEqual(leaf111.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': leaf111.id,
                          'invoice': leaf10.id,
                          'contact': branch1.id,
                          'other': branch11.id}, 'Invalid address resolution')
        self.assertEqual(branch11.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': leaf111.id,
                          'invoice': leaf10.id,
                          'contact': branch1.id,
                          'other': branch11.id}, 'Invalid address resolution')

        # go down, stop at at all child companies
        self.assertEqual(elmtree.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': elmtree.id,
                          'invoice': elmtree.id,
                          'contact': elmtree.id,
                          'other': elmtree.id}, 'Invalid address resolution')

        # go down through children
        self.assertEqual(branch1.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': leaf111.id,
                          'invoice': leaf10.id,
                          'contact': branch1.id,
                          'other': branch11.id}, 'Invalid address resolution')

        self.assertEqual(branch2.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': leaf21.id,
                          'invoice': branch2.id,
                          'contact': branch2.id,
                          'other': branch2.id}, 'Invalid address resolution. Company is the first encountered contact, therefore default for unfound addresses.')

        # go up then down through siblings
        self.assertEqual(leaf21.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': leaf21.id,
                          'invoice': branch2.id,
                          'contact': branch2.id,
                          'other': branch2.id}, 'Invalid address resolution, should scan commercial entity ancestor and its descendants')
        self.assertEqual(leaf22.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': leaf21.id,
                          'invoice': leaf22.id,
                          'contact': leaf22.id,
                          'other': leaf22.id}, 'Invalid address resolution, should scan commercial entity ancestor and its descendants')
        self.assertEqual(leaf23.address_get(['delivery', 'invoice', 'contact', 'other']),
                         {'delivery': leaf21.id,
                          'invoice': leaf23.id,
                          'contact': leaf23.id,
                          'other': leaf23.id}, 'Invalid address resolution, `default` should only override if no partner with specific type exists')

        # empty adr_pref means only 'contact'
        self.assertEqual(elmtree.address_get([]),
                         {'contact': elmtree.id}, 'Invalid address resolution, no contact means commercial entity ancestor')
        self.assertEqual(leaf111.address_get([]),
                         {'contact': branch1.id}, 'Invalid address resolution, no contact means finding contact in ancestors')
        branch11.write({'type': 'contact'})
        self.assertEqual(leaf111.address_get([]),
                         {'contact': branch11.id}, 'Invalid address resolution, branch11 should now be contact')
|
||||
|
||||
    def test_commercial_partner_nullcompany(self):
        """ The commercial partner is the first/nearest ancestor-or-self which
        is a company or doesn't have a parent
        """
        P = self.env['res.partner']
        p0 = P.create({'name': '0', 'email': '0'})
        self.assertEqual(p0.commercial_partner_id, p0, "partner without a parent is their own commercial partner")

        p1 = P.create({'name': '1', 'email': '1', 'parent_id': p0.id})
        self.assertEqual(p1.commercial_partner_id, p0, "partner's parent is their commercial partner")
        p12 = P.create({'name': '12', 'email': '12', 'parent_id': p1.id})
        self.assertEqual(p12.commercial_partner_id, p0, "partner's GP is their commercial partner")

        p2 = P.create({'name': '2', 'email': '2', 'parent_id': p0.id, 'is_company': True})
        self.assertEqual(p2.commercial_partner_id, p2, "partner flagged as company is their own commercial partner")
        p21 = P.create({'name': '21', 'email': '21', 'parent_id': p2.id})
        self.assertEqual(p21.commercial_partner_id, p2, "commercial partner is closest ancestor with themselves as commercial partner")

        p3 = P.create({'name': '3', 'email': '3', 'is_company': True})
        self.assertEqual(p3.commercial_partner_id, p3, "being both parent-less and company should be the same as either")

        notcompanies = p0 | p1 | p12 | p21
        # force is_company to SQL NULL (not reachable through the ORM, which
        # writes False) to check the ancestor lookup tolerates NULL
        self.env.cr.execute('update res_partner set is_company=null where id = any(%s)', [notcompanies.ids])
        for parent in notcompanies:
            p = P.create({
                'name': parent.name + '_sub',
                'email': parent.email + '_sub',
                'parent_id': parent.id,
            })
            self.assertEqual(
                p.commercial_partner_id,
                parent.commercial_partner_id,
                "check that is_company=null is properly handled when looking for ancestor"
            )
|
||||
|
||||
    def test_50_res_partner_commercial_sync(self):
        """Commercial fields (e.g. vat) must propagate from the commercial
        entity down to all its non-company descendants, recursively, and
        stop at descendants that are themselves commercial entities."""
        res_partner = self.env['res.partner']
        p0 = res_partner.create({'name': 'Sigurd Sunknife',
                                 'email': 'ssunknife@gmail.com'})
        sunhelm = res_partner.create({'name': 'Sunhelm',
                                      'is_company': True,
                                      'street': 'Rainbow Street, 13',
                                      'phone': '1122334455',
                                      'email': 'info@sunhelm.com',
                                      'vat': 'BE0477472701',
                                      'child_ids': [Command.link(p0.id),
                                                    Command.create({'name': 'Alrik Greenthorn',
                                                                    'email': 'agr@sunhelm.com'})]})
        p1 = res_partner.create({'name': 'Otto Blackwood',
                                 'email': 'otto.blackwood@sunhelm.com',
                                 'parent_id': sunhelm.id})
        p11 = res_partner.create({'name': 'Gini Graywool',
                                  'email': 'ggr@sunhelm.com',
                                  'parent_id': p1.id})
        p2 = res_partner.search([('email', '=', 'agr@sunhelm.com')], limit=1)
        sunhelm.write({'child_ids': [Command.create({'name': 'Ulrik Greenthorn',
                                                     'email': 'ugr@sunhelm.com'})]})
        p3 = res_partner.search([('email', '=', 'ugr@sunhelm.com')], limit=1)

        # whether linked, created inline, or nested, all descendants resolve
        # to sunhelm and inherit its vat
        for p in (p0, p1, p11, p2, p3):
            self.assertEqual(p.commercial_partner_id, sunhelm, 'Incorrect commercial entity resolution')
            self.assertEqual(p.vat, sunhelm.vat, 'Commercial fields must be automatically synced')
        sunhelmvat = 'BE0123456749'
        sunhelm.write({'vat': sunhelmvat})
        for p in (p0, p1, p11, p2, p3):
            self.assertEqual(p.vat, sunhelmvat, 'Commercial fields must be automatically and recursively synced')

        # writing vat on a mere contact must not propagate anywhere
        p1vat = 'BE0987654394'
        p1.write({'vat': p1vat})
        for p in (sunhelm, p0, p11, p2, p3):
            self.assertEqual(p.vat, sunhelmvat, 'Sync to children should only work downstream and on commercial entities')

        # promote p1 to commercial entity
        p1.write({'parent_id': sunhelm.id,
                  'is_company': True,
                  'name': 'Sunhelm Subsidiary'})
        self.assertEqual(p1.vat, p1vat, 'Setting is_company should stop auto-sync of commercial fields')
        self.assertEqual(p1.commercial_partner_id, p1, 'Incorrect commercial entity resolution after setting is_company')

        # writing on parent should not touch child commercial entities
        sunhelmvat2 = 'BE0112233453'
        sunhelm.write({'vat': sunhelmvat2})
        self.assertEqual(p1.vat, p1vat, 'Setting is_company should stop auto-sync of commercial fields')
        self.assertEqual(p0.vat, sunhelmvat2, 'Commercial fields must be automatically synced')
|
||||
|
||||
def test_60_read_group(self):
|
||||
title_sir = self.env['res.partner.title'].create({'name': 'Sir...'})
|
||||
title_lady = self.env['res.partner.title'].create({'name': 'Lady...'})
|
||||
user_vals_list = [
|
||||
{'name': 'Alice', 'login': 'alice', 'color': 1, 'function': 'Friend', 'date': '2015-03-28', 'title': title_lady.id},
|
||||
{'name': 'Alice', 'login': 'alice2', 'color': 0, 'function': 'Friend', 'date': '2015-01-28', 'title': title_lady.id},
|
||||
{'name': 'Bob', 'login': 'bob', 'color': 2, 'function': 'Friend', 'date': '2015-03-02', 'title': title_sir.id},
|
||||
{'name': 'Eve', 'login': 'eve', 'color': 3, 'function': 'Eavesdropper', 'date': '2015-03-20', 'title': title_lady.id},
|
||||
{'name': 'Nab', 'login': 'nab', 'color': -3, 'function': '5$ Wrench', 'date': '2014-09-10', 'title': title_sir.id},
|
||||
{'name': 'Nab', 'login': 'nab-she', 'color': 6, 'function': '5$ Wrench', 'date': '2014-01-02', 'title': title_lady.id},
|
||||
]
|
||||
res_users = self.env['res.users']
|
||||
users = res_users.create(user_vals_list)
|
||||
domain = [('id', 'in', users.ids)]
|
||||
|
||||
# group on local char field without domain and without active_test (-> empty WHERE clause)
|
||||
groups_data = res_users.with_context(active_test=False).read_group([], fields=['login'], groupby=['login'], orderby='login DESC')
|
||||
self.assertGreater(len(groups_data), 6, "Incorrect number of results when grouping on a field")
|
||||
|
||||
# group on local char field with limit
|
||||
groups_data = res_users.read_group(domain, fields=['login'], groupby=['login'], orderby='login DESC', limit=3, offset=3)
|
||||
self.assertEqual(len(groups_data), 3, "Incorrect number of results when grouping on a field with limit")
|
||||
self.assertEqual([g['login'] for g in groups_data], ['bob', 'alice2', 'alice'], 'Result mismatch')
|
||||
|
||||
# group on inherited char field, aggregate on int field (second groupby ignored on purpose)
|
||||
groups_data = res_users.read_group(domain, fields=['name', 'color', 'function'], groupby=['function', 'login'])
|
||||
self.assertEqual(len(groups_data), 3, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual(['5$ Wrench', 'Eavesdropper', 'Friend'], [g['function'] for g in groups_data], 'incorrect read_group order')
|
||||
for group_data in groups_data:
|
||||
self.assertIn('color', group_data, "Aggregated data for the column 'color' is not present in read_group return values")
|
||||
self.assertEqual(group_data['color'], 3, "Incorrect sum for aggregated data for the column 'color'")
|
||||
|
||||
# group on inherited char field, reverse order
|
||||
groups_data = res_users.read_group(domain, fields=['name', 'color'], groupby='name', orderby='name DESC')
|
||||
self.assertEqual([g['name'] for g in groups_data], ['Nab', 'Eve', 'Bob', 'Alice'], 'Incorrect ordering of the list')
|
||||
|
||||
# group on int field, default ordering
|
||||
groups_data = res_users.read_group(domain, fields=['color'], groupby='color')
|
||||
self.assertEqual([g['color'] for g in groups_data], [-3, 0, 1, 2, 3, 6], 'Incorrect ordering of the list')
|
||||
|
||||
# multi group, second level is int field, should still be summed in first level grouping
|
||||
groups_data = res_users.read_group(domain, fields=['name', 'color'], groupby=['name', 'color'], orderby='name DESC')
|
||||
self.assertEqual([g['name'] for g in groups_data], ['Nab', 'Eve', 'Bob', 'Alice'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['color'] for g in groups_data], [3, 3, 2, 1], 'Incorrect ordering of the list')
|
||||
|
||||
# group on inherited char field, multiple orders with directions
|
||||
groups_data = res_users.read_group(domain, fields=['name', 'color'], groupby='name', orderby='color DESC, name')
|
||||
self.assertEqual(len(groups_data), 4, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['name'] for g in groups_data], ['Eve', 'Nab', 'Bob', 'Alice'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['name_count'] for g in groups_data], [1, 2, 1, 2], 'Incorrect number of results')
|
||||
|
||||
# group on inherited date column (res_partner.date) -> Year-Month, default ordering
|
||||
groups_data = res_users.read_group(domain, fields=['function', 'color', 'date'], groupby=['date'])
|
||||
self.assertEqual(len(groups_data), 4, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['date'] for g in groups_data], ['January 2014', 'September 2014', 'January 2015', 'March 2015'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['date_count'] for g in groups_data], [1, 1, 1, 3], 'Incorrect number of results')
|
||||
|
||||
# group on inherited date column (res_partner.date) specifying the :year -> Year default ordering
|
||||
groups_data = res_users.read_group(domain, fields=['function', 'color', 'date'], groupby=['date:year'])
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['date:year'] for g in groups_data], ['2014', '2015'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['date_count'] for g in groups_data], [2, 4], 'Incorrect number of results')
|
||||
|
||||
# group on inherited date column (res_partner.date) -> Year-Month, custom order
|
||||
groups_data = res_users.read_group(domain, fields=['function', 'color', 'date'], groupby=['date'], orderby='date DESC')
|
||||
self.assertEqual(len(groups_data), 4, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['date'] for g in groups_data], ['March 2015', 'January 2015', 'September 2014', 'January 2014'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['date_count'] for g in groups_data], [3, 1, 1, 1], 'Incorrect number of results')
|
||||
|
||||
# group on inherited many2one (res_partner.title), default order
|
||||
groups_data = res_users.read_group(domain, fields=['function', 'color', 'title'], groupby=['title'])
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
self.assertEqual([g['title'] for g in groups_data], [(title_lady.id, 'Lady...'), (title_sir.id, 'Sir...')], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [4, 2], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [10, -1], 'Incorrect aggregation of int column')
|
||||
|
||||
# group on inherited many2one (res_partner.title), reversed natural order
|
||||
groups_data = res_users.read_group(domain, fields=['function', 'color', 'title'], groupby=['title'], orderby="title desc")
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
self.assertEqual([(title_sir.id, 'Sir...'), (title_lady.id, 'Lady...')], [g['title'] for g in groups_data], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [2, 4], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [-1, 10], 'Incorrect aggregation of int column')
|
||||
|
||||
# group on inherited many2one (res_partner.title), multiple orders with m2o in second position
|
||||
groups_data = res_users.read_group(domain, fields=['function', 'color', 'title'], groupby=['title'], orderby="color desc, title desc")
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
self.assertEqual([g['title'] for g in groups_data], [(title_lady.id, 'Lady...'), (title_sir.id, 'Sir...')], 'Incorrect ordering of the result')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [4, 2], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [10, -1], 'Incorrect aggregation of int column')
|
||||
|
||||
# group on inherited many2one (res_partner.title), ordered by other inherited field (color)
|
||||
groups_data = res_users.read_group(domain, fields=['function', 'color', 'title'], groupby=['title'], orderby='color')
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
self.assertEqual([g['title'] for g in groups_data], [(title_sir.id, 'Sir...'), (title_lady.id, 'Lady...')], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [2, 4], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [-1, 10], 'Incorrect aggregation of int column')
|
||||
|
||||
def test_61_private_read_group(self):
|
||||
"""
|
||||
the _read_group should behave exactly like read_group (public method) except for sorting the one2many on ID
|
||||
instead of name, so avoiding the join on the "to many" table to get the name
|
||||
"""
|
||||
title_sir = self.env['res.partner.title'].create({'name': 'Sir...'})
|
||||
title_lady = self.env['res.partner.title'].create({'name': 'Lady...'})
|
||||
user_vals_list = [
|
||||
{'name': 'Alice', 'login': 'alice', 'color': 1, 'function': 'Friend', 'date': '2015-03-28', 'title': title_lady.id},
|
||||
{'name': 'Alice', 'login': 'alice2', 'color': 0, 'function': 'Friend', 'date': '2015-01-28', 'title': title_lady.id},
|
||||
{'name': 'Bob', 'login': 'bob', 'color': 2, 'function': 'Friend', 'date': '2015-03-02', 'title': title_sir.id},
|
||||
{'name': 'Eve', 'login': 'eve', 'color': 3, 'function': 'Eavesdropper', 'date': '2015-03-20', 'title': title_lady.id},
|
||||
{'name': 'Nab', 'login': 'nab', 'color': -3, 'function': '5$ Wrench', 'date': '2014-09-10', 'title': title_sir.id},
|
||||
{'name': 'Nab', 'login': 'nab-she', 'color': 6, 'function': '5$ Wrench', 'date': '2014-01-02', 'title': title_lady.id},
|
||||
]
|
||||
res_users = self.env['res.users']
|
||||
users = res_users.create(user_vals_list)
|
||||
domain = [('id', 'in', users.ids)]
|
||||
|
||||
# group on local char field without domain and without active_test (-> empty WHERE clause)
|
||||
groups_data = res_users.with_context(active_test=False)._read_group([], fields=['login'], groupby=['login'], orderby='login DESC')
|
||||
self.assertGreater(len(groups_data), 6, "Incorrect number of results when grouping on a field")
|
||||
|
||||
# group on local char field with limit
|
||||
groups_data = res_users._read_group(domain, fields=['login'], groupby=['login'], orderby='login DESC', limit=3, offset=3)
|
||||
self.assertEqual(len(groups_data), 3, "Incorrect number of results when grouping on a field with limit")
|
||||
self.assertEqual(['bob', 'alice2', 'alice'], [g['login'] for g in groups_data], 'Result mismatch')
|
||||
|
||||
# group on inherited char field, aggregate on int field (second groupby ignored on purpose)
|
||||
groups_data = res_users._read_group(domain, fields=['name', 'color', 'function'], groupby=['function', 'login'])
|
||||
self.assertEqual(len(groups_data), 3, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['function'] for g in groups_data], ['5$ Wrench', 'Eavesdropper', 'Friend'], 'incorrect _read_group order')
|
||||
for group_data in groups_data:
|
||||
self.assertIn('color', group_data, "Aggregated data for the column 'color' is not present in _read_group return values")
|
||||
self.assertEqual(group_data['color'], 3, "Incorrect sum for aggregated data for the column 'color'")
|
||||
|
||||
# group on inherited char field, reverse order
|
||||
groups_data = res_users._read_group(domain, fields=['name', 'color'], groupby='name', orderby='name DESC')
|
||||
self.assertEqual([g['name'] for g in groups_data], ['Nab', 'Eve', 'Bob', 'Alice'], 'Incorrect ordering of the list')
|
||||
|
||||
# group on int field, default ordering
|
||||
groups_data = res_users._read_group(domain, fields=['color'], groupby='color')
|
||||
self.assertEqual([g['color'] for g in groups_data], [-3, 0, 1, 2, 3, 6], 'Incorrect ordering of the list')
|
||||
|
||||
# multi group, second level is int field, should still be summed in first level grouping
|
||||
groups_data = res_users._read_group(domain, fields=['name', 'color'], groupby=['name', 'color'], orderby='name DESC')
|
||||
self.assertEqual([g['name'] for g in groups_data], ['Nab', 'Eve', 'Bob', 'Alice'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['color'] for g in groups_data], [3, 3, 2, 1], 'Incorrect ordering of the list')
|
||||
|
||||
# group on inherited char field, multiple orders with directions
|
||||
groups_data = res_users._read_group(domain, fields=['name', 'color'], groupby='name', orderby='color DESC, name')
|
||||
self.assertEqual(len(groups_data), 4, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['name'] for g in groups_data], ['Eve', 'Nab', 'Bob', 'Alice'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['name_count'] for g in groups_data], [1, 2, 1, 2], 'Incorrect number of results')
|
||||
|
||||
# group on inherited date column (res_partner.date) -> Year-Month, default ordering
|
||||
groups_data = res_users._read_group(domain, fields=['function', 'color', 'date'], groupby=['date'])
|
||||
self.assertEqual(len(groups_data), 4, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['date'] for g in groups_data], ['January 2014', 'September 2014', 'January 2015', 'March 2015'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['date_count'] for g in groups_data], [1, 1, 1, 3], 'Incorrect number of results')
|
||||
|
||||
# group on inherited date column (res_partner.date) specifying the :year -> Year default ordering
|
||||
groups_data = res_users._read_group(domain, fields=['function', 'color', 'date'], groupby=['date:year'])
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['date:year'] for g in groups_data], ['2014', '2015'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['date_count'] for g in groups_data], [2, 4], 'Incorrect number of results')
|
||||
|
||||
# group on inherited date column (res_partner.date) -> Year-Month, custom order
|
||||
groups_data = res_users._read_group(domain, fields=['function', 'color', 'date'], groupby=['date'], orderby='date DESC')
|
||||
self.assertEqual(len(groups_data), 4, "Incorrect number of results when grouping on a field")
|
||||
self.assertEqual([g['date'] for g in groups_data], ['March 2015', 'January 2015', 'September 2014', 'January 2014'], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['date_count'] for g in groups_data], [3, 1, 1, 1], 'Incorrect number of results')
|
||||
|
||||
# group on inherited many2one (res_partner.title), default order
|
||||
groups_data = res_users._read_group(domain, fields=['function', 'color', 'title'], groupby=['title'])
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
# here the order of the titles is by ID
|
||||
self.assertEqual([g['title'] for g in groups_data], [(title_sir.id, 'Sir...'), (title_lady.id, 'Lady...')], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [2, 4], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [-1, 10], 'Incorrect aggregation of int column')
|
||||
|
||||
# group on inherited many2one (res_partner.title), reversed natural order
|
||||
groups_data = res_users._read_group(domain, fields=['function', 'color', 'title'], groupby=['title'], orderby="title desc")
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
# here the order of the titles is by ID DESC
|
||||
self.assertEqual([g['title'] for g in groups_data], [(title_sir.id, 'Sir...'), (title_lady.id, 'Lady...')], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [2, 4], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [-1, 10], 'Incorrect aggregation of int column')
|
||||
|
||||
# group on inherited many2one (res_partner.title), multiple orders with m2o in second position
|
||||
groups_data = res_users._read_group(domain, fields=['function', 'color', 'title'], groupby=['title'], orderby="color desc, title desc")
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
self.assertEqual([g['title'] for g in groups_data], [(title_lady.id, 'Lady...'), (title_sir.id, 'Sir...')], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [4, 2], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [10, -1], 'Incorrect aggregation of int column')
|
||||
|
||||
# group on inherited many2one (res_partner.title), ordered by other inherited field (color)
|
||||
groups_data = res_users._read_group(domain, fields=['function', 'color', 'title'], groupby=['title'], orderby='color')
|
||||
self.assertEqual(len(groups_data), 2, "Incorrect number of results when grouping on a field")
|
||||
# m2o is returned as a (id, label) pair
|
||||
self.assertEqual([g['title'] for g in groups_data], [(title_sir.id, 'Sir...'), (title_lady.id, 'Lady...')], 'Incorrect ordering of the list')
|
||||
self.assertEqual([g['title_count'] for g in groups_data], [2, 4], 'Incorrect number of results')
|
||||
self.assertEqual([g['color'] for g in groups_data], [-1, 10], 'Incorrect aggregation of int column')
|
||||
|
||||
def test_70_archive_internal_partners(self):
|
||||
test_partner = self.env['res.partner'].create({'name':'test partner'})
|
||||
test_user = self.env['res.users'].create({
|
||||
'login': 'test@odoo.com',
|
||||
'partner_id': test_partner.id,
|
||||
})
|
||||
# Cannot archive the partner
|
||||
with self.assertRaises(RedirectWarning):
|
||||
test_partner.with_user(self.env.ref('base.user_admin')).toggle_active()
|
||||
with self.assertRaises(ValidationError):
|
||||
test_partner.with_user(self.user_demo).toggle_active()
|
||||
|
||||
# Can archive the user but the partner stays active
|
||||
test_user.toggle_active()
|
||||
self.assertTrue(test_partner.active, 'Parter related to user should remain active')
|
||||
|
||||
# Now we can archive the partner
|
||||
test_partner.toggle_active()
|
||||
|
||||
# Activate the user should reactivate the partner
|
||||
test_user.toggle_active()
|
||||
self.assertTrue(test_partner.active, 'Activating user must active related partner')
|
||||
|
||||
def test_display_name_translation(self):
|
||||
self.env['res.lang']._activate_lang('fr_FR')
|
||||
self.env.ref('base.module_base')._update_translations(['fr_FR'])
|
||||
|
||||
res_partner = self.env['res.partner']
|
||||
|
||||
parent_contact = res_partner.create({
|
||||
'name': 'Parent',
|
||||
'type': 'contact',
|
||||
})
|
||||
|
||||
child_contact = res_partner.create({
|
||||
'type': 'other',
|
||||
'parent_id': parent_contact.id,
|
||||
})
|
||||
|
||||
self.assertEqual(child_contact.display_name, 'Parent, Other Address')
|
||||
|
||||
self.assertEqual(child_contact.with_context(lang='en_US').translated_display_name, 'Parent, Other Address')
|
||||
|
||||
self.assertEqual(child_contact.with_context(lang='fr_FR').translated_display_name, 'Parent, Autre adresse')
|
||||
|
||||
class TestPartnerRecursion(TransactionCase):
    """Check that cycles in the res.partner parent/child hierarchy are rejected."""

    def setUp(self):
        # modernized: argument-less super() (Python 3 form)
        super().setUp()
        res_partner = self.env['res.partner']
        # three-level chain: p1 -> p2 -> p3
        self.p1 = res_partner.browse(res_partner.name_create('Elmtree')[0])
        self.p2 = res_partner.create({'name': 'Elmtree Child 1', 'parent_id': self.p1.id})
        self.p3 = res_partner.create({'name': 'Elmtree Grand-Child 1.1', 'parent_id': self.p2.id})

    def test_100_res_partner_recursion(self):
        """A healthy hierarchy passes the recursion check."""
        self.assertTrue(self.p3._check_recursion())
        self.assertTrue((self.p1 + self.p2 + self.p3)._check_recursion())

    # split 101, 102, 103 tests to force SQL rollback between them

    def test_101_res_partner_recursion(self):
        """Re-parenting the root under its grand-child must be refused."""
        with self.assertRaises(ValidationError):
            self.p1.write({'parent_id': self.p3.id})

    def test_102_res_partner_recursion(self):
        """Re-parenting a node under its own child must be refused."""
        with self.assertRaises(ValidationError):
            self.p2.write({'parent_id': self.p3.id})

    def test_103_res_partner_recursion(self):
        """A node cannot be its own parent."""
        with self.assertRaises(ValidationError):
            self.p3.write({'parent_id': self.p3.id})

    def test_104_res_partner_recursion_indirect_cycle(self):
        """ Indirect hacky write to create cycle in children """
        p3b = self.p1.create({'name': 'Elmtree Grand-Child 1.2', 'parent_id': self.p2.id})
        with self.assertRaises(ValidationError):
            self.p2.write({'child_ids': [Command.update(self.p3.id, {'parent_id': p3b.id}),
                                         Command.update(p3b.id, {'parent_id': self.p3.id})]})

    def test_110_res_partner_recursion_multi_update(self):
        """ multi-write on several partners in same hierarchy must not trigger a false cycle detection """
        ps = self.p1 + self.p2 + self.p3
        self.assertTrue(ps.write({'phone': '123456'}))

    def test_111_res_partner_recursion_infinite_loop(self):
        """ The recursion check must not loop forever """
        self.p2.parent_id = False
        self.p3.parent_id = False
        self.p1.parent_id = self.p2
        with self.assertRaises(ValidationError):
            (self.p3 | self.p2).write({'parent_id': self.p1.id})
||||
class TestParentStore(TransactionCase):
    """ Verify that parent_store computation is done right """

    def setUp(self):
        # modernized: argument-less super() (Python 3 form)
        super().setUp()

        # force res_partner_category.copy() to copy children
        category = self.env['res.partner.category']
        self.patch(category._fields['child_ids'], 'copy', True)

        # setup categories
        self.root = category.create({'name': 'Root category'})
        self.cat0 = category.create({'name': 'Parent category', 'parent_id': self.root.id})
        self.cat1 = category.create({'name': 'Child 1', 'parent_id': self.cat0.id})
        self.cat2 = category.create({'name': 'Child 2', 'parent_id': self.cat0.id})
        self.cat21 = category.create({'name': 'Child 2-1', 'parent_id': self.cat2.id})

    def _check_duplication(self, new_cat0):
        # Shared assertions (previously copy-pasted in four tests): the copy
        # and the original must each own a complete, disjoint 4-node subtree.
        new_struct = new_cat0.search([('parent_id', 'child_of', new_cat0.id)])
        self.assertEqual(len(new_struct), 4, "After duplication, the new object must have the childs records")
        old_struct = new_cat0.search([('parent_id', 'child_of', self.cat0.id)])
        self.assertEqual(len(old_struct), 4, "After duplication, previous record must have old childs records only")
        self.assertFalse(new_struct & old_struct, "After duplication, nodes should not be mixed")

    def test_duplicate_parent(self):
        """ Duplicate the parent category and verify that the children have been duplicated too """
        self._check_duplication(self.cat0.copy())

    def test_duplicate_children_01(self):
        """ Duplicate the children then reassign them to the new parent (1st method). """
        new_cat1 = self.cat1.copy()
        new_cat2 = self.cat2.copy()
        new_cat0 = self.cat0.copy({'child_ids': []})
        (new_cat1 + new_cat2).write({'parent_id': new_cat0.id})
        self._check_duplication(new_cat0)

    def test_duplicate_children_02(self):
        """ Duplicate the children then reassign them to the new parent (2nd method). """
        new_cat1 = self.cat1.copy()
        new_cat2 = self.cat2.copy()
        new_cat0 = self.cat0.copy({'child_ids': [Command.set((new_cat1 + new_cat2).ids)]})
        self._check_duplication(new_cat0)

    def test_duplicate_children_03(self):
        """ Duplicate the children then reassign them to the new parent (3rd method). """
        new_cat1 = self.cat1.copy()
        new_cat2 = self.cat2.copy()
        new_cat0 = self.cat0.copy({'child_ids': []})
        new_cat0.write({'child_ids': [Command.link(new_cat1.id), Command.link(new_cat2.id)]})
        self._check_duplication(new_cat0)
||||
|
||||
class TestGroups(TransactionCase):
    """Behavior of res.groups: full_name search, implied-group recursion,
    copy, and propagation of users through implied groups."""

    def test_res_groups_fullname_search(self):
        """The computed full_name must be searchable with 'like' and 'in'."""
        all_groups = self.env['res.groups'].search([])

        like_searches = [
            ('%Sale%', 'Sale', "did not match search for 'Sale'"),
            ('%Technical%', 'Technical', "did not match search for 'Technical'"),
            ('%Sales /%', 'Sales /', "did not match search for 'Sales /'"),
        ]
        for pattern, needle, message in like_searches:
            found = all_groups.search([('full_name', 'like', pattern)])
            expected_ids = [g.id for g in all_groups if needle in g.full_name]
            self.assertItemsEqual(found.ids, expected_ids, message)

        found = all_groups.search([('full_name', 'in', ['Administration / Access Rights', 'Contact Creation'])])
        self.assertTrue(found, "did not match search for 'Administration / Access Rights' and 'Contact Creation'")

    def test_res_group_recursion(self):
        """_check_m2m_recursion must accept a DAG and reject a cycle."""
        Groups = self.env['res.groups']
        # four groups with no cycle, check them all together
        grp_a = Groups.create({'name': 'A'})
        grp_b = Groups.create({'name': 'B'})
        grp_c = Groups.create({'name': 'G', 'implied_ids': [Command.set((grp_a + grp_b).ids)]})
        grp_d = Groups.create({'name': 'D', 'implied_ids': [Command.set(grp_c.ids)]})
        self.assertTrue((grp_a + grp_b + grp_c + grp_d)._check_m2m_recursion('implied_ids'))

        # create a cycle and check
        grp_a.implied_ids = grp_d
        self.assertFalse(grp_a._check_m2m_recursion('implied_ids'))

    def test_res_group_copy(self):
        """Copying a group must give it a distinct name."""
        original = self.env['res.groups'].with_context(lang='en_US').create({'name': 'A'})
        duplicate = original.copy()
        self.assertFalse(original.name == duplicate.name)

    def test_apply_groups(self):
        """_apply_group adds the implied group to every group in the recordset."""
        Groups = self.env['res.groups']
        grp_a = Groups.create({'name': 'A'})
        grp_b = Groups.create({'name': 'B'})
        grp_c = Groups.create({'name': 'C', 'implied_ids': [Command.set(grp_a.ids)]})

        # C already implies A, we want both B+C to imply A
        (grp_b + grp_c)._apply_group(grp_a)

        self.assertIn(grp_a, grp_b.implied_ids)
        self.assertIn(grp_a, grp_c.implied_ids)

    def test_remove_groups(self):
        """_remove_group drops the implication and only detaches users whose
        membership came exclusively through the removed implication."""
        Users = self.env['res.users']
        Groups = self.env['res.groups']
        u1 = Users.create({'login': 'u1', 'name': 'U1'})
        u2 = Users.create({'login': 'u2', 'name': 'U2'})
        default = self.env.ref('base.default_user')
        portal = self.env.ref('base.group_portal')
        p = Users.create({'login': 'p', 'name': 'P', 'groups_id': [Command.set([portal.id])]})

        grp_a = Groups.create({'name': 'A', 'users': [Command.set(u1.ids)]})
        grp_b = Groups.create({'name': 'B', 'users': [Command.set(u1.ids)]})
        grp_c = Groups.create({'name': 'C', 'implied_ids': [Command.set(grp_a.ids)], 'users': [Command.set([p.id, u2.id, default.id])]})
        grp_d = Groups.create({'name': 'D', 'implied_ids': [Command.set(grp_a.ids)], 'users': [Command.set([u2.id, default.id])]})

        def check_users(expected_users, group):
            # compare by login, including archived users (the template user)
            self.assertEqual(
                sorted(u.login for u in expected_users),
                sorted(u.login for u in group.with_context(active_test=False).users),
            )

        # sanity checks
        check_users([u1, u2, p, default], grp_a)
        check_users([u1], grp_b)
        check_users([u2, p, default], grp_c)
        check_users([u2, default], grp_d)

        # C already implies A, we want none of B+C to imply A
        (grp_b + grp_c)._remove_group(grp_a)

        self.assertNotIn(grp_a, grp_b.implied_ids)
        self.assertNotIn(grp_a, grp_c.implied_ids)
        self.assertIn(grp_a, grp_d.implied_ids)

        # - Since B didn't imply A, removing A from the implied groups of (B+C)
        #   should not remove user U1 from A, even though C implied A, since C does
        #   not have U1 as a user
        # - P should be removed as was only added via inheritance to C
        # - U2 should not be removed from A since it is implied via C but also via D
        check_users([u1, u2, default], grp_a)
        check_users([u1], grp_b)
        check_users([u2, p, default], grp_c)
        check_users([u2, default], grp_d)

        # When adding the template user to a new group, it should add it to existing internal users
        grp_e = Groups.create({'name': 'E'})
        default.write({'groups_id': [Command.link(grp_e.id)]})
        self.assertIn(u1, grp_e.users)
        self.assertIn(u2, grp_e.users)
        self.assertIn(default, grp_e.with_context(active_test=False).users)
        self.assertNotIn(p, grp_e.users)
|
||||
|
||||
|
||||
class TestUsers(TransactionCase):

    def test_superuser(self):
        """ The superuser is inactive and must remain as such. """
        root_user = self.env['res.users'].browse(SUPERUSER_ID)
        self.assertFalse(root_user.active)
        with self.assertRaises(UserError):
            root_user.write({'active': True})
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests import common
|
||||
|
||||
|
||||
class TestSingleTransactionCase(common.SingleTransactionCase):
    """
    Check the whole-class transaction behavior of SingleTransactionCase.
    """

    def test_00(self):
        """ Create a partner. """
        Partner = self.env['res.partner']
        Partner.create({'name': 'test_per_class_teardown_partner'})
        found = Partner.search([('name', '=', 'test_per_class_teardown_partner')])
        self.assertEqual(1, len(found), "Test partner not found.")

    def test_01(self):
        """ Find the created partner. """
        # the record created in test_00 survives: one transaction per class
        found = self.env['res.partner'].search([('name', '=', 'test_per_class_teardown_partner')])
        self.assertEqual(1, len(found), "Test partner not found.")

    def test_20a(self):
        """ Create a partner with a XML ID """
        pid, _ = self.env['res.partner'].name_create('Mr Blue')
        self.env['ir.model.data'].create({
            'name': 'test_partner_blue',
            'module': 'base',
            'model': 'res.partner',
            'res_id': pid,
        })

    def test_20b(self):
        """ Resolve xml id with ref() and browse_ref() """
        xid = 'base.test_partner_blue'
        partner = self.env.ref(xid)
        pid = self.ref(xid)
        self.assertTrue(pid, "ref() should resolve xid to database ID")
        self.assertEqual(pid, partner.id, "ref() is not consistent with env.ref()")
        partner2 = self.browse_ref(xid)
        self.assertEqual(partner, partner2, "browse_ref() should resolve xid to browse records")
|
||||
|
||||
|
||||
class TestTransactionCase(common.TransactionCase):
    """
    Check the per-method transaction behavior of TransactionCase.
    """

    def test_00(self):
        """ Create a partner. """
        Partner = self.env['res.partner']
        domain = [('name', '=', 'test_per_class_teardown_partner')]
        # each test starts from a clean transaction: the record is absent
        self.assertEqual(0, len(Partner.search(domain)), "Test partner found.")
        Partner.create({'name': 'test_per_class_teardown_partner'})
        self.assertEqual(1, len(Partner.search(domain)), "Test partner not found.")

    def test_01(self):
        """ Don't find the created partner. """
        # test_00's transaction was rolled back, so nothing remains
        partners = self.env['res.partner'].search(
            [('name', '=', 'test_per_class_teardown_partner')])
        self.assertEqual(0, len(partners), "Test partner found.")

    def test_20a(self):
        """ Create a partner with a XML ID then resolve xml id with ref() and browse_ref() """
        pid, _ = self.env['res.partner'].name_create('Mr Yellow')
        self.env['ir.model.data'].create({
            'name': 'test_partner_yellow',
            'module': 'base',
            'model': 'res.partner',
            'res_id': pid,
        })
        xid = 'base.test_partner_yellow'
        partner = self.env.ref(xid)
        pid = self.ref(xid)
        self.assertEqual(pid, partner.id, "ref() should resolve xid to database ID")
        partner2 = self.browse_ref(xid)
        self.assertEqual(partner, partner2, "browse_ref() should resolve xid to browse records")
130
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_cache.py
Normal file
130
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_cache.py
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import os
|
||||
import platform
|
||||
import psutil
|
||||
import unittest
|
||||
|
||||
from odoo.addons.base.tests.common import TransactionCaseWithUserDemo
|
||||
from odoo.exceptions import CacheMiss
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
class TestRecordCache(TransactionCaseWithUserDemo):

    def test_cache(self):
        """ Check the record cache object. """
        Model = self.env['res.partner']
        name = type(Model).name
        ref = type(Model).ref

        cache = self.env.cache

        def check_one(record, field, value):
            # value is None means no value in cache
            self.assertEqual(cache.contains(record, field), value is not None)
            try:
                self.assertEqual(cache.get(record, field), value)
                self.assertIsNotNone(value)
            except CacheMiss:
                self.assertIsNone(value)
            self.assertEqual(field in cache.get_fields(record), value is not None)
            self.assertEqual(record in cache.get_records(record, field), value is not None)

        def check(record, name_val, ref_val):
            """ check the values of fields 'name' and 'ref' on record. """
            check_one(record, name, name_val)
            check_one(record, ref, ref_val)

        foo1, bar1 = Model.browse([1, 2])
        foo2, bar2 = Model.with_user(self.user_demo).browse([1, 2])
        self.assertNotEqual(foo1.env.uid, foo2.env.uid)

        # cache is empty
        cache.invalidate()
        for record in (foo1, foo2, bar1, bar2):
            check(record, None, None)

        self.assertCountEqual(cache.get_missing_ids(foo1 + bar1, name), [1, 2])
        self.assertCountEqual(cache.get_missing_ids(foo2 + bar2, name), [1, 2])

        # set values in one environment only; both environments see them
        cache.set(foo1, name, 'FOO1_NAME')
        cache.set(foo1, ref, 'FOO1_REF')
        cache.set(bar1, name, 'BAR1_NAME')
        cache.set(bar1, ref, 'BAR1_REF')
        check(foo1, 'FOO1_NAME', 'FOO1_REF')
        check(foo2, 'FOO1_NAME', 'FOO1_REF')
        check(bar1, 'BAR1_NAME', 'BAR1_REF')
        check(bar2, 'BAR1_NAME', 'BAR1_REF')
        self.assertCountEqual(cache.get_missing_ids(foo1 + bar1, name), [])
        self.assertCountEqual(cache.get_missing_ids(foo2 + bar2, name), [])

        # overwrite the values through the other environment
        cache.set(foo2, name, 'FOO2_NAME')
        cache.set(foo2, ref, 'FOO2_REF')
        cache.set(bar2, name, 'BAR2_NAME')
        cache.set(bar2, ref, 'BAR2_REF')
        check(foo1, 'FOO2_NAME', 'FOO2_REF')
        check(foo2, 'FOO2_NAME', 'FOO2_REF')
        check(bar1, 'BAR2_NAME', 'BAR2_REF')
        check(bar2, 'BAR2_NAME', 'BAR2_REF')
        self.assertCountEqual(cache.get_missing_ids(foo1 + bar1, name), [])
        self.assertCountEqual(cache.get_missing_ids(foo2 + bar2, name), [])

        # remove one value; it disappears for both environments
        cache.remove(foo1, name)
        check(foo1, None, 'FOO2_REF')
        check(foo2, None, 'FOO2_REF')
        check(bar1, 'BAR2_NAME', 'BAR2_REF')
        check(bar2, 'BAR2_NAME', 'BAR2_REF')
        self.assertCountEqual(cache.get_missing_ids(foo1 + bar1, name), [1])
        self.assertCountEqual(cache.get_missing_ids(foo2 + bar2, name), [1])

        # partial invalidation: all 'name' values, and 'ref' on foo1 only
        cache.invalidate([(name, None), (ref, foo1.ids)])
        check(foo1, None, None)
        check(foo2, None, None)
        check(bar1, None, 'BAR2_REF')
        check(bar2, None, 'BAR2_REF')

        # total invalidation
        cache.invalidate()
        for record in (foo1, foo2, bar1, bar2):
            check(record, None, None)

    @unittest.skipIf(
        not (platform.system() == 'Linux' and platform.machine() == 'x86_64'),
        "This test only makes sense on 64-bit Linux-like systems",
    )
    def test_memory(self):
        """ Check memory consumption of the cache. """
        NB_RECORDS = 100000
        MAX_MEMORY = 100

        cache = self.env.cache
        model = self.env['res.partner']
        records = [model.new() for _ in range(NB_RECORDS)]

        process = psutil.Process(os.getpid())
        rss0 = process.memory_info().rss

        char_names = [
            'name', 'display_name', 'email', 'website', 'phone', 'mobile',
            'street', 'street2', 'city', 'zip', 'vat', 'ref',
        ]
        # fill the cache with one value per (field, record) pair
        for field_name in char_names:
            field = model._fields[field_name]
            for record in records:
                cache.set(record, field, 'test')

        mem_usage = process.memory_info().rss - rss0
        self.assertLess(
            mem_usage, MAX_MEMORY * 1024 * 1024,
            "Caching %s records must take less than %sMB of memory" % (NB_RECORDS, MAX_MEMORY),
        )
267
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_cloc.py
Normal file
267
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_cloc.py
Normal file
|
|
@ -0,0 +1,267 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from odoo.tools import cloc
|
||||
from odoo.tests import TransactionCase, tagged
|
||||
|
||||
# Fixtures for the cloc line-counting tests below.
# NOTE(review): the indentation inside these literals was reconstructed from
# a whitespace-mangled source — confirm against the original file.  For the
# XML/JS/CSS/SCSS samples only the number and kind of lines matters to the
# parsers; the Python sample must additionally stay syntactically valid.

XML_TEST = """<!-- Comment -->
<?xml version="1.0" encoding="UTF-8"?>
<odoo>
    <node>Line</node>
    <!-- Comment -->
    <node>Line</node>
    <!-- Comment
    Multi
    Line -->
    <![CDATA[
    Line
    ]]>
    <![CDATA[
    <!-- comment in CDATA -->
    cdata Line
    yes6]]>
    <![CDATA[<!-- not a comment-->]]>
    <![CDATA[<!-- not a comment
    but counted as is
    -->]]>
    <!-- <![CDATA[ This is a valid comment ]]> -->
    <!-- <![CDATA[ Multi line
    comment]]> -->
    <record id="my_id" model="model">
        <field name="name">name</field>
    </record>
    <![CDATA[ <!-- no a comment]]>
    <node>not a comment but found as is</node>
    <!-- comment -->
    <node>After closed comment back to normal</node>
</odoo>
"""

PY_TEST_NO_RETURN = '''line = 1
line = 2'''

PY_TEST = '''
# comment 1

def func(): # eol comment 3
    """ docstring
    """
    pass

def query():
    long_query = """
        SELECT *
        FROM table
        WHERE id = 1;
    """
    return query

print(i.lineno, i, getattr(i,'s',None), getattr(i,'value',None))
'''

JS_TEST = r'''
/*
comment
*/

function() {
    return 1+2; // comment
}

function() {
    hello = 4; /*
    comment
    */
    console.log(hello);
    regex = /\/*h/;
    legit_code_counted = 1;
    regex2 = /.*/;
}
'''

CSS_TEST = '''
/*
Comment
*/

p {
    text-align: center;
    color: red;
    text-overflow: ' /* ';
}


#content, #footer, #supplement {
    position: absolute;
    left: 510px;
    width: 200px;
    text-overflow: ' */ ';
}
'''

SCSS_TEST = '''
/*
Comment
*/

// Standalone list views
.o_content > .o_list_view > .table-responsive > .table {
    // List views always have the table-sm class, maybe we should remove
    // it (and consider it does not exist) and change the default table paddings
    @include o-list-view-full-width-padding($base-x: $table-cell-padding-x-sm, $base-y: $table-cell-padding-y-sm, $ratio: 2);
    &:not(.o_list_table_grouped) {
        @include media-breakpoint-up(xl) {
            @include o-list-view-full-width-padding($base-x: $table-cell-padding-x-sm, $base-y: $table-cell-padding-y-sm, $ratio: 2.5);
        }
    }

    .o_optional_columns_dropdown_toggle {
        padding: 8px 10px;
    }
}

#content, #footer, #supplement {
    text-overflow: '/*';
    left: 510px;
    width: 200px;
    text-overflow: '*/';
}
'''
|
||||
class TestClocCustomization(TransactionCase):
    """Check which database customizations are counted by the cloc tool."""

    def create_xml_id(self, module, name, rec):
        # register an external identifier for ``rec`` under ``module``
        self.env['ir.model.data'].create({
            'name': name,
            'model': rec._name,
            'res_id': rec.id,
            'module': module,
        })

    def create_field(self, name):
        # create a manual non-stored computed field on res.partner
        field = self.env['ir.model.fields'].with_context(studio=True).create({
            'name': name,
            'field_description': name,
            'model': 'res.partner',
            'model_id': self.env.ref('base.model_res_partner').id,
            'ttype': 'integer',
            'store': False,
            'compute': "for rec in self: rec['x_invoice_count'] = 10",
        })
        # Simulate the effect of https://github.com/odoo/odoo/commit/9afce4805fc8bac45fdba817488aa867fddff69b
        # Updating a module create xml_id of the module even for manual field if it's the original module
        # of the model
        self.create_xml_id('base', name, field)
        return field

    def create_server_action(self, name):
        # create a code server action whose body counts as custom code
        return self.env['ir.actions.server'].create({
            'name': name,
            'code': """
for rec in records:
    rec['name'] = test
""",
            'state': 'code',
            'type': 'ir.actions.server',
            'model_id': self.env.ref('base.model_res_partner').id,
        })

    def _studio_count(self):
        """Run a fresh cloc pass and return the 'odoo/studio' line count."""
        counter = cloc.Cloc()
        counter.count_customization(self.env)
        return counter.code.get('odoo/studio', 0)

    def test_ignore_auto_generated_computed_field(self):
        """
        Check that we count custom fields with no module or studio not auto generated
        Having an xml_id but no existing module is consider as not belonging to a module
        """
        f1 = self.create_field('x_invoice_count')
        self.create_xml_id('studio_customization', 'invoice_count', f1)
        self.assertEqual(self._studio_count(), 0, 'Studio auto generated count field should not be counted in cloc')
        f2 = self.create_field('x_studio_custom_field')
        self.create_xml_id('studio_customization', 'studio_custom', f2)
        self.assertEqual(self._studio_count(), 1, 'Count other studio computed field')
        self.create_field('x_custom_field')
        self.assertEqual(self._studio_count(), 2, 'Count fields without xml_id')
        f4 = self.create_field('x_custom_field_export')
        self.create_xml_id('__export__', 'studio_custom', f4)
        self.assertEqual(self._studio_count(), 3, 'Count fields with xml_id but without module')

    def test_several_xml_id(self):
        sa = self.create_server_action("Test double xml_id")
        self.create_xml_id("__export__", "first", sa)
        self.create_xml_id("base", "second", sa)
        self.assertEqual(self._studio_count(), 2, 'Count Should count SA with a non standard xml_id')
        self.create_xml_id("__import__", "third", sa)
        self.assertEqual(self._studio_count(), 2, 'SA with several xml_id should be counted only once')

    def test_cloc_exclude_xml_id(self):
        sa = self.create_server_action("Test double xml_id")
        self.create_xml_id("__cloc_exclude__", "sa_first", sa)
        self.create_xml_id("__upgrade__", "sa_second", sa)
        self.assertEqual(self._studio_count(), 0, 'Should not count SA with cloc_exclude xml_id')

        f1 = self.create_field('x_invoice_count')
        self.create_xml_id("__cloc_exclude__", "field_first", f1)
        self.create_xml_id("__upgrade__", "field_second", f1)
        self.assertEqual(self._studio_count(), 0, 'Should not count Field with cloc_exclude xml_id')

    def test_field_no_xml_id(self):
        self.env['ir.model.fields'].create({
            'name': "x_no_xml_id",
            'field_description': "no_xml_id",
            'model': 'res.partner',
            'model_id': self.env.ref('base.model_res_partner').id,
            'ttype': 'integer',
            'store': False,
            'compute': "for rec in self: rec['x_invoice_count'] = 10",
        })
        self.assertEqual(self._studio_count(), 1, 'Should count field with no xml_id at all')
||||
|
||||
class TestClocParser(TransactionCase):
    """Check line counts produced by each cloc language parser."""

    def test_parser(self):
        counter = cloc.Cloc()
        self.assertEqual(counter.parse_xml(XML_TEST), (18, 31))
        self.assertEqual(counter.parse_py(PY_TEST_NO_RETURN), (2, 2))
        py_count = counter.parse_py(PY_TEST)
        # NOTE(review): _python_version is assumed to be provided by the
        # test base class — confirm against odoo.tests.common
        if self._python_version >= (3, 8, 0):
            # Multi line str lineno return the begining of the str
            # in python 3.8, it result in a different count for
            # multi str used in expressions
            self.assertEqual(py_count, (7, 16))
        else:
            self.assertEqual(py_count, (8, 16))
        self.assertEqual(counter.parse_js(JS_TEST), (10, 17))
        self.assertEqual(counter.parse_css(CSS_TEST), (11, 17))
        self.assertEqual(counter.parse_scss(SCSS_TEST), (17, 26))
|
||||
|
||||
@tagged('post_install', '-at_install')
class TestClocStdNoCusto(TransactionCase):

    def test_no_custo_install(self):
        """
        Make sure after the installation of module
        no database customization is counted
        """
        counter = cloc.Cloc()
        counter.count_customization(self.env)
        self.assertEqual(counter.code.get('odoo/studio', 0), 0, 'Module should not generate customization in database')
|
|
@ -0,0 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.addons.base.models.ir_config_parameter import _default_parameters
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
class TestIrConfigParameter(TransactionCase):

    def test_default_parameters(self):
        """ Check the behavior of _default_parameters
        when updating keys and deleting records. """
        ICP = self.env['ir.config_parameter']
        for key in _default_parameters:
            param = ICP.search([('key', '=', key)], limit=1)
            # a protected parameter may not be deleted...
            with self.assertRaises(ValidationError):
                param.unlink()

            # ...nor renamed to another key
            new_key = f"{key}_updated"
            with self.assertRaises(ValidationError):
                param.write({'key': new_key})
|
|
@ -0,0 +1,26 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from datetime import date
|
||||
|
||||
from odoo.tests import BaseCase
|
||||
from odoo.tools.date_utils import get_fiscal_year
|
||||
|
||||
|
||||
class TestDateUtils(BaseCase):
    """Sanity checks for get_fiscal_year (fiscal year boundaries)."""

    def test_fiscal_year(self):
        fy = get_fiscal_year  # shorthand

        # default: the fiscal year is the calendar year
        self.assertEqual(fy(date(2024, 12, 31)), (date(2024, 1, 1), date(2024, 12, 31)))
        self.assertEqual(fy(date(2024, 12, 31), 30, 11), (date(2024, 12, 1), date(2025, 11, 30)))
        self.assertEqual(fy(date(2024, 10, 31), 30, 11), (date(2023, 12, 1), date(2024, 11, 30)))
        self.assertEqual(fy(date(2024, 10, 31), 30, 12), (date(2023, 12, 31), date(2024, 12, 30)))

        self.assertEqual(fy(date(2024, 10, 31), month=11), (date(2023, 12, 1), date(2024, 11, 30)))
        self.assertEqual(fy(date(2024, 2, 29)), (date(2024, 1, 1), date(2024, 12, 31)))

        # fiscal years ending in February: leap-day handling
        self.assertEqual(fy(date(2024, 12, 31), 29, 2), (date(2024, 3, 1), date(2025, 2, 28)))
        self.assertEqual(fy(date(2024, 12, 31), 28, 2), (date(2024, 3, 1), date(2025, 2, 28)))
        self.assertEqual(fy(date(2023, 12, 31), 28, 2), (date(2023, 3, 1), date(2024, 2, 29)))
        self.assertEqual(fy(date(2023, 12, 31), 29, 2), (date(2023, 3, 1), date(2024, 2, 29)))

        self.assertEqual(fy(date(2024, 2, 29), 28, 2), (date(2023, 3, 1), date(2024, 2, 29)))
        self.assertEqual(fy(date(2023, 2, 28), 28, 2), (date(2022, 3, 1), date(2023, 2, 28)))
        self.assertEqual(fy(date(2023, 2, 28), 29, 2), (date(2022, 3, 1), date(2023, 2, 28)))
|
|
@ -0,0 +1,281 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import logging
|
||||
from functools import partial
|
||||
|
||||
import psycopg2
|
||||
from psycopg2.extensions import ISOLATION_LEVEL_REPEATABLE_READ
|
||||
|
||||
import odoo
|
||||
from odoo.sql_db import db_connect, TestCursor
|
||||
from odoo.tests import common
|
||||
from odoo.tests.common import BaseCase
|
||||
from odoo.tools.misc import config
|
||||
|
||||
# re-export for convenience of the tests in this module
ADMIN_USER_ID = common.ADMIN_USER_ID


def registry():
    """Return the model registry bound to the current test database."""
    return odoo.registry(common.get_db_name())
|
||||
|
||||
class TestRealCursor(BaseCase):
    """Tests exercising a real (non-test) database cursor."""

    def test_execute_bad_params(self):
        """
        Try to use iterable but non-list or int params in query parameters.
        """
        with registry().cursor() as cr:
            # scalar/string parameters must be rejected
            with self.assertRaises(ValueError):
                cr.execute("SELECT id FROM res_users WHERE login=%s", 'admin')
            with self.assertRaises(ValueError):
                cr.execute("SELECT id FROM res_users WHERE id=%s", 1)
            with self.assertRaises(ValueError):
                cr.execute("SELECT id FROM res_users WHERE id=%s", '1')

    def test_using_closed_cursor(self):
        with registry().cursor() as cr:
            cr.close()
            # any further use of a closed cursor must fail loudly
            with self.assertRaises(psycopg2.InterfaceError):
                cr.execute("SELECT 1")

    def test_multiple_close_call_cursor(self):
        # closing a cursor twice must be harmless (idempotent close)
        cr = registry().cursor()
        cr.close()
        cr.close()

    def test_transaction_isolation_cursor(self):
        with registry().cursor() as cr:
            self.assertEqual(cr.connection.isolation_level, ISOLATION_LEVEL_REPEATABLE_READ)
||||
class TestTestCursor(common.TransactionCase):
    """Exercise the TestCursor wrapper used while the registry is in test mode."""

    def setUp(self):
        super().setUp()
        # switch the registry to test mode, backed by the test transaction
        self.registry.enter_test_mode(self.cr)
        self.addCleanup(self.registry.leave_test_mode)
        # replace self.cr/self.env by a test cursor and a matching environment
        self.cr = self.registry.cursor()
        self.addCleanup(self.cr.close)
        self.env = odoo.api.Environment(self.cr, odoo.SUPERUSER_ID, {})
        self.record = self.env['res.partner'].create({'name': 'Foo'})

    def write(self, record, value):
        # helper: assign the 'ref' field without flushing
        record.ref = value

    def flush(self, record):
        # helper: push pending 'ref' values to the database
        record.flush_model(['ref'])

    def check(self, record, value):
        # make sure to fetch the field from the database
        record.invalidate_recordset()
        self.assertEqual(record.read(['ref'])[0]['ref'], value)

    def test_single_cursor(self):
        """ Check the behavior of a single test cursor. """
        self.assertIsInstance(self.cr, TestCursor)
        self.write(self.record, 'A')
        self.cr.commit()

        self.write(self.record, 'B')
        self.cr.rollback()
        self.check(self.record, 'A')

        self.write(self.record, 'C')
        self.cr.rollback()
        self.check(self.record, 'A')

    def test_sub_commit(self):
        """ Check the behavior of a subcursor that commits. """
        self.assertIsInstance(self.cr, TestCursor)
        self.write(self.record, 'A')
        self.cr.commit()

        self.write(self.record, 'B')
        self.flush(self.record)

        # a nested test cursor that commits makes its changes visible to
        # the outer cursor
        with self.registry.cursor() as sub_cr:
            self.assertIsInstance(sub_cr, TestCursor)
            sub_record = self.record.with_env(self.env(cr=sub_cr))
            self.check(sub_record, 'B')
            self.write(sub_record, 'C')

        self.check(self.record, 'C')

        self.cr.rollback()
        self.check(self.record, 'A')

    def test_sub_rollback(self):
        """ Check the behavior of a subcursor that rollbacks. """
        self.assertIsInstance(self.cr, TestCursor)
        self.write(self.record, 'A')
        self.cr.commit()

        self.write(self.record, 'B')
        self.flush(self.record)

        # a nested test cursor that rolls back (because of the exception)
        # leaves the outer cursor's data untouched
        with self.assertRaises(ValueError):
            with self.registry.cursor() as sub_cr:
                self.assertIsInstance(sub_cr, TestCursor)
                sub_record = self.record.with_env(self.env(cr=sub_cr))
                self.check(sub_record, 'B')
                self.write(sub_record, 'C')
                raise ValueError(42)

        self.check(self.record, 'B')

        self.cr.rollback()
        self.check(self.record, 'A')

    def test_interleaving(self):
        """If test cursors are retrieved independently it becomes possible for
        the savepoint operations to be interleaved (especially as some are lazy
        e.g. the request cursor, so cursors might be semantically nested but
        technically interleaved), and for them to commit one another:

        .. code-block:: sql

            SAVEPOINT A
            SAVEPOINT B
            RELEASE SAVEPOINT A
            RELEASE SAVEPOINT B -- "savepoint b does not exist"
        """
        a = self.registry.cursor()
        _b = self.registry.cursor()
        # `a` should warn that it found un-closed cursor `b` when trying to close itself
        with self.assertLogs('odoo.sql_db', level=logging.WARNING) as cm:
            a.close()
        [msg] = cm.output
        self.assertIn('WARNING:odoo.sql_db:Found different un-closed cursor', msg)
        # avoid a warning on teardown (when self.cr finds a still on the stack)
        # as well as ensure the stack matches our expectations
        self.assertEqual(a._cursors_stack.pop(), a)

    def test_borrow_connection(self):
        """Tests the behavior of the postgresql connection pool recycling/borrowing"""
        origin_db_port = config['db_port']
        if not origin_db_port and hasattr(self.env.cr._cnx, 'info'):
            # Check the edge case of the db port set,
            # which is set as an integer in our DSN/connection_info
            # but as string in the DSN of psycopg2
            # The connections must be recycled/borrowed when the db_port is set
            # e.g
            # `connection.dsn`
            # {'database': '14.0', 'port': 5432, 'sslmode': 'prefer'}
            # must match
            # `cr._cnx.dsn`
            # 'port=5432 sslmode=prefer dbname=14.0'
            config['db_port'] = self.env.cr._cnx.info.port

        open_cursors = []
        try:
            connection = db_connect(self.cr.dbname)

            # Case #1: 2 cursors, both opened/used, do not recycle/borrow.
            # The 2nd cursor must not use the connection of the 1st cursor as it's used (not closed).
            open_cursors.append(connection.cursor())
            open_cursors.append(connection.cursor())
            # Ensure the port is within psycopg's dsn, as explained in an above comment,
            # we want to test the behavior of the connections borrowing including the port provided in the dsn.
            if config['db_port']:
                self.assertTrue('port=' in open_cursors[0]._cnx.dsn)
            # Check the connection of the 1st cursor is different than the connection of the 2nd cursor.
            self.assertNotEqual(id(open_cursors[0]._cnx), id(open_cursors[1]._cnx))

            # Case #2: Close 1st cursor, open 3rd cursor, must recycle/borrow.
            # The 3rd must recycle/borrow the connection of the 1st one.
            open_cursors[0].close()
            open_cursors.append(connection.cursor())
            # Check the connection of this 3rd cursor uses the connection of the 1st cursor that has been closed.
            self.assertEqual(id(open_cursors[0]._cnx), id(open_cursors[2]._cnx))

        finally:
            # Cleanups:
            # - Close the cursors which have been left opened
            # - Reset the config `db_port`
            for cursor in open_cursors:
                if not cursor.closed:
                    cursor.close()
            config['db_port'] = origin_db_port
|
||||
|
||||
class TestCursorHooks(common.TransactionCase):
    """Check that pre/post commit and rollback hooks fire at the right times."""

    def setUp(self):
        super().setUp()
        self.log = []

    def prepare_hooks(self, cr):
        # reset the log and register one marker callback per hook set,
        # in the same order as the cursor runs them
        self.log.clear()
        for hooks, marker in [
            (cr.precommit, 'preC'),
            (cr.postcommit, 'postC'),
            (cr.prerollback, 'preR'),
            (cr.postrollback, 'postR'),
        ]:
            hooks.add(partial(self.log.append, marker))
        self.assertEqual(self.log, [])

    def test_hooks_on_cursor(self):
        cr = self.registry.cursor()

        # check hook on commit()
        self.prepare_hooks(cr)
        cr.commit()
        self.assertEqual(self.log, ['preC', 'postC'])

        # check hook on flush(), then on rollback()
        self.prepare_hooks(cr)
        cr.flush()
        self.assertEqual(self.log, ['preC'])
        cr.rollback()
        self.assertEqual(self.log, ['preC', 'preR', 'postR'])

        # check hook on close()
        self.prepare_hooks(cr)
        cr.close()
        self.assertEqual(self.log, ['preR', 'postR'])

    def test_hooks_on_testcursor(self):
        self.registry.enter_test_mode(self.cr)
        self.addCleanup(self.registry.leave_test_mode)

        cr = self.registry.cursor()

        # check hook on commit(); post-commit hooks are ignored
        self.prepare_hooks(cr)
        cr.commit()
        self.assertEqual(self.log, ['preC'])

        # check hook on flush(), then on rollback()
        self.prepare_hooks(cr)
        cr.flush()
        self.assertEqual(self.log, ['preC'])
        cr.rollback()
        self.assertEqual(self.log, ['preC', 'preR', 'postR'])

        # check hook on close()
        self.prepare_hooks(cr)
        cr.close()
        self.assertEqual(self.log, ['preR', 'postR'])
|
||||
class TestCursorHooksTransactionCaseCleanup(common.TransactionCase):
    """Check savepoint cases handle commit hooks properly."""

    def test_isolation_first(self):
        # register a precommit callback that, if it leaked into the next
        # test, would pop the first element of the reference lists that
        # test_isolation_second installs under well-known keys
        def mutate_second_test_ref():
            for name in ['precommit', 'postcommit', 'prerollback', 'postrollback']:
                del self.env.cr.precommit.data.get(f'test_cursor_hooks_savepoint_case_cleanup_test_second_{name}', [''])[0]
        self.env.cr.precommit.add(mutate_second_test_ref)

    def test_isolation_second(self):
        references = [['not_empty']] * 4
        cr = self.env.cr
        callback_sets = [cr.precommit, cr.postcommit, cr.prerollback, cr.postrollback]
        callback_names = ['precommit', 'postcommit', 'prerollback', 'postrollback']

        # stash one reference list in each callback set's shared data dict
        for callback_name, callbacks, reference in zip(callback_names, callback_sets, references):
            callbacks.data.setdefault(f"test_cursor_hooks_savepoint_case_cleanup_test_second_{callback_name}", reference)

        for callbacks in callback_sets:
            callbacks.run()

        # had the hooks of test_isolation_first survived, the reference
        # lists would have lost their first element
        for callback_name, reference in zip(callback_names, references):
            self.assertTrue(bool(reference), f"{callback_name} failed to clean up between transaction tests")
            self.assertTrue(reference[0] == 'not_empty', f"{callback_name} failed to clean up between transaction tests")
|
|
@ -0,0 +1,29 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import inspect
|
||||
|
||||
from odoo.tests.common import TransactionCase, tagged
|
||||
|
||||
# Model attributes that are no longer supported; TestModelDeprecations
# fails when any registry model still defines one of them.
DEPRECATED_MODEL_ATTRIBUTES = [
    'view_init',
    '_needaction',
    '_sql',
    '_execute_sql',
]
|
||||
|
||||
@tagged('-at_install', 'post_install', 'deprecation')
class TestModelDeprecations(TransactionCase):
    """Fail when any registry model still defines a deprecated attribute."""

    def test_model_attributes(self):
        for model_name, Model in self.registry.items():
            for attr in DEPRECATED_MODEL_ATTRIBUTES:
                with self.subTest(model=model_name, attr=attr):
                    value = getattr(Model, attr, None)
                    if value is None:
                        # attribute absent: nothing deprecated to report
                        continue
                    # report where the offending definition comes from, if known
                    location = inspect.getmodule(value)
                    suffix = f" in {location}" if location else ""
                    self.fail(f"Deprecated method/attribute {model_name}.{attr}{suffix}")
||||
1831
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_expression.py
Normal file
1831
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_expression.py
Normal file
File diff suppressed because it is too large
Load diff
249
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_float.py
Normal file
249
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_float.py
Normal file
|
|
@ -0,0 +1,249 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from math import log10
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools import float_compare, float_is_zero, float_repr, float_round, float_split, float_split_str
|
||||
|
||||
|
||||
class TestFloatPrecision(TransactionCase):
    """ Tests on float precision. """

    def test_rounding_02(self):
        """ Test rounding methods with 2 digits. """
        currency = self.env.ref('base.EUR')

        # helper: round `amount` with the currency's precision and compare
        # its string representation against `expected`
        def try_round(amount, expected):
            digits = max(0, -int(log10(currency.rounding)))
            result = float_repr(currency.round(amount), precision_digits=digits)
            self.assertEqual(result, expected, 'Rounding error: got %s, expected %s' % (result, expected))

        try_round(2.674,'2.67')
        try_round(2.675,'2.68')  # in Python 2.7.2, round(2.675,2) gives 2.67
        try_round(-2.675,'-2.68')  # in Python 2.7.2, round(2.675,2) gives 2.67
        try_round(0.001,'0.00')
        try_round(-0.001,'-0.00')
        try_round(0.0049,'0.00')  # 0.0049 is closer to 0 than to 0.01, so should round down
        try_round(0.005,'0.01')  # the rule is to round half away from zero
        try_round(-0.005,'-0.01')  # the rule is to round half away from zero
        try_round(6.6 * 0.175, '1.16')  # 6.6 * 0.175 is rounded to 1.15 with epsilon = 53
        try_round(-6.6 * 0.175, '-1.16')

        # helper: check currency.is_zero against the expected boolean
        def try_zero(amount, expected):
            self.assertEqual(currency.is_zero(amount), expected,
                             "Rounding error: %s should be zero!" % amount)

        try_zero(0.01, False)
        try_zero(-0.01, False)
        try_zero(0.001, True)
        try_zero(-0.001, True)
        try_zero(0.0046, True)
        try_zero(-0.0046, True)
        try_zero(2.68-2.675, False)  # 2.68 - 2.675 = 0.005 -> rounds to 0.01
        try_zero(2.68-2.676, True)  # 2.68 - 2.675 = 0.004 -> rounds to 0.0
        try_zero(2.676-2.68, True)  # 2.675 - 2.68 = -0.004 -> rounds to -0.0
        try_zero(2.675-2.68, False)  # 2.675 - 2.68 = -0.005 -> rounds to -0.01

        # helper: check currency.compare_amounts against the expected sign
        def try_compare(amount1, amount2, expected):
            self.assertEqual(currency.compare_amounts(amount1, amount2), expected,
                             "Rounding error, compare_amounts(%s,%s) should be %s" % (amount1, amount2, expected))

        try_compare(0.001, 0.001, 0)
        try_compare(-0.001, -0.001, 0)
        try_compare(0.001, 0.002, 0)
        try_compare(-0.001, -0.002, 0)
        try_compare(2.675, 2.68, 0)
        try_compare(2.676, 2.68, 0)
        try_compare(-2.676, -2.68, 0)
        try_compare(2.674, 2.68, -1)
        try_compare(-2.674, -2.68, 1)
        try_compare(3, 2.68, 1)
        try_compare(-3, -2.68, -1)
        try_compare(0.01, 0, 1)
        try_compare(-0.01, 0, -1)

    def test_rounding_03(self):
        """ Test rounding methods with 3 digits. """

        # helper: round `amount` with float_round and compare its string
        # representation against `expected`
        def try_round(amount, expected, digits=3, method='HALF-UP'):
            value = float_round(amount, precision_digits=digits, rounding_method=method)
            result = float_repr(value, precision_digits=digits)
            self.assertEqual(result, expected, 'Rounding error: got %s, expected %s' % (result, expected))

        try_round(2.6745, '2.675')
        try_round(-2.6745, '-2.675')
        try_round(2.6744, '2.674')
        try_round(-2.6744, '-2.674')
        try_round(0.0004, '0.000')
        try_round(-0.0004, '-0.000')
        try_round(357.4555, '357.456')
        try_round(-357.4555, '-357.456')
        try_round(457.4554, '457.455')
        try_round(-457.4554, '-457.455')

        # Try some rounding value with rounding method UP instead of HALF-UP
        # We use 8.175 because when normalizing 8.175 with precision_digits=3 it gives
        # us 8175,0000000001234 as value, and if the rounding UP is not handled
        # correctly the value will be incorrect (should be 8,175 and not 8,176)
        try_round(8.175, '8.175', method='UP')
        try_round(8.1751, '8.176', method='UP')
        try_round(-8.175, '-8.175', method='UP')
        try_round(-8.1751, '-8.176', method='UP')
        try_round(-6.000, '-6.000', method='UP')
        try_round(1.8, '2', 0, method='UP')
        try_round(-1.8, '-2', 0, method='UP')

        # Try some rounding value with rounding method DOWN instead of HALF-UP
        # We use 2.425 because when normalizing 2.425 with precision_digits=3 it gives
        # us 2424.9999999999995 as value, and if the rounding DOWN is not handled
        # correctly the value will be incorrect (should be 2.425 and not 2.424)
        try_round(2.425, '2.425', method='DOWN')
        try_round(2.4249, '2.424', method='DOWN')
        try_round(-2.425, '-2.425', method='DOWN')
        try_round(-2.4249, '-2.424', method='DOWN')
        try_round(-2.500, '-2.500', method='DOWN')
        try_round(1.8, '1', 0, method='DOWN')
        try_round(-1.8, '-1', 0, method='DOWN')

        # Extended float range test, inspired by Cloves Almeida's test on bug #882036.
        fractions = [.0, .015, .01499, .675, .67499, .4555, .4555, .45555]
        expecteds = ['.00', '.02', '.01', '.68', '.67', '.46', '.456', '.4556']
        precisions = [2, 2, 2, 2, 2, 2, 3, 4]
        # Note: max precision for double floats is 53 bits of precision or
        # 17 significant decimal digits
        for magnitude in range(7):
            for frac, exp, prec in zip(fractions, expecteds, precisions):
                for sign in [-1,1]:
                    for x in range(0, 10000, 97):
                        n = x * 10 ** magnitude
                        f = sign * (n + frac)
                        f_exp = ('-' if f != 0 and sign == -1 else '') + str(n) + exp
                        try_round(f, f_exp, digits=prec)

        # helper: check float_is_zero at 3 digits against the expected boolean
        def try_zero(amount, expected):
            self.assertEqual(float_is_zero(amount, precision_digits=3), expected,
                             "Rounding error: %s should be zero!" % amount)

        try_zero(0.0002, True)
        try_zero(-0.0002, True)
        try_zero(0.00034, True)
        try_zero(0.0005, False)
        try_zero(-0.0005, False)
        try_zero(0.0008, False)
        try_zero(-0.0008, False)

        # helper: check float_compare at 3 digits against the expected sign
        def try_compare(amount1, amount2, expected):
            self.assertEqual(float_compare(amount1, amount2, precision_digits=3), expected,
                             "Rounding error, compare_amounts(%s,%s) should be %s" % (amount1, amount2, expected))

        try_compare(0.0003, 0.0004, 0)
        try_compare(-0.0003, -0.0004, 0)
        try_compare(0.0002, 0.0005, -1)
        try_compare(-0.0002, -0.0005, 1)
        try_compare(0.0009, 0.0004, 1)
        try_compare(-0.0009, -0.0004, -1)
        try_compare(557.4555, 557.4556, 0)
        try_compare(-557.4555, -557.4556, 0)
        try_compare(657.4444, 657.445, -1)
        try_compare(-657.4444, -657.445, 1)

        # Rounding to unusual rounding units (e.g. coin values)
        def try_round(amount, expected, precision_rounding=None, method='HALF-UP'):  # pylint: disable=function-redefined
            value = float_round(amount, precision_rounding=precision_rounding, rounding_method=method)
            result = float_repr(value, precision_digits=2)
            self.assertEqual(result, expected, 'Rounding error: got %s, expected %s' % (result, expected))

        try_round(-457.4554, '-457.45', precision_rounding=0.05)
        try_round(457.444, '457.50', precision_rounding=0.5)
        try_round(457.3, '455.00', precision_rounding=5)
        try_round(457.5, '460.00', precision_rounding=5)
        try_round(457.1, '456.00', precision_rounding=3)
        try_round(2.5, '2.50', precision_rounding=0.05, method='DOWN')
        try_round(-2.5, '-2.50', precision_rounding=0.05, method='DOWN')

    def test_rounding_04(self):
        """ check that proper rounding is performed for float persistence """
        currency = self.env.ref('base.EUR')
        currency_rate = self.env['res.currency.rate']

        # helper: store `value` as a currency rate and check the value read
        # back from the database equals `expected`
        def try_roundtrip(value, expected, date):
            rate = currency_rate.create({'name': date,
                                         'rate': value,
                                         'currency_id': currency.id})
            self.assertEqual(rate.rate, expected,
                             'Roundtrip error: got %s back from db, expected %s' % (rate, expected))

        # res.currency.rate no more uses 6 digits of precision by default, it now uses whatever precision it gets
        try_roundtrip(10000.999999, 10000.999999, '2000-01-03')

        #TODO re-enable those tests when tests are made on dedicated models
        # (res.currency.rate don't accept negative value anymore)
        #try_roundtrip(-2.6748955, -2.674896, '2000-01-02')
        #try_roundtrip(-10000.999999, -10000.999999, '2000-01-04')

    def test_float_split_05(self):
        """ Test split method with 2 digits. """
        currency = self.env.ref('base.EUR')

        # helper: split `value` into (units, cents) with `split_fun` and
        # compare against the expected pair; `rounding` overrides the
        # currency-derived precision when given
        def try_split(value, expected, split_fun, rounding=None):
            digits = max(0, -int(log10(currency.rounding))) if rounding is None else rounding
            result = split_fun(value, precision_digits=digits)
            self.assertEqual(result, expected, 'Split error: got %s, expected %s' % (result, expected))

        try_split(2.674, ('2', '67'), float_split_str)
        try_split(2.675, ('2', '68'), float_split_str)  # in Python 2.7.2, round(2.675,2) gives 2.67
        try_split(-2.675, ('-2', '68'), float_split_str)  # in Python 2.7.2, round(2.675,2) gives 2.67
        try_split(0.001, ('0', '00'), float_split_str)
        try_split(-0.001, ('-0', '00'), float_split_str)
        try_split(42, ('42', '00'), float_split_str)
        try_split(0.1, ('0', '10'), float_split_str)
        try_split(13.0, ('13', ''), float_split_str, rounding=0)

        try_split(2.674, (2, 67), float_split)
        try_split(2.675, (2, 68), float_split)  # in Python 2.7.2, round(2.675,2) gives 2.67
        try_split(-2.675, (-2, 68), float_split)  # in Python 2.7.2, round(2.675,2) gives 2.67
        try_split(0.001, (0, 0), float_split)
        try_split(-0.001, (0, 0), float_split)
        try_split(42, (42, 0), float_split)
        try_split(0.1, (0, 10), float_split)
        try_split(13.0, (13, 0), float_split, rounding=0)

    def test_rounding_invalid(self):
        """ verify that invalid parameters are forbidden """
        # precision_digits and precision_rounding are mutually exclusive,
        # and precision_rounding must be strictly positive
        with self.assertRaises(AssertionError):
            float_is_zero(0.01, precision_digits=3, precision_rounding=0.01)

        with self.assertRaises(AssertionError):
            float_is_zero(0.0, precision_rounding=0.0)

        with self.assertRaises(AssertionError):
            float_is_zero(0.0, precision_rounding=-0.1)

        with self.assertRaises(AssertionError):
            float_compare(0.01, 0.02, precision_digits=3, precision_rounding=0.01)

        with self.assertRaises(AssertionError):
            float_compare(1.0, 1.0, precision_rounding=0.0)

        with self.assertRaises(AssertionError):
            float_compare(1.0, 1.0, precision_rounding=-0.1)

        with self.assertRaises(AssertionError):
            float_round(0.01, precision_digits=3, precision_rounding=0.01)

        with self.assertRaises(AssertionError):
            float_round(1.25, precision_rounding=0.0)

        with self.assertRaises(AssertionError):
            float_round(1.25, precision_rounding=-0.1)

    def test_amount_to_text_10(self):
        """ verify that amount_to_text works as expected """
        currency = self.env.ref('base.EUR')

        # 0.29 and 0.28 have distinct binary float representations; their
        # textual renderings must differ as well
        amount_target = currency.amount_to_text(0.29)
        amount_test = currency.amount_to_text(0.28)
        self.assertNotEqual(amount_test, amount_target,
                            "Amount in text should not depend on float representation")
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests import TransactionCase, tagged, Form
|
||||
|
||||
|
||||
@tagged('-at_install', 'post_install')
class TestFormCreate(TransactionCase):
    """
    Test that the basic Odoo models records can be created on
    the interface.
    """

    def _save_simple_form(self, model, **values):
        # Open a web-client form for `model`, assign `values` in the given
        # order (triggering the same onchanges a user would), then save.
        form = Form(self.env[model])
        for field_name, field_value in values.items():
            setattr(form, field_name, field_value)
        form.save()

    def test_create_res_partner(self):
        # YTI: Clean that brol
        has_account_fields = hasattr(self.env['res.partner'], 'property_account_payable_id')
        if has_account_fields:
            # `property_account_payable_id`/`property_account_receivable_id` are only
            # visible in the view with the right accounting group: by default the
            # `account` module requires `group_account_readonly`, but once
            # `account_accountant` is installed it becomes `account.group_account_manager`
            # https://github.com/odoo/enterprise/blob/bfa643278028da0bfabded2f87ccb7e323d697c1/account_accountant/views/product_views.xml#L9
            self.env.user.groups_id += self.env.ref('account.group_account_readonly')
            self.env.user.groups_id += self.env.ref('account.group_account_manager')
        form = Form(self.env['res.partner'])
        form.name = 'a partner'
        # YTI: Clean that brol
        if has_account_fields:
            payable_account = self.env['account.account'].create({
                'name': 'Test Account',
                'account_type': 'liability_payable',
                'code': 'TestAccountPayable',
                'reconcile': True
            })
            receivable_account = self.env['account.account'].create({
                'name': 'Test Account',
                'account_type': 'asset_receivable',
                'code': 'TestAccountReceivable',
                'reconcile': True
            })
            form.property_account_payable_id = payable_account
            form.property_account_receivable_id = receivable_account
        form.save()

    def test_create_res_users(self):
        self._save_simple_form('res.users', login='a user login', name='a user name')

    def test_create_res_company(self):
        self._save_simple_form('res.company', name='a company')

    def test_create_res_group(self):
        self._save_simple_form('res.groups', name='a group')

    def test_create_res_bank(self):
        self._save_simple_form('res.bank', name='a bank')

    def test_create_res_country(self):
        self._save_simple_form('res.country', name='a country', code='AA')

    def test_create_res_lang(self):
        self._save_simple_form('res.lang', name='a lang name', code='a lang code')
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
from odoo.addons.base.tests.test_views import ViewCase
|
||||
|
||||
|
||||
class FormatAddressCase(ViewCase):
    def assertAddressView(self, model):
        """Check that the <div class="o_address_format"> block of a form view
        for `model` is substituted by the country's address view when one is
        configured, and left untouched otherwise.
        """
        # pe_partner_address_form
        address_arch = """<form><div class="o_address_format"><field name="city"/></div></form>"""
        address_view = self.View.create({
            'name': 'view',
            'model': model,
            'arch': address_arch,
            'priority': 900,
        })

        # view can be created without address_view
        form_arch = """<form><field name="id"/><div class="o_address_format"><field name="street"/></div></form>"""
        view = self.View.create({
            'name': 'view',
            'model': model,
            'arch': form_arch,
        })

        # default view, no address_view defined
        arch = self.env[model].get_view(view.id)['arch']
        self.assertIn('"street"', arch)
        self.assertNotIn('"city"', arch)

        # custom view, address_view defined: the o_address_format block is
        # replaced by the address view's content ("city" instead of "street")
        self.env.company.country_id.address_view_id = address_view
        arch = self.env[model].get_view(view.id)['arch']
        self.assertNotIn('"street"', arch)
        self.assertIn('"city"', arch)
        self.assertRegex(arch, r'<form>.*<div class="o_address_format">.*</div>.*</form>')
        # no_address_format context disables the substitution entirely
        arch = self.env[model].with_context(no_address_format=True).get_view(view.id)['arch']
        self.assertIn('"street"', arch)
        self.assertNotIn('"city"', arch)

        belgium = self.env.ref('base.be')
        france = self.env.ref('base.fr')

        # only France gets a custom address view; Belgium keeps the default
        belgium.address_view_id = None
        france.address_view_id = address_view

        company_a, company_b = self.env['res.company'].create([
            {'name': 'foo', 'country_id': belgium.id},
            {'name': 'bar', 'country_id': france.id},
        ])

        # the substitution follows the active company's country
        arch = self.env[model].with_company(company_a).get_view(view.id)['arch']
        self.assertIn('"street"', arch)
        self.assertNotIn('"city"', arch)

        arch = self.env[model].with_company(company_b).get_view(view.id)['arch']
        self.assertNotIn('"street"', arch)
        self.assertIn('"city"', arch)
|
||||
|
||||
|
||||
class TestPartnerFormatAddress(FormatAddressCase):
    def test_address_view(self):
        # Pin the company's country first so assertAddressView starts from a
        # known state, then run the shared address-view checks on res.partner.
        self.env.company.country_id = self.env.ref('base.us')
        self.assertAddressView('res.partner')
|
||||
|
|
@ -0,0 +1,76 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import functools
|
||||
|
||||
from odoo.tests.common import BaseCase
|
||||
from odoo.tools import frozendict, lazy
|
||||
from odoo import Command
|
||||
|
||||
|
||||
class TestFrozendict(BaseCase):
    def test_frozendict_immutable(self):
        """ Ensure that a frozendict is immutable: every mutation path of the
        dict API must raise. """
        frozen_vals = frozendict({'name': 'Joe', 'age': 42})

        # One callable per mutation path: item assignment (new and existing
        # key), item deletion, update, setdefault, pop, popitem and clear.
        mutations = [
            lambda: frozen_vals.__setitem__('surname', 'Jack'),
            lambda: frozen_vals.__setitem__('name', 'Jack'),
            lambda: frozen_vals.__delitem__('name'),
            lambda: frozen_vals.update({'surname': 'Jack'}),
            lambda: frozen_vals.update({'name': 'Jack'}),
            lambda: frozen_vals.setdefault('surname', 'Jack'),
            lambda: frozen_vals.pop('surname', 'Jack'),
            lambda: frozen_vals.pop('name', 'Jack'),
            lambda: frozen_vals.popitem(),
            lambda: frozen_vals.clear(),
        ]
        for mutate in mutations:
            with self.assertRaises(Exception):
                mutate()

    def test_frozendict_hash(self):
        """ Ensure that a frozendict is hashable. """
        # dict with simple values
        hash(frozendict({'name': 'Joe', 'age': 42}))

        # dict holding tuples, lists, and embedded dicts must hash as well
        nested = frozendict({
            'user_id': (42, 'Joe'),
            'line_ids': [Command.create({'values': [42]})],
        })
        hash(nested)
|
||||
|
||||
|
||||
class TestLazy(BaseCase):
    def test_lazy_compare(self):
        """ Ensure that a lazy can be compared with an other lazy. """
        def deferred(value):
            # wrap `value` so it is only produced when the lazy is forced
            return lazy(lambda: value)

        self.assertEqual(deferred(1) <= deferred(42), True)
        self.assertEqual(deferred(42) <= deferred(1), False)
        self.assertEqual(deferred(42) == deferred(42), True)
        self.assertEqual(deferred(1) == deferred(42), False)
        self.assertEqual(deferred(42) != deferred(42), False)
        self.assertEqual(deferred(1) != deferred(42), True)

        # Object like recordset implement __eq__
        class Obj:
            def __init__(self, num):
                self.num = num

            def __eq__(self, other):
                if isinstance(other, Obj):
                    return self.num == other.num
                raise ValueError('Object does not have the correct type')

        self.assertEqual(deferred(Obj(42)) == deferred(Obj(42)), True)
        self.assertEqual(deferred(Obj(1)) == deferred(Obj(42)), False)
        self.assertEqual(deferred(Obj(42)) != deferred(Obj(42)), False)
        self.assertEqual(deferred(Obj(1)) != deferred(Obj(42)), True)
|
||||
|
|
@ -0,0 +1,155 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import threading
|
||||
from unittest.mock import patch
|
||||
|
||||
from odoo.http import Controller, request, route
|
||||
from odoo.tests.common import ChromeBrowser, HttpCase, tagged
|
||||
from odoo.tools import config, logging
|
||||
|
||||
_logger = logging.getLogger(__name__)


@tagged('-at_install', 'post_install')
class TestHttpCase(HttpCase):

    def test_console_error_string(self):
        """A browser-side console.error() with string arguments must fail the
        test run and be logged exactly once."""
        with self.assertLogs(level='ERROR') as log_catcher:
            with self.assertRaises(AssertionError) as error_catcher:
                code = "console.error('test error','message')"
                # take_screenshot patched out: no screenshot artifact wanted here
                with patch('odoo.tests.common.ChromeBrowser.take_screenshot', return_value=None):
                    self.browser_js(url_path='about:blank', code=code)
            # the last line must contain the error message
            self.assertEqual(error_catcher.exception.args[0].splitlines()[-1], "test error message")
        self.assertEqual(len(log_catcher.output), 1)
        self.assertIn('test error message', log_catcher.output[0])

    def test_console_error_object(self):
        """A browser-side console.error() with an Error object must fail the
        test run, reporting message and stack trace."""
        with self.assertLogs(level='ERROR') as log_catcher:
            with self.assertRaises(AssertionError) as error_catcher:
                code = "console.error(TypeError('test error message'))"
                # take_screenshot patched out: no screenshot artifact wanted here
                with patch('odoo.tests.common.ChromeBrowser.take_screenshot', return_value=None):
                    self.browser_js(url_path='about:blank', code=code)
            # the last two lines must contain the error message and its stack location
            self.assertEqual(error_catcher.exception.args[0].splitlines()[-2:],
                             ['TypeError: test error message', '    at <anonymous>:1:15'])
        self.assertEqual(len(log_catcher.output), 1)
        self.assertIn('TypeError: test error message\n    at <anonymous>:1:15', log_catcher.output[0])

    def test_console_log_object(self):
        """A browser-side console.log() of a plain object is rendered as a
        one-line Object(...) summary in the server logs."""
        logger = logging.getLogger('odoo')
        level = logger.level
        # INFO needed so browser console messages reach the log catcher
        logger.setLevel(logging.INFO)
        self.addCleanup(logger.setLevel, level)

        with self.assertLogs() as log_catcher:
            code = "console.log({custom:{1:'test', 2:'a'}, value:1, description:'dummy'});console.log('test successful');"
            self.browser_js(url_path='about:blank', code=code)
        console_log_count = 0
        for log in log_catcher.output:
            if '.browser:' in log:
                text = log.split('.browser:', 1)[1]
                if text == 'test successful':
                    # end-of-scenario marker emitted by the code above, not a payload line
                    continue
                self.assertEqual(text, "Object(custom=Object, value=1, description='dummy')")
                console_log_count += 1
        self.assertEqual(console_log_count, 1)

    @patch.dict(config.options, {"dev_mode": []})
    def test_404_assets(self):
        """Serving a 404 page must not persist generated asset attachments,
        while serving a real page must."""
        IrAttachment = self.env['ir.attachment']
        # Ensure no assets exists
        IrAttachment.search([('url', '=like', '/web/assets/%')]).unlink()
        response = self.url_open('/NoSuchPage')
        self.assertEqual(response.status_code, 404, "Page should not exist")
        self.assertFalse(
            IrAttachment.search_count([('url', '=like', '/web/assets/%')]),
            "Assets should not have been generated because the transaction was rolled back"
            # Well, they should - but this is part of a compromise to avoid
            # being in the way of the read-only mode.
        )
        response = self.url_open('/')
        self.assertEqual(response.status_code, 200, "Page should exist")
        self.assertTrue(
            IrAttachment.search_count([('url', '=like', '/web/assets/%')]),
            "Assets should have been generated"
        )
|
||||
|
||||
|
||||
@tagged('-at_install', 'post_install')
class TestChromeBrowser(HttpCase):
    def setUp(self):
        super().setUp()
        # Fall back to the screenshots directory when no screencast directory
        # is configured, so the ChromeBrowser instance always has a target dir.
        screencasts_dir = config['screencasts'] or config['screenshots']
        # The patch only needs to cover browser construction; cleanup callbacks
        # run outside of it.
        with patch.dict(config.options, {'screencasts': screencasts_dir, 'screenshots': config['screenshots']}):
            self.browser = ChromeBrowser(self)
        self.addCleanup(self.browser.stop)
        self.addCleanup(self.browser.clear)

    def test_screencasts(self):
        """Record a short screencast of a page that repaints continuously and
        save it without error."""
        self.browser.start_screencast()
        self.browser.navigate_to('about:blank')
        self.browser._wait_ready()
        # repaint every 100ms for ~2s so the screencast has actual frames,
        # then signal success via the console
        code = "setTimeout(() => console.log('test successful'), 2000); setInterval(() => document.body.innerText = (new Date()).getTime(), 100);"
        self.browser._wait_code_ok(code, 10)
        self.browser._save_screencast()
|
||||
|
||||
|
||||
@tagged('-at_install', 'post_install')
class TestChromeBrowserOddDimensions(TestChromeBrowser):
    # Re-run the inherited screencast test with a non-default window size
    # (presumably chosen so the width is not a "round" encoder-friendly value).
    browser_size = "1215x768"
|
||||
|
||||
|
||||
class TestRequestRemaining(HttpCase):
    """ Reproduce the case where a request is lost between two tests and is
    executed during the second one:

    - Test A browser js finishes with a pending request
    - _wait_remaining_requests misses the request since the thread may not be totally spawned (or correctly named)
    - Test B starts and a SELECT is executed
    - The request is executed and makes a concurrent fetchall
    - Test B tries to fetchall and fails since the cursor is already used by the request

    Note that similar cases can also consume savepoints, make the main cursor readonly, ...
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.thread_a = None
        # this lock is used to ensure the request is executed after test b starts
        cls.main_lock = threading.Lock()
        cls.main_lock.acquire()

    def test_requests_a(self):
        """ First half: spawn a thread whose HTTP request is only sent once
        test_requests_b has started (i.e. after this test's own
        _wait_remaining_requests had a chance to run). """
        class Dummycontroller(Controller):
            @route('/web/concurrent', type='http', auth='public', sitemap=False)
            def wait(c, **params):
                self.assertEqual(request.env.cr.__class__.__name__, 'TestCursor')
                request.env.cr.execute('SELECT 1')
                request.env.cr.fetchall()
                # note that the previous queries are not really needed since the http stack will check the registry
                # but this makes the test more clear and robust
                _logger.info('B finish')

        # clear caches so the freshly declared controller is taken into account
        self.env.registry.clear_caches()
        self.addCleanup(self.env.registry.clear_caches)

        def late_request_thread():
            # In some rare cases the request may arrive after _wait_remaining_requests.
            # This thread reproduces that scenario deterministically: it blocks on
            # main_lock, which test_requests_b releases once it has started.
            _logger.info('Waiting for B to start')
            if self.main_lock.acquire(timeout=10):
                self.url_open("/web/concurrent", timeout=10)
            else:
                _logger.error('Something went wrong and thread was not able to acquire lock')
        TestRequestRemaining.thread_a = threading.Thread(target=late_request_thread)
        self.thread_a.start()

    def test_requests_b(self):
        """ Second half: release the lock so the stray request from
        test_requests_a fires now, and check that the framework rejects it
        (missing/expired test_cursor cookie) instead of letting it use the
        main test cursor. """
        self.env.cr.execute('SELECT 1')
        with self.assertLogs('odoo.tests.common', level="ERROR") as lc:
            self.main_lock.release()
            _logger.info('B started, waiting for A to finish')
            self.thread_a.join()
            # the doubled "it it" below matches the message emitted by odoo.tests.common verbatim
            self.assertEqual(lc.output, ['ERROR:odoo.tests.common:Request with path /web/concurrent has been ignored during test as it it does not contain the test_cursor cookie or it is expired. (required "/base/tests/test_http_case.py:TestRequestRemaining.test_requests_b", got "/base/tests/test_http_case.py:TestRequestRemaining.test_requests_a")'])
        # the main cursor must still be usable by the test itself
        self.env.cr.fetchall()
|
||||
344
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_image.py
Normal file
344
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_image.py
Normal file
|
|
@ -0,0 +1,344 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import base64
|
||||
import io
|
||||
import binascii
|
||||
|
||||
from PIL import Image, ImageDraw, PngImagePlugin
|
||||
|
||||
from odoo import tools
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
def img_open(data):
|
||||
return Image.open(io.BytesIO(data))
|
||||
|
||||
|
||||
class TestImage(TransactionCase):
|
||||
"""Tests for the different image tools helpers."""
|
||||
def setUp(self):
|
||||
super(TestImage, self).setUp()
|
||||
self.bg_color = (135, 90, 123)
|
||||
self.fill_color = (0, 160, 157)
|
||||
|
||||
self.img_1x1_png = base64.b64decode(b'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC')
|
||||
self.img_svg = b'<svg></svg>'
|
||||
self.img_1920x1080_jpeg = tools.image_apply_opt(Image.new('RGB', (1920, 1080)), 'JPEG')
|
||||
# The following image contains a tag `Lens Info` with a value of `3.99mm f/1.8`
|
||||
# This particular tag 0xa432 makes the `exif_transpose` method fail in 5.4.1 < Pillow < 7.2.0
|
||||
self.img_exif_jpg = base64.b64decode(b"""/9j/4AAQSkZJRgABAQAAAQABAAD/4QDQRXhpZgAATU0AKgAAAAgABgESAAMAAAABAAYAAAEaAAUA
|
||||
AAABAAAAVgEbAAUAAAABAAAAXgEoAAMAAAABAAEAAAITAAMAAAABAAEAAIdpAAQAAAABAAAAZgAA
|
||||
AAAAAAABAAAAAQAAAAEAAAABAAWQAAAHAAAABDAyMzGRAQAHAAAABAECAwCgAAAHAAAABDAxMDCg
|
||||
AQADAAAAAf//AACkMgAFAAAABAAAAKgAAAAAAAABjwAAAGQAAAGPAAAAZAAAAAkAAAAFAAAACQAA
|
||||
AAX/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAx
|
||||
NDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIy
|
||||
MjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAADAAYDASIAAhEBAxEB/8QAHwAAAQUBAQEB
|
||||
AQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1Fh
|
||||
ByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZ
|
||||
WmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXG
|
||||
x8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAEC
|
||||
AwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHB
|
||||
CSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0
|
||||
dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX
|
||||
2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD3+iiigD//2Q==""")
|
||||
|
||||
# Draw a red square in the middle of the image, this will be used to
|
||||
# verify crop is working. The border is going to be `self.bg_color` and
|
||||
# the middle is going to be `self.fill_color`.
|
||||
|
||||
# horizontal image (border is left/right)
|
||||
image = Image.new('RGB', (1920, 1080), color=self.bg_color)
|
||||
offset = (image.size[0] - image.size[1]) / 2
|
||||
draw = ImageDraw.Draw(image)
|
||||
draw.rectangle(xy=[
|
||||
(offset, 0),
|
||||
(image.size[0] - offset, image.size[1])
|
||||
], fill=self.fill_color)
|
||||
self.img_1920x1080_png = tools.image_apply_opt(image, 'PNG')
|
||||
|
||||
# vertical image (border is top/bottom)
|
||||
image = Image.new('RGB', (1080, 1920), color=self.bg_color)
|
||||
offset = (image.size[1] - image.size[0]) / 2
|
||||
draw = ImageDraw.Draw(image)
|
||||
draw.rectangle(xy=[
|
||||
(0, offset),
|
||||
(image.size[0], image.size[1] - offset)
|
||||
], fill=self.fill_color)
|
||||
self.img_1080x1920_png = tools.image_apply_opt(image, 'PNG')
|
||||
|
||||
def test_00_base64_to_image(self):
|
||||
"""Test that base64 is correctly opened as a PIL image."""
|
||||
image = img_open(self.img_1x1_png)
|
||||
self.assertEqual(type(image), PngImagePlugin.PngImageFile, "base64 as bytes, correct format")
|
||||
self.assertEqual(image.size, (1, 1), "base64 as bytes, correct size")
|
||||
|
||||
with self.assertRaises(UserError, msg="This file could not be decoded as an image file. Please try with a different file."):
|
||||
image = tools.base64_to_image(b'oazdazpodazdpok')
|
||||
|
||||
with self.assertRaises(UserError, msg="This file could not be decoded as an image file. Please try with a different file."):
|
||||
image = tools.base64_to_image(b'oazdazpodazdpokd')
|
||||
|
||||
def test_01_image_to_base64(self):
|
||||
"""Test that a PIL image is correctly saved as base64."""
|
||||
image = Image.new('RGB', (1, 1))
|
||||
image_base64 = tools.image_to_base64(image, 'PNG')
|
||||
self.assertEqual(image_base64, base64.b64encode(self.img_1x1_png))
|
||||
|
||||
def test_02_image_fix_orientation(self):
|
||||
"""Test that the orientation of images is correct."""
|
||||
|
||||
# Colors that can be distinguished among themselves even with jpeg loss.
|
||||
blue = (0, 0, 255)
|
||||
yellow = (255, 255, 0)
|
||||
green = (0, 255, 0)
|
||||
pink = (255, 0, 255)
|
||||
# Image large enough so jpeg loss is not a huge factor in the corners.
|
||||
size = 50
|
||||
expected = (blue, yellow, green, pink)
|
||||
|
||||
# They are all supposed to be same image: (blue, yellow, green, pink) in
|
||||
# that order, but each encoded with a different orientation.
|
||||
self._orientation_test(1, (blue, yellow, green, pink), size, expected) # top/left
|
||||
self._orientation_test(2, (yellow, blue, pink, green), size, expected) # top/right
|
||||
self._orientation_test(3, (pink, green, yellow, blue), size, expected) # bottom/right
|
||||
self._orientation_test(4, (green, pink, blue, yellow), size, expected) # bottom/left
|
||||
self._orientation_test(5, (blue, green, yellow, pink), size, expected) # left/top
|
||||
self._orientation_test(6, (yellow, pink, blue, green), size, expected) # right/top
|
||||
self._orientation_test(7, (pink, yellow, green, blue), size, expected) # right/bottom
|
||||
self._orientation_test(8, (green, blue, pink, yellow), size, expected) # left/bottom
|
||||
|
||||
def test_03_image_fix_orientation_exif(self):
    """A JPEG carrying an EXIF orientation tag must be rotated when fixed."""
    raw = img_open(self.img_exif_jpg)
    self.assertEqual(raw.size, (6, 3))
    rotated = tools.image_fix_orientation(raw)
    # Width and height swap once the orientation is applied.
    self.assertEqual(rotated.size, (3, 6))
|
||||
|
||||
def test_10_image_process_source(self):
    """Test the source parameter of image_process."""
    self.assertFalse(tools.image_process(False), "return False if source is falsy")
    self.assertEqual(tools.image_process(self.img_svg), self.img_svg, "return source if format is SVG")

    # Passing `quality` forces the image to actually be processed.
    with self.assertRaises(UserError, msg="This file could not be decoded as an image file. Please try with a different file."):
        tools.image_process(b'oazdazpodazdpokd', quality=95)

    processed = img_open(tools.image_process(self.img_1920x1080_jpeg, quality=95))
    self.assertEqual(processed.size, (1920, 1080), "OK return the image")
|
||||
|
||||
def test_11_image_process_size(self):
    """Test the size parameter of image_process."""
    # Each entry: (original image, size parameter, expected result, text)
    cases = [
        (self.img_1920x1080_jpeg, (192, 108), (192, 108), "resize to given size"),
        (self.img_1920x1080_jpeg, (1920, 1080), (1920, 1080), "same size, no change"),
        (self.img_1920x1080_jpeg, (192, None), (192, 108), "set height from ratio"),
        (self.img_1920x1080_jpeg, (0, 108), (192, 108), "set width from ratio"),
        (self.img_1920x1080_jpeg, (192, 200), (192, 108), "adapt to width"),
        (self.img_1920x1080_jpeg, (400, 108), (192, 108), "adapt to height"),
        (self.img_1920x1080_jpeg, (3000, 2000), (1920, 1080), "don't resize above original, both set"),
        (self.img_1920x1080_jpeg, (3000, False), (1920, 1080), "don't resize above original, width set"),
        (self.img_1920x1080_jpeg, (None, 2000), (1920, 1080), "don't resize above original, height set"),
        (self.img_1080x1920_png, (3000, 192), (108, 192), "vertical image, resize if below"),
    ]

    executed = 0
    for source, size_param, expected_size, message in cases:
        result = img_open(tools.image_process(source, size=size_param))
        self.assertEqual(result.size, expected_size, message)
        executed += 1
    # Hard-coded total guards against the case list being accidentally truncated.
    self.assertEqual(executed, 10, "ensure the loop is ran")
|
||||
|
||||
def test_12_image_process_verify_resolution(self):
    """Test the verify_resolution parameter of image_process."""
    processed = tools.image_process(self.img_1920x1080_jpeg, verify_resolution=True)
    self.assertNotEqual(processed, False, "size ok")
    # 50001 x 1000 pixels exceeds the allowed resolution.
    too_large = tools.image_apply_opt(Image.new('RGB', (50001, 1000)), 'PNG')
    with self.assertRaises(UserError, msg="size excessive"):
        tools.image_process(too_large, verify_resolution=True)
|
||||
|
||||
def test_13_image_process_quality(self):
    """Test the quality parameter of image_process."""
    # In every case below the processed output may not be larger than
    # the input.

    # PNG (RGBA then palette mode) does not apply quality, just optimize.
    for mode in ('RGBA', 'P'):
        png_source = tools.image_apply_opt(Image.new(mode, (1080, 1920)), 'PNG')
        self.assertLessEqual(len(tools.image_process(png_source)), len(png_source))

    # JPEG: optimize + reduced quality.
    jpeg_result = tools.image_process(self.img_1920x1080_jpeg)
    self.assertLessEqual(len(jpeg_result), len(self.img_1920x1080_jpeg))

    # GIF does not apply quality, just optimize.
    gif_source = tools.image_apply_opt(Image.new('RGB', (1080, 1920)), 'GIF')
    self.assertLessEqual(len(tools.image_process(gif_source)), len(gif_source))
|
||||
|
||||
def test_14_image_process_crop(self):
    """Test the crop parameter of image_process.

    Each case processes an image with a size/crop combination, then
    verifies the resulting dimensions and the color found at the middle
    of each of the four edges (top, bottom, left, right).
    """
    # Optimized PNG use palette, getpixel below will return palette value.
    fill = 0
    bg = 1

    # Images with small dimensions
    small_width = tools.image_apply_opt(Image.new('RGBA', (1, 16)), 'PNG')
    small_height = tools.image_apply_opt(Image.new('RGBA', (16, 1)), 'PNG')

    # Format of `tests`: (original base64 image, size parameter, crop parameter, res size, res color (top, bottom, left, right), text)
    tests = [
        (self.img_1920x1080_png, None, None, (1920, 1080), (fill, fill, bg, bg), "horizontal, verify initial"),
        (self.img_1920x1080_png, (2000, 2000), 'center', (1080, 1080), (fill, fill, fill, fill), "horizontal, crop biggest possible"),
        (self.img_1920x1080_png, (2000, 4000), 'center', (540, 1080), (fill, fill, fill, fill), "horizontal, size vertical, limit height"),
        (self.img_1920x1080_png, (4000, 2000), 'center', (1920, 960), (fill, fill, bg, bg), "horizontal, size horizontal, limit width"),
        (self.img_1920x1080_png, (512, 512), 'center', (512, 512), (fill, fill, fill, fill), "horizontal, type center"),
        (self.img_1920x1080_png, (512, 512), 'top', (512, 512), (fill, fill, fill, fill), "horizontal, type top"),
        (self.img_1920x1080_png, (512, 512), 'bottom', (512, 512), (fill, fill, fill, fill), "horizontal, type bottom"),
        (self.img_1920x1080_png, (512, 512), 'wrong', (512, 512), (fill, fill, fill, fill), "horizontal, wrong crop value, use center"),
        (self.img_1920x1080_png, (192, 0), None, (192, 108), (fill, fill, bg, bg), "horizontal, not cropped, just do resize"),
        (small_height, (25, 50), 'center', (1, 1), (fill, fill, fill, fill), "horizontal, small height, size vertical"),

        (self.img_1080x1920_png, None, None, (1080, 1920), (bg, bg, fill, fill), "vertical, verify initial"),
        (self.img_1080x1920_png, (2000, 2000), 'center', (1080, 1080), (fill, fill, fill, fill), "vertical, crop biggest possible"),
        (self.img_1080x1920_png, (2000, 4000), 'center', (960, 1920), (bg, bg, fill, fill), "vertical, size vertical, limit height"),
        (self.img_1080x1920_png, (4000, 2000), 'center', (1080, 540), (fill, fill, fill, fill), "vertical, size horizontal, limit width"),
        (self.img_1080x1920_png, (512, 512), 'center', (512, 512), (fill, fill, fill, fill), "vertical, type center"),
        (self.img_1080x1920_png, (512, 512), 'top', (512, 512), (bg, fill, fill, fill), "vertical, type top"),
        (self.img_1080x1920_png, (512, 512), 'bottom', (512, 512), (fill, bg, fill, fill), "vertical, type bottom"),
        (self.img_1080x1920_png, (512, 512), 'wrong', (512, 512), (fill, fill, fill, fill), "vertical, wrong crop value, use center"),
        (self.img_1080x1920_png, (108, 0), None, (108, 192), (bg, bg, fill, fill), "vertical, not cropped, just do resize"),
        (small_width, (50, 25), 'center', (1, 1), (fill, fill, fill, fill), "vertical, small width, size horizontal"),
    ]

    count = 0
    for test in tests:
        count = count + 1
        # process the image, pass quality to make sure the result is palette
        image = img_open(tools.image_process(test[0], size=test[1], crop=test[2], quality=95))
        # verify size
        self.assertEqual(image.size, test[3], "%s - correct size" % test[5])

        # Use floor division: `Image.getpixel` requires integer
        # coordinates; true division (`/ 2`) produced floats, which
        # recent Pillow versions reject.
        half_width, half_height = image.size[0] // 2, image.size[1] // 2
        top, bottom, left, right = 0, image.size[1] - 1, 0, image.size[0] - 1
        # verify top
        px = (half_width, top)
        self.assertEqual(image.getpixel(px), test[4][0], "%s - color top (%s, %s)" % (test[5], px[0], px[1]))
        # verify bottom
        px = (half_width, bottom)
        self.assertEqual(image.getpixel(px), test[4][1], "%s - color bottom (%s, %s)" % (test[5], px[0], px[1]))
        # verify left
        px = (left, half_height)
        self.assertEqual(image.getpixel(px), test[4][2], "%s - color left (%s, %s)" % (test[5], px[0], px[1]))
        # verify right
        px = (right, half_height)
        self.assertEqual(image.getpixel(px), test[4][3], "%s - color right (%s, %s)" % (test[5], px[0], px[1]))

    self.assertEqual(count, 2 * 10, "ensure the loop is ran")
|
||||
|
||||
def test_15_image_process_colorize(self):
    """Test the colorize parameter of image_process."""
    # Verify the initial condition: a fully transparent black RGBA pixel.
    transparent = Image.new('RGBA', (1, 1))
    self.assertEqual(transparent.mode, 'RGBA')
    self.assertEqual(transparent.getpixel((0, 0)), (0, 0, 0, 0))
    rgba = tools.image_apply_opt(transparent, 'PNG')

    # Colorize picks a random color: the result is RGB and no longer black.
    colorized = img_open(tools.image_process(rgba, colorize=True))
    self.assertEqual(colorized.mode, 'RGB')
    self.assertNotEqual(colorized.getpixel((0, 0)), (0, 0, 0))
|
||||
|
||||
def test_16_image_process_format(self):
    """Test the format parameter of image_process."""
    result = img_open(tools.image_process(self.img_1920x1080_jpeg, output_format='PNG'))
    self.assertEqual(result.format, 'PNG', "change format to PNG")

    result = img_open(tools.image_process(self.img_1x1_png, output_format='JpEg'))
    self.assertEqual(result.format, 'JPEG', "change format to JPEG (case insensitive)")

    result = img_open(tools.image_process(self.img_1920x1080_jpeg, output_format='BMP'))
    self.assertEqual(result.format, 'PNG', "change format to BMP converted to PNG")

    rgba_png = tools.image_apply_opt(Image.new('RGBA', (108, 192)), 'PNG')
    result = img_open(tools.image_process(rgba_png, output_format='jpeg'))
    self.assertEqual(result.format, 'JPEG', "change format PNG with RGBA to JPEG")

    # Pass quality to force the image to be processed.
    tiff_source = tools.image_apply_opt(Image.new('RGB', (108, 192)), 'TIFF')
    result = img_open(tools.image_process(tiff_source, quality=95))
    self.assertEqual(result.format, 'JPEG', "unsupported format to JPEG")
|
||||
|
||||
def test_20_image_data_uri(self):
    """Test that image_data_uri is working as expected."""
    encoded = base64.b64encode(self.img_1x1_png)
    expected_uri = 'data:image/png;base64,' + encoded.decode('ascii')
    self.assertEqual(tools.image_data_uri(encoded), expected_uri)
|
||||
|
||||
def test_21_image_guess_size_from_field_name(self):
    """Check size guessing from an image field name's numeric suffix."""
    guess = tools.image_guess_size_from_field_name
    # Each entry: (field name, expected guessed size).
    cases = [
        ('', (0, 0)),                        # empty field name
        ('custom_field', (0, 0)),            # custom field name
        ('x_field', (0, 0)),                 # starts with 'x_'
        ('x_studio_image_1', (0, 0)),        # 'x_' prefix, suffix below 16
        ('x_studio_image_32', (0, 0)),       # 'x_' prefix, suffix above 16
        ('image_15', (0, 0)),                # suffix below 16
        ('image_16', (16, 16)),              # suffix equal to 16
        ('image_32', (32, 32)),              # suffix above 16
        ('image_1920_1080', (1080, 1080)),   # two numeric parts: last one wins
        ('image_32.5', (0, 0)),              # float suffix is rejected
        ('image32', (0, 0)),                 # number not separated by underscore
    ]
    for field_name, expected in cases:
        self.assertEqual(guess(field_name), expected)
|
||||
|
||||
def _assertAlmostEqualSequence(self, rgb1, rgb2, delta=10):
    """Assert two color sequences match component-wise within ``delta``."""
    self.assertEqual(len(rgb1), len(rgb2))
    for index, (left, right) in enumerate(zip(rgb1, rgb2)):
        self.assertAlmostEqual(left, right, delta=delta, msg="%s vs %s at %d" % (rgb1, rgb2, index))
|
||||
|
||||
def _get_exif_colored_square(self, orientation, colors, size):
    """Build a JPEG square with 4 colored quadrants and the given EXIF orientation tag."""
    square = Image.new('RGB', (size, size), color=self.bg_color)
    canvas = ImageDraw.Draw(square)
    half = size // 2
    # Paint a distinct color on each quadrant so tests can detect which
    # corner every color ends up on once orientation is applied.
    canvas.rectangle(xy=[(0, 0), (half, half)], fill=colors[0])        # top/left
    canvas.rectangle(xy=[(half, 0), (size, half)], fill=colors[1])     # top/right
    canvas.rectangle(xy=[(0, half), (half, size)], fill=colors[2])     # bottom/left
    canvas.rectangle(xy=[(half, half), (size, size)], fill=colors[3])  # bottom/right
    # Minimal EXIF blob carrying a single orientation entry built from the
    # `orientation` parameter.
    exif = b'Exif\x00\x00II*\x00\x08\x00\x00\x00\x01\x00\x12\x01\x03\x00\x01\x00\x00\x00' + bytes([orientation]) + b'\x00\x00\x00\x00\x00\x00\x00'
    # Save the image with the exif tag attached.
    return tools.image_apply_opt(square, 'JPEG', exif=exif)
|
||||
|
||||
def _orientation_test(self, orientation, colors, size, expected):
    """Encode a colored square with ``orientation``, fix it, check the corners."""
    # Generate the test image based on orientation and order of colors.
    encoded = self._get_exif_colored_square(orientation, colors, size)
    # Re-open so the orientation tag is read, then fix it.
    fixed = tools.image_fix_orientation(img_open(encoded))
    last = size - 1
    # After fixing, colors must sit at their canonical corners
    # (blue, yellow, green, pink).
    self._assertAlmostEqualSequence(fixed.getpixel((0, 0)), expected[0])        # top/left
    self._assertAlmostEqualSequence(fixed.getpixel((last, 0)), expected[1])     # top/right
    self._assertAlmostEqualSequence(fixed.getpixel((0, last)), expected[2])     # bottom/left
    self._assertAlmostEqualSequence(fixed.getpixel((last, last)), expected[3])  # bottom/right
|
||||
|
||||
def test_ptype_image_to_jpeg(self):
    """Palette-mode images are converted to RGB when saved as JPEG."""
    palette_img = Image.new('P', (1, 1), color='red')
    rgb_img = Image.new('RGB', (1, 1), color='red')
    # Both must serialize to the exact same JPEG bytes.
    self.assertEqual(
        tools.image.image_apply_opt(palette_img, 'JPEG'),
        tools.image.image_apply_opt(rgb_img, 'JPEG'),
    )
|
||||
|
|
@ -0,0 +1,665 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from datetime import date
|
||||
from psycopg2 import IntegrityError, ProgrammingError
|
||||
|
||||
import odoo
|
||||
from odoo.exceptions import UserError, ValidationError, AccessError
|
||||
from odoo.tools import mute_logger
|
||||
from odoo.tests import common
|
||||
from odoo.addons.base.tests.common import TransactionCaseWithUserDemo
|
||||
from odoo import Command
|
||||
|
||||
|
||||
class TestServerActionsBase(TransactionCaseWithUserDemo):
    """Shared fixture for server-action tests.

    Creates a country and a partner to run actions against, resolves the
    ir.model / ir.model.fields metadata records that the actions reference,
    and creates a base 'code' server action writing a comment on the record.
    """

    def setUp(self):
        super(TestServerActionsBase, self).setUp()

        # Data on which we will run the server action
        self.test_country = self.env['res.country'].create({
            'name': 'TestingCountry',
            'code': 'TY',
            'address_format': 'SuperFormat',
        })
        self.test_partner = self.env['res.partner'].create({
            'city': 'OrigCity',
            'country_id': self.test_country.id,
            'email': 'test.partner@test.example.com',
            'name': 'TestingPartner',
        })
        # Context mimicking a client call on the test partner.
        self.context = {
            'active_model': 'res.partner',
            'active_id': self.test_partner.id,
        }

        # Model data: metadata records later used in `fields_lines` and
        # `crud_model_id` of the actions under test.
        Model = self.env['ir.model']
        Fields = self.env['ir.model.fields']
        self.comment_html = '<p>MyComment</p>'
        self.res_partner_model = Model.search([('model', '=', 'res.partner')])
        self.res_partner_name_field = Fields.search([('model', '=', 'res.partner'), ('name', '=', 'name')])
        self.res_partner_city_field = Fields.search([('model', '=', 'res.partner'), ('name', '=', 'city')])
        self.res_partner_country_field = Fields.search([('model', '=', 'res.partner'), ('name', '=', 'country_id')])
        self.res_partner_parent_field = Fields.search([('model', '=', 'res.partner'), ('name', '=', 'parent_id')])
        self.res_partner_children_field = Fields.search([('model', '=', 'res.partner'), ('name', '=', 'child_ids')])
        self.res_partner_category_field = Fields.search([('model', '=', 'res.partner'), ('name', '=', 'category_id')])
        self.res_partner_latitude_field = Fields.search([('model', '=', 'res.partner'), ('name', '=', 'partner_latitude')])
        self.res_country_model = Model.search([('model', '=', 'res.country')])
        self.res_country_name_field = Fields.search([('model', '=', 'res.country'), ('name', '=', 'name')])
        self.res_country_code_field = Fields.search([('model', '=', 'res.country'), ('name', '=', 'code')])
        self.res_partner_category_model = Model.search([('model', '=', 'res.partner.category')])
        self.res_partner_category_name_field = Fields.search([('model', '=', 'res.partner.category'), ('name', '=', 'name')])

        # create server action that writes `comment_html` on the active record
        self.action = self.env['ir.actions.server'].create({
            'name': 'TestAction',
            'model_id': self.res_partner_model.id,
            'model_name': 'res.partner',
            'state': 'code',
            'code': 'record.write({"comment": "%s"})' % self.comment_html,
        })
|
||||
|
||||
|
||||
class TestServerActions(TestServerActionsBase):
    """Behavior tests for ir.actions.server: code/crud/multi actions,
    group restrictions, sorting, copying, and permission handling."""

    def test_00_action(self):
        """Run the base code action, then create and remove its contextual binding."""
        self.action.with_context(self.context).run()
        self.assertEqual(self.test_partner.comment, self.comment_html, 'ir_actions_server: invalid condition check')
        self.test_partner.write({'comment': False})

        # Do: create contextual action
        self.action.create_action()
        self.assertEqual(self.action.binding_model_id.model, 'res.partner')

        # Do: remove contextual action
        self.action.unlink_action()
        self.assertFalse(self.action.binding_model_id)

    def test_10_code(self):
        """A 'code' action may create records; a finished run returns False."""
        self.action.write({
            'state': 'code',
            'code': ("partner_name = record.name + '_code'\n"
                     "record.env['res.partner'].create({'name': partner_name})"),
        })
        run_res = self.action.with_context(self.context).run()
        self.assertFalse(run_res, 'ir_actions_server: code server action correctly finished should return False')

        partners = self.test_partner.search([('name', 'ilike', 'TestingPartner_code')])
        self.assertEqual(len(partners), 1, 'ir_actions_server: 1 new partner should have been created')

    def test_20_crud_create(self):
        """An 'object_create' action creates a record in another model with equation-valued fields."""
        # Do: create a new record in another model
        self.action.write({
            'state': 'object_create',
            'crud_model_id': self.res_country_model.id,
            'link_field_id': False,
            'fields_lines': [Command.clear(),
                             Command.create({'col1': self.res_country_name_field.id, 'value': 'record.name', 'evaluation_type': 'equation'}),
                             Command.create({'col1': self.res_country_code_field.id, 'value': 'record.name[0:2]', 'evaluation_type': 'equation'})],
        })
        run_res = self.action.with_context(self.context).run()
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new country created
        country = self.test_country.search([('name', 'ilike', 'TestingPartner')])
        self.assertEqual(len(country), 1, 'ir_actions_server: TODO')
        self.assertEqual(country.code, 'TE', 'ir_actions_server: TODO')

    def test_20_crud_create_link_many2one(self):
        """An 'object_create' action can link the new record through a many2one field."""
        _city = 'TestCity'
        _name = 'TestNew'

        # Do: create a new record in the same model and link it with a many2one
        self.action.write({
            'state': 'object_create',
            'crud_model_id': self.action.model_id.id,
            'link_field_id': self.res_partner_parent_field.id,
            'fields_lines': [Command.create({'col1': self.res_partner_name_field.id, 'value': _name}),
                             Command.create({'col1': self.res_partner_city_field.id, 'value': _city})],
        })
        run_res = self.action.with_context(self.context).run()
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new partner created
        partner = self.test_partner.search([('name', 'ilike', _name)])
        self.assertEqual(len(partner), 1, 'ir_actions_server: TODO')
        self.assertEqual(partner.city, _city, 'ir_actions_server: TODO')
        # Test: new partner linked
        self.assertEqual(self.test_partner.parent_id, partner, 'ir_actions_server: TODO')

    def test_20_crud_create_link_one2many(self):
        """An 'object_create' action can link the new record through a one2many field."""
        _name = 'TestNew'

        # Do: create a new record in the same model and link it with a one2many
        self.action.write({
            'state': 'object_create',
            'crud_model_id': self.action.model_id.id,
            'link_field_id': self.res_partner_children_field.id,
            'fields_lines': [Command.create({'col1': self.res_partner_name_field.id, 'value': _name})],
        })
        run_res = self.action.with_context(self.context).run()
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new partner created
        partner = self.test_partner.search([('name', 'ilike', _name)])
        self.assertEqual(len(partner), 1, 'ir_actions_server: TODO')
        self.assertEqual(partner.name, _name, 'ir_actions_server: TODO')
        # Test: new partner linked
        self.assertIn(partner, self.test_partner.child_ids, 'ir_actions_server: TODO')

    def test_20_crud_create_link_many2many(self):
        """An 'object_create' action can link the new record through a many2many field."""
        # Do: create a new record in another model
        self.action.write({
            'state': 'object_create',
            'crud_model_id': self.res_partner_category_model.id,
            'link_field_id': self.res_partner_category_field.id,
            'fields_lines': [Command.create({'col1': self.res_partner_category_name_field.id, 'value': 'record.name', 'evaluation_type': 'equation'})],
        })
        run_res = self.action.with_context(self.context).run()
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: new category created
        category = self.env['res.partner.category'].search([('name', 'ilike', 'TestingPartner')])
        self.assertEqual(len(category), 1, 'ir_actions_server: TODO')
        self.assertIn(category, self.test_partner.category_id)

    def test_30_crud_write(self):
        """An 'object_write' action updates the active record; other fields are untouched."""
        _name = 'TestNew'

        # Do: update partner name
        self.action.write({
            'state': 'object_write',
            'fields_lines': [Command.create({'col1': self.res_partner_name_field.id, 'value': _name})],
        })
        run_res = self.action.with_context(self.context).run()
        self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
        # Test: partner updated
        partner = self.test_partner.search([('name', 'ilike', _name)])
        self.assertEqual(len(partner), 1, 'ir_actions_server: TODO')
        self.assertEqual(partner.city, 'OrigCity', 'ir_actions_server: TODO')

    def test_object_write_equation(self):
        """An equation-valued 'object_write' evaluates per record over all active_ids."""
        # Do: update partners city
        self.action.write({
            'state': 'object_write',
            'fields_lines': [Command.create({
                'col1': self.res_partner_city_field.id,
                'evaluation_type': 'equation',
                'value': 'record.id',
            })],
        })
        partners = self.test_partner + self.test_partner.copy()
        self.action.with_context(self.context, active_ids=partners.ids).run()
        # Test: partners updated — each city holds its own record id.
        self.assertEqual(partners[0].city, str(partners[0].id))
        self.assertEqual(partners[1].city, str(partners[1].id))

    @mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
    def test_40_multi(self):
        """A 'multi' action runs its children in sequence and returns the last action dict."""
        # Data: 2 server actions that will be nested
        action1 = self.action.create({
            'name': 'Subaction1',
            'sequence': 1,
            'model_id': self.res_partner_model.id,
            'state': 'code',
            'code': 'action = {"type": "ir.actions.act_window"}',
        })
        action2 = self.action.create({
            'name': 'Subaction2',
            'sequence': 2,
            'model_id': self.res_partner_model.id,
            'crud_model_id': self.res_partner_model.id,
            'state': 'object_create',
            'fields_lines': [Command.create({'col1': self.res_partner_name_field.id, 'value': 'RaoulettePoiluchette'}),
                             Command.create({'col1': self.res_partner_city_field.id, 'value': 'TestingCity'})],
        })
        action3 = self.action.create({
            'name': 'Subaction3',
            'sequence': 3,
            'model_id': self.res_partner_model.id,
            'state': 'code',
            'code': 'action = {"type": "ir.actions.act_url"}',
        })
        self.action.write({
            'state': 'multi',
            'child_ids': [Command.set([action1.id, action2.id, action3.id])],
        })

        # Do: run the action
        res = self.action.with_context(self.context).run()

        # Test: new partner created
        # currently res_partner overrides default['name'] whatever its value
        partner = self.test_partner.search([('name', 'ilike', 'RaoulettePoiluchette')])
        self.assertEqual(len(partner), 1)
        # Test: action returned (the last child's result wins)
        self.assertEqual(res.get('type'), 'ir.actions.act_url')

        # Test loops: an action may not contain itself as a child.
        with self.assertRaises(ValidationError):
            self.action.write({
                'child_ids': [Command.set([self.action.id])]
            })

    def test_50_groups(self):
        """ check the action is returned only for groups dedicated to user """
        Actions = self.env['ir.actions.actions']

        group0 = self.env['res.groups'].create({'name': 'country group'})

        self.context = {
            'active_model': 'res.country',
            'active_id': self.test_country.id,
        }

        # Do: update model and group
        self.action.write({
            'model_id': self.res_country_model.id,
            'binding_model_id': self.res_country_model.id,
            'groups_id': [Command.link(group0.id)],
            'code': 'record.write({"vat_label": "VatFromTest"})',
        })

        # Test: action is not returned (user is not in group0)
        bindings = Actions.get_bindings('res.country')
        self.assertFalse(bindings)

        # Running is also denied, and the record stays untouched.
        with self.assertRaises(AccessError):
            self.action.with_context(self.context).run()
        self.assertFalse(self.test_country.vat_label)

        # add group to the user, and test again
        self.env.user.write({'groups_id': [Command.link(group0.id)]})

        bindings = Actions.get_bindings('res.country')
        self.assertItemsEqual(bindings.get('action'), self.action.read(['name', 'sequence', 'binding_view_types']))

        self.action.with_context(self.context).run()
        self.assertEqual(self.test_country.vat_label, 'VatFromTest', 'vat label should be changed to VatFromTest')

    def test_60_sort(self):
        """ check the actions sorted by sequence """
        Actions = self.env['ir.actions.actions']

        # Do: update model
        self.action.write({
            'model_id': self.res_country_model.id,
            'binding_model_id': self.res_country_model.id,
        })
        self.action2 = self.action.copy({'name': 'TestAction2', 'sequence': 1})

        # Test: actions returned ordered by sequence (1 before default 5)
        bindings = Actions.get_bindings('res.country')
        self.assertEqual([vals.get('name') for vals in bindings['action']], ['TestAction2', 'TestAction'])
        self.assertEqual([vals.get('sequence') for vals in bindings['action']], [1, 5])

    def test_70_copy_action(self):
        """Copying a server action keeps its state, unlike ir.actions.todo."""
        # first check that the base case (reset state) works normally
        r = self.env['ir.actions.todo'].create({
            'action_id': self.action.id,
            'state': 'done',
        })
        self.assertEqual(r.state, 'done')
        self.assertEqual(
            r.copy().state, 'open',
            "by default state should be reset by copy"
        )

        # then check that on server action we've changed that
        self.assertEqual(
            self.action.copy().state, 'code',
            "copying a server action should not reset the state"
        )

    def test_80_permission(self):
        """Server actions run with the invoking user's access rights."""
        self.action.write({
            'state': 'code',
            'code': """record.write({'date': datetime.date.today()})""",
        })

        user_demo = self.user_demo
        self_demo = self.action.with_user(user_demo.id)

        # can write on contact partner
        self.test_partner.type = "contact"
        self.test_partner.with_user(user_demo.id).check_access_rule("write")

        self_demo.with_context(self.context).run()
        self.assertEqual(self.test_partner.date, date.today())

        # but can not write on private address
        self.test_partner.type = "private"
        with self.assertRaises(AccessError):
            self.test_partner.with_user(user_demo.id).check_access_rule("write")
        # nor execute a server action on it
        with self.assertRaises(AccessError), mute_logger('odoo.addons.base.models.ir_actions'):
            self_demo.with_context(self.context).run()

    def test_90_convert_to_float(self):
        """Field-line evaluation coerces values of float-type fields to float."""
        # make sure eval_value convert the value into float for float-type fields
        self.action.write({
            'state': 'object_write',
            'fields_lines': [Command.create({'col1': self.res_partner_latitude_field.id, 'value': '20.99'})],
        })
        line = self.action.fields_lines[0]
        self.assertEqual(line.eval_value()[line.id], 20.99)
|
||||
|
||||
|
||||
class TestCustomFields(common.TransactionCase):
|
||||
MODEL = 'res.partner'
|
||||
COMODEL = 'res.users'
|
||||
|
||||
def setUp(self):
    """Snapshot the model's fields and register registry cleanups.

    Cleanups registered here run after the test, in LIFO order per
    unittest's addCleanup contract: caches are cleared, registry changes
    reset, and finally the field set is asserted unchanged.
    """
    # check that the registry is properly reset
    fnames = set(self.registry[self.MODEL]._fields)

    # Decorator form registers the function itself as a cleanup callback.
    @self.addCleanup
    def check_registry():
        assert set(self.registry[self.MODEL]._fields) == fnames

    self.addCleanup(self.registry.reset_changes)
    self.addCleanup(self.registry.clear_caches)

    super().setUp()
|
||||
|
||||
def create_field(self, name, *, field_type='char'):
    """Create a custom field named ``name`` on MODEL and return its record."""
    model_record = self.env['ir.model'].search([('model', '=', self.MODEL)])
    new_field = self.env['ir.model.fields'].create({
        'model_id': model_record.id,
        'name': name,
        'field_description': name,
        'ttype': field_type,
    })
    # Creating the metadata record must immediately register the field.
    self.assertIn(name, self.env[self.MODEL]._fields)
    return new_field
|
||||
|
||||
def create_view(self, name):
    """Create a tree view on MODEL referencing the field ``name``."""
    arch = '<tree string="X"><field name="%s"/></tree>' % name
    return self.env['ir.ui.view'].create({
        'name': 'yet another view',
        'model': self.MODEL,
        'arch': arch,
    })
|
||||
|
||||
def test_create_custom(self):
    """Custom field names must start with 'x_'."""
    # 'foo' lacks the mandatory prefix and must be rejected.
    with self.assertRaises(ValidationError):
        self.create_field('foo')
|
||||
|
||||
def test_rename_custom(self):
    """Custom field names must keep the 'x_' prefix when renamed."""
    created = self.create_field('x_foo')
    # Dropping the prefix through a rename must be rejected.
    with self.assertRaises(ValidationError):
        created.name = 'foo'
|
||||
|
||||
def test_create_valid(self):
    """Field names must be valid PostgreSQL identifiers."""
    # An embedded space is not a valid identifier character.
    with self.assertRaises(ValidationError):
        self.create_field('x_foo bar')
|
||||
|
||||
def test_rename_valid(self):
    """Renamed field names must still be valid PostgreSQL identifiers."""
    created = self.create_field('x_foo')
    # An embedded space is not a valid identifier character.
    with self.assertRaises(ValidationError):
        created.name = 'x_foo bar'
|
||||
|
||||
def test_create_unique(self):
    """Two fields with the same name cannot coexist on one model."""
    self.create_field('x_foo')
    # The duplicate creation must hit the database unique constraint.
    with self.assertRaises(IntegrityError), mute_logger('odoo.sql_db'):
        self.create_field('x_foo')
|
||||
|
||||
def test_rename_unique(self):
    """ one cannot rename a field to the name of another field on the same model """
    field1 = self.create_field('x_foo')
    field2 = self.create_field('x_bar')
    # the rename collides with field1's name at the SQL level
    with self.assertRaises(IntegrityError), mute_logger('odoo.sql_db'):
        field2.name = field1.name
|
||||
|
||||
def test_remove_without_view(self):
    """ try removing a custom field that does not occur in views """
    field = self.create_field('x_foo')
    # no view references the field, so the unlink must succeed
    field.unlink()
|
||||
|
||||
def test_rename_without_view(self):
    """ try renaming a custom field that does not occur in views """
    field = self.create_field('x_foo')
    # no view references the field, so the rename must succeed
    field.name = 'x_bar'
|
||||
|
||||
@mute_logger('odoo.addons.base.models.ir_ui_view')
def test_remove_with_view(self):
    """ try removing a custom field that occurs in a view """
    field = self.create_field('x_foo')
    self.create_view('x_foo')

    # try to delete the field, this should fail but not modify the registry
    with self.assertRaises(UserError):
        field.unlink()
    self.assertIn('x_foo', self.env[self.MODEL]._fields)
|
||||
|
||||
@mute_logger('odoo.addons.base.models.ir_ui_view')
def test_rename_with_view(self):
    """ try renaming a custom field that occurs in a view """
    field = self.create_field('x_foo')
    self.create_view('x_foo')

    # try to rename the field, this should fail but not modify the registry
    with self.assertRaises(UserError):
        field.name = 'x_bar'
    self.assertIn('x_foo', self.env[self.MODEL]._fields)
|
||||
|
||||
def test_unlink_base(self):
    """ one cannot delete a non-custom field except for uninstallation """
    field = self.env['ir.model.fields']._get(self.MODEL, 'ref')
    self.assertTrue(field)

    with self.assertRaisesRegex(UserError, 'This column contains module data'):
        field.unlink()

    # but it works in the context of uninstalling a module
    field.with_context(_force_unlink=True).unlink()
|
||||
|
||||
def test_unlink_with_inverse(self):
    """ create a custom o2m and then delete its m2o inverse """
    model = self.env['ir.model']._get(self.MODEL)
    comodel = self.env['ir.model']._get(self.COMODEL)

    # the many2one lives on the comodel and points back to the model
    m2o_field = self.env['ir.model.fields'].create({
        'model_id': comodel.id,
        'name': 'x_my_m2o',
        'field_description': 'my_m2o',
        'ttype': 'many2one',
        'relation': self.MODEL,
    })

    # the one2many on the model uses the m2o above as its inverse
    o2m_field = self.env['ir.model.fields'].create({
        'model_id': model.id,
        'name': 'x_my_o2m',
        'field_description': 'my_o2m',
        'ttype': 'one2many',
        'relation': self.COMODEL,
        'relation_field': m2o_field.name,
    })

    # normal mode: you cannot break dependencies
    with self.assertRaises(UserError):
        m2o_field.unlink()

    # uninstall mode: unlink dependent fields
    m2o_field.with_context(_force_unlink=True).unlink()
    self.assertFalse(o2m_field.exists())
|
||||
|
||||
def test_unlink_with_dependant(self):
    """ create a related field, then delete its dependency """
    # Also applies to compute fields
    comodel = self.env['ir.model'].search([('model', '=', self.COMODEL)])

    field = self.create_field('x_my_char')

    # related field on the comodel traverses back into x_my_char
    dependant = self.env['ir.model.fields'].create({
        'model_id': comodel.id,
        'name': 'x_oh_boy',
        'field_description': 'x_oh_boy',
        'ttype': 'char',
        'related': 'partner_id.x_my_char',
    })

    # normal mode: you cannot break dependencies
    with self.assertRaises(UserError):
        field.unlink()

    # uninstall mode: unlink dependent fields
    field.with_context(_force_unlink=True).unlink()
    self.assertFalse(dependant.exists())
|
||||
|
||||
def test_unlink_inherited_custom(self):
    """ Creating a field on a model automatically creates an inherited field
        in the comodel, and the latter can only be removed by deleting the
        "parent" field.
    """
    field = self.create_field('x_foo')
    self.assertEqual(field.state, 'manual')

    # the mirrored field on the comodel is flagged as a base field
    inherited_field = self.env['ir.model.fields']._get(self.COMODEL, 'x_foo')
    self.assertTrue(inherited_field)
    self.assertEqual(inherited_field.state, 'base')

    # one cannot delete the inherited field itself
    with self.assertRaises(UserError):
        inherited_field.unlink()

    # but the inherited field is deleted when its parent field is
    field.unlink()
    self.assertFalse(field.exists())
    self.assertFalse(inherited_field.exists())
    # no trace of 'x_foo' may remain on either model
    self.assertFalse(self.env['ir.model.fields'].search_count([
        ('model', 'in', [self.MODEL, self.COMODEL]),
        ('name', '=', 'x_foo'),
    ]))
|
||||
|
||||
def test_create_binary(self):
    """ binary custom fields should be created as attachment=True to avoid
        bloating the DB when creating e.g. image fields via studio
    """
    self.create_field('x_image', field_type='binary')
    custom_binary = self.env[self.MODEL]._fields['x_image']

    self.assertTrue(custom_binary.attachment)
|
||||
|
||||
def test_related_field(self):
    """ create a custom related field, and check filled values """
    #
    # Add a custom field equivalent to the following definition:
    #
    # class Partner(models.Model)
    #     _inherit = 'res.partner'
    #     x_oh_boy = fields.Char(related="country_id.code", store=True)
    #

    # pick N=100 records in comodel
    countries = self.env['res.country'].search([('code', '!=', False)], limit=100)
    self.assertEqual(len(countries), 100, "Not enough records in comodel 'res.country'")

    # create records in model, with N distinct values for the related field
    partners = self.env['res.partner'].create([
        {'name': country.code, 'country_id': country.id} for country in countries
    ])
    self.env.flush_all()

    # create a non-computed field, and assert how many queries it takes
    # NOTE(review): query_count is an implementation-dependent baseline —
    # it must be updated if the field-creation code path changes
    model_id = self.env['ir.model']._get_id('res.partner')
    query_count = 41
    with self.assertQueryCount(query_count):
        self.env.registry.clear_caches()
        self.env['ir.model.fields'].create({
            'model_id': model_id,
            'name': 'x_oh_box',
            'field_description': 'x_oh_box',
            'ttype': 'char',
            'store': True,
        })

    # same with a related field, it only takes 8 extra queries
    with self.assertQueryCount(query_count + 8):
        self.env.registry.clear_caches()
        self.env['ir.model.fields'].create({
            'model_id': model_id,
            'name': 'x_oh_boy',
            'field_description': 'x_oh_boy',
            'ttype': 'char',
            'related': 'country_id.code',
            'store': True,
        })

    # check the computed values
    for partner in partners:
        self.assertEqual(partner.x_oh_boy, partner.country_id.code)
|
||||
|
||||
def test_relation_of_a_custom_field(self):
    """ change the relation model of a custom field """
    model = self.env['ir.model'].search([('model', '=', self.MODEL)])
    field = self.env['ir.model.fields'].create({
        'name': 'x_foo',
        'model_id': model.id,
        'field_description': 'x_foo',
        'ttype': 'many2many',
        'relation': self.COMODEL,
    })

    # change the relation: 'foo' is not an existing model, so it is rejected
    with self.assertRaises(ValidationError):
        field.relation = 'foo'
|
||||
|
||||
def test_selection(self):
    """ custom selection field: check that adding, removing and renaming
        selection values is reflected on the field and on stored records
    """
    Model = self.env[self.MODEL]
    model = self.env['ir.model'].search([('model', '=', self.MODEL)])
    field = self.env['ir.model.fields'].create({
        'model_id': model.id,
        'name': 'x_sel',
        'field_description': "Custom Selection",
        'ttype': 'selection',
        'selection_ids': [
            Command.create({'value': 'foo', 'name': 'Foo', 'sequence': 0}),
            Command.create({'value': 'bar', 'name': 'Bar', 'sequence': 1}),
        ],
    })

    x_sel = Model._fields['x_sel']
    self.assertEqual(x_sel.type, 'selection')
    self.assertEqual(x_sel.selection, [('foo', 'Foo'), ('bar', 'Bar')])

    # add selection value 'baz'
    field.selection_ids.create({
        'field_id': field.id, 'value': 'baz', 'name': 'Baz', 'sequence': 2,
    })
    # re-fetch the field descriptor: the registry is reloaded on change
    x_sel = Model._fields['x_sel']
    self.assertEqual(x_sel.type, 'selection')
    self.assertEqual(x_sel.selection, [('foo', 'Foo'), ('bar', 'Bar'), ('baz', 'Baz')])

    # assign values to records
    rec1 = Model.create({'name': 'Rec1', 'x_sel': 'foo'})
    rec2 = Model.create({'name': 'Rec2', 'x_sel': 'bar'})
    rec3 = Model.create({'name': 'Rec3', 'x_sel': 'baz'})
    self.assertEqual(rec1.x_sel, 'foo')
    self.assertEqual(rec2.x_sel, 'bar')
    self.assertEqual(rec3.x_sel, 'baz')

    # remove selection value 'foo'
    field.selection_ids[0].unlink()
    x_sel = Model._fields['x_sel']
    self.assertEqual(x_sel.type, 'selection')
    self.assertEqual(x_sel.selection, [('bar', 'Bar'), ('baz', 'Baz')])

    # records that pointed to the removed value are reset to False
    self.assertEqual(rec1.x_sel, False)
    self.assertEqual(rec2.x_sel, 'bar')
    self.assertEqual(rec3.x_sel, 'baz')

    # update selection value 'bar'
    field.selection_ids[0].value = 'quux'
    x_sel = Model._fields['x_sel']
    self.assertEqual(x_sel.type, 'selection')
    self.assertEqual(x_sel.selection, [('quux', 'Bar'), ('baz', 'Baz')])

    # records holding the renamed value follow the rename
    self.assertEqual(rec1.x_sel, False)
    self.assertEqual(rec2.x_sel, 'quux')
    self.assertEqual(rec3.x_sel, 'baz')
|
||||
|
|
@ -0,0 +1,349 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import base64
|
||||
import hashlib
|
||||
import io
|
||||
import os
|
||||
|
||||
from PIL import Image
|
||||
|
||||
import odoo
|
||||
from odoo.exceptions import AccessError
|
||||
from odoo.addons.base.tests.common import TransactionCaseWithUserDemo
|
||||
from odoo.tools import image_to_base64
|
||||
|
||||
HASH_SPLIT = 2 # FIXME: testing implementations detail is not a good idea
|
||||
|
||||
|
||||
class TestIrAttachment(TransactionCaseWithUserDemo):
    """ Tests for ir.attachment storage (db vs. filestore), mimetype
        neutralization, image post-processing, copy and garbage collection.
    """

    def setUp(self):
        super(TestIrAttachment, self).setUp()
        self.Attachment = self.env['ir.attachment']
        self.filestore = self.Attachment._filestore()

        # Blob1: payload plus its expected filestore path (sha1-based)
        self.blob1 = b'blob1'
        self.blob1_b64 = base64.b64encode(self.blob1)
        self.blob1_hash = hashlib.sha1(self.blob1).hexdigest()
        self.blob1_fname = self.blob1_hash[:HASH_SPLIT] + '/' + self.blob1_hash

        # Blob2
        self.blob2 = b'blob2'
        self.blob2_b64 = base64.b64encode(self.blob2)

    def assertApproximately(self, value, expectedSize, delta=1):
        """ Assert that ``value`` (raw bytes or base64) weighs about
            ``expectedSize`` kilobytes, within ``delta`` kb.
        """
        # we don't use bin_size in context, because on write, the cached value is the data and not
        # the size, so we would need on each write to invalidate the cache if we really want the size.
        try:
            value = base64.b64decode(value.decode())
        except UnicodeDecodeError:
            # value was already raw bytes, not base64 text
            pass
        size = len(value) / 1024  # kb

        self.assertAlmostEqual(size, expectedSize, delta=delta)

    def test_01_store_in_db(self):
        # force storing in database
        self.env['ir.config_parameter'].set_param('ir_attachment.location', 'db')

        # 'ir_attachment.location' is 'db': data must land in db_datas
        a1 = self.Attachment.create({'name': 'a1', 'raw': self.blob1})
        self.assertEqual(a1.datas, self.blob1_b64)

        self.assertEqual(a1.db_datas, self.blob1)

    def test_02_store_on_disk(self):
        # default storage: file in the filestore, named after the sha1 hash
        a2 = self.Attachment.create({'name': 'a2', 'raw': self.blob1})
        self.assertEqual(a2.store_fname, self.blob1_fname)
        self.assertTrue(os.path.isfile(os.path.join(self.filestore, a2.store_fname)))

    def test_03_no_duplication(self):
        # identical content is deduplicated to a single filestore file
        a2 = self.Attachment.create({'name': 'a2', 'raw': self.blob1})
        a3 = self.Attachment.create({'name': 'a3', 'raw': self.blob1})
        self.assertEqual(a3.store_fname, a2.store_fname)

    def test_04_keep_file(self):
        # deleting one of two attachments sharing a file keeps the file
        a2 = self.Attachment.create({'name': 'a2', 'raw': self.blob1})
        a3 = self.Attachment.create({'name': 'a3', 'raw': self.blob1})

        a2_fn = os.path.join(self.filestore, a2.store_fname)

        a3.unlink()
        self.assertTrue(os.path.isfile(a2_fn))

    def test_05_change_data_change_file(self):
        # rewriting the data moves the attachment to a new filestore file
        a2 = self.Attachment.create({'name': 'a2', 'raw': self.blob1})
        a2_store_fname1 = a2.store_fname
        a2_fn = os.path.join(self.filestore, a2_store_fname1)

        self.assertTrue(os.path.isfile(a2_fn))

        a2.write({'raw': self.blob2})

        a2_store_fname2 = a2.store_fname
        self.assertNotEqual(a2_store_fname1, a2_store_fname2)

        a2_fn = os.path.join(self.filestore, a2_store_fname2)
        self.assertTrue(os.path.isfile(a2_fn))

    def test_07_write_mimetype(self):
        """
        Tests the consistency of documents' mimetypes
        """

        Attachment = self.Attachment.with_user(self.user_demo.id)
        a2 = Attachment.create({'name': 'a2', 'datas': self.blob1_b64, 'mimetype': 'image/png'})
        self.assertEqual(a2.mimetype, 'image/png', "the new mimetype should be the one given on write")
        a3 = Attachment.create({'name': 'a3', 'datas': self.blob1_b64, 'mimetype': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'})
        self.assertEqual(a3.mimetype, 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', "should preserve office mime type")
        a4 = Attachment.create({'name': 'a4', 'datas': self.blob1_b64, 'mimetype': 'Application/VND.OpenXMLformats-officedocument.wordprocessingml.document'})
        self.assertEqual(a4.mimetype, 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', "should preserve office mime type (lowercase)")

    def test_08_neuter_xml_mimetype(self):
        """
        Tests that potentially harmful mimetypes (XML mimetypes that can lead to XSS attacks) are converted to text
        """
        Attachment = self.Attachment.with_user(self.user_demo.id)
        document = Attachment.create({'name': 'document', 'datas': self.blob1_b64})
        document.write({'datas': self.blob1_b64, 'mimetype': 'text/xml'})
        self.assertEqual(document.mimetype, 'text/plain', "XML mimetype should be forced to text")
        document.write({'datas': self.blob1_b64, 'mimetype': 'image/svg+xml'})
        self.assertEqual(document.mimetype, 'text/plain', "SVG mimetype should be forced to text")
        document.write({'datas': self.blob1_b64, 'mimetype': 'text/html'})
        self.assertEqual(document.mimetype, 'text/plain', "HTML mimetype should be forced to text")
        document.write({'datas': self.blob1_b64, 'mimetype': 'application/xhtml+xml'})
        self.assertEqual(document.mimetype, 'text/plain', "XHTML mimetype should be forced to text")

    def test_09_dont_neuter_xml_mimetype_for_admin(self):
        """
        Admin user does not have a mime type filter
        """
        document = self.Attachment.create({'name': 'document', 'datas': self.blob1_b64})
        document.write({'datas': self.blob1_b64, 'mimetype': 'text/xml'})
        self.assertEqual(document.mimetype, 'text/xml', "XML mimetype should not be forced to text, for admin user")

    def test_10_image_autoresize(self):
        # exercise automatic image resize/compression driven by the
        # 'base.image_autoresize_max_px' / 'base.image_autoresize_quality'
        # config parameters, on both the 'datas' and 'raw' fields
        Attachment = self.env['ir.attachment']
        img_bin = io.BytesIO()
        dir_path = os.path.dirname(os.path.realpath(__file__))
        with Image.open(os.path.join(dir_path, 'odoo.jpg'), 'r') as logo:
            # build a large 4000x2000 JPEG containing the logo
            img = Image.new('RGB', (4000, 2000), '#4169E1')
            img.paste(logo)
            img.save(img_bin, 'JPEG')

        img_encoded = image_to_base64(img, 'JPEG')
        img_bin = img_bin.getvalue()

        # size (kb) of the unprocessed image; all expectations below are
        # approximate (see assertApproximately)
        fullsize = 124.99

        ####################################
        ### test create/write on 'datas'
        ####################################
        attach = Attachment.with_context(image_no_postprocess=True).create({
            'name': 'image',
            'datas': img_encoded,
        })
        self.assertApproximately(attach.datas, fullsize)  # no resize, no compression

        attach = attach.with_context(image_no_postprocess=False)
        attach.datas = img_encoded
        self.assertApproximately(attach.datas, 12.06)  # default resize + default compression

        # resize + default quality (80)
        self.env['ir.config_parameter'].set_param('base.image_autoresize_max_px', '1024x768')
        attach.datas = img_encoded
        self.assertApproximately(attach.datas, 3.71)

        # resize + quality 50
        self.env['ir.config_parameter'].set_param('base.image_autoresize_quality', '50')
        attach.datas = img_encoded
        self.assertApproximately(attach.datas, 3.57)

        # no resize + no quality implicit
        self.env['ir.config_parameter'].set_param('base.image_autoresize_max_px', '0')
        attach.datas = img_encoded
        self.assertApproximately(attach.datas, fullsize)

        # Check that we only compress quality when we resize. We avoid compressing again during a new write.
        # no resize + quality -> should have no effect
        self.env['ir.config_parameter'].set_param('base.image_autoresize_max_px', '10000x10000')
        self.env['ir.config_parameter'].set_param('base.image_autoresize_quality', '50')
        attach.datas = img_encoded
        self.assertApproximately(attach.datas, fullsize)

        ####################################
        ### test create/write on 'raw'
        ####################################

        # reset default ~ delete
        self.env['ir.config_parameter'].search([('key', 'ilike', 'base.image_autoresize%')]).unlink()

        attach = Attachment.with_context(image_no_postprocess=True).create({
            'name': 'image',
            'raw': img_bin,
        })
        self.assertApproximately(attach.raw, fullsize)  # no resize, no compression

        attach = attach.with_context(image_no_postprocess=False)
        attach.raw = img_bin
        self.assertApproximately(attach.raw, 12.06)  # default resize + default compression

        # resize + default quality (80)
        self.env['ir.config_parameter'].set_param('base.image_autoresize_max_px', '1024x768')
        attach.raw = img_bin
        self.assertApproximately(attach.raw, 3.71)

        # resize + no quality
        self.env['ir.config_parameter'].set_param('base.image_autoresize_quality', '0')
        attach.raw = img_bin
        self.assertApproximately(attach.raw, 4.09)

        # resize + quality 50
        self.env['ir.config_parameter'].set_param('base.image_autoresize_quality', '50')
        attach.raw = img_bin
        self.assertApproximately(attach.raw, 3.57)

        # no resize + no quality implicit
        self.env['ir.config_parameter'].set_param('base.image_autoresize_max_px', '0')
        attach.raw = img_bin
        self.assertApproximately(attach.raw, fullsize)

        # no resize of gif
        self.env['ir.config_parameter'].set_param('base.image_autoresize_max_px', '0x0')
        gif_bin = b'GIF89a\x01\x00\x01\x00\x00\xff\x00,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x00;'
        attach.raw = gif_bin
        self.assertEqual(attach.raw, gif_bin)

    def test_11_copy(self):
        """
        Copying an attachment preserves the data
        """
        document = self.Attachment.create({'name': 'document', 'datas': self.blob2_b64})
        document2 = document.copy({'name': "document (copy)"})
        self.assertEqual(document2.name, "document (copy)")
        self.assertEqual(document2.datas, document.datas)
        self.assertEqual(document2.db_datas, document.db_datas)
        self.assertEqual(document2.store_fname, document.store_fname)
        self.assertEqual(document2.checksum, document.checksum)

        # overriding 'datas' on copy stores the new payload instead
        document3 = document.copy({'datas': self.blob1_b64})
        self.assertEqual(document3.datas, self.blob1_b64)
        self.assertEqual(document3.raw, self.blob1)
        self.assertTrue(self.filestore)  # no data in db but has a store_fname
        self.assertEqual(document3.db_datas, False)
        self.assertEqual(document3.store_fname, self.blob1_fname)
        self.assertEqual(document3.checksum, self.blob1_hash)

    def test_12_gc(self):
        # the data needs to be unique so that no other attachment links
        # the file, so that the gc removes it
        unique_blob = os.urandom(16)
        a1 = self.Attachment.create({'name': 'a1', 'raw': unique_blob})
        store_path = os.path.join(self.filestore, a1.store_fname)
        self.assertTrue(os.path.isfile(store_path), 'file exists')
        a1.unlink()
        self.Attachment._gc_file_store_unsafe()
        self.assertFalse(os.path.isfile(store_path), 'file removed')

    def test_13_rollback(self):
        # use a real (test-mode) cursor so a rollback actually occurs,
        # leaving an orphan filestore file for the gc to collect
        self.registry.enter_test_mode(self.cr)
        self.addCleanup(self.registry.leave_test_mode)
        self.cr = self.registry.cursor()
        self.addCleanup(self.cr.close)
        self.env = odoo.api.Environment(self.cr, odoo.SUPERUSER_ID, {})

        # the data needs to be unique so that no other attachment links
        # the file, so that the gc removes it
        unique_blob = os.urandom(16)
        a1 = self.Attachment.create({'name': 'a1', 'raw': unique_blob})
        store_path = os.path.join(self.filestore, a1.store_fname)
        self.assertTrue(os.path.isfile(store_path), 'file exists')
        self.env.cr.rollback()
        self.Attachment._gc_file_store_unsafe()
        self.assertFalse(os.path.isfile(store_path), 'file removed')
|
||||
|
||||
|
||||
class TestPermissions(TransactionCaseWithUserDemo):
    """ Attachment access rights follow the access rights of the record
        the attachment is linked to (res_model / res_id).
    """

    def setUp(self):
        super().setUp()
        # replace self.env(uid=1) with an actual user environment so rules apply
        self.env = self.env(user=self.user_demo)
        self.Attachments = self.env['ir.attachment']

        # create a record with an attachment and a rule allowing Read access
        # but preventing Create, Update, or Delete
        record = self.Attachments.create({'name': 'record1'})
        self.vals = {'name': 'attach', 'res_id': record.id, 'res_model': record._name}
        a = self.attachment = self.Attachments.create(self.vals)

        # prevent create, write and unlink accesses on record
        self.rule = self.env['ir.rule'].sudo().create({
            'name': 'remove access to record %d' % record.id,
            'model_id': self.env['ir.model']._get_id(record._name),
            'domain_force': "[('id', '!=', %s)]" % record.id,
            'perm_read': False
        })
        # flush and invalidate so the new rule is applied on later accesses
        self.env.flush_all()
        a.invalidate_recordset()

    def test_no_read_permission(self):
        """If the record can't be read, the attachment can't be read either
        """
        # check that the information can be read out of the box
        self.attachment.datas
        # prevent read access on record
        self.rule.perm_read = True
        self.attachment.invalidate_recordset()
        with self.assertRaises(AccessError):
            self.attachment.datas

    def test_with_write_permissions(self):
        """With write permissions to the linked record, attachment can be
        created, updated, or deleted (or copied).
        """
        # enable write permission on linked record
        self.rule.perm_write = False
        attachment = self.Attachments.create(self.vals)
        attachment.copy()
        attachment.write({'raw': b'test'})
        attachment.unlink()

    def test_basic_modifications(self):
        """Lacking write access to the linked record means create, update, and
        delete on the attachment are forbidden
        """
        with self.assertRaises(AccessError):
            self.Attachments.create(self.vals)
        with self.assertRaises(AccessError):
            self.attachment.write({'raw': b'yay'})
        with self.assertRaises(AccessError):
            self.attachment.unlink()
        with self.assertRaises(AccessError):
            self.attachment.copy()

    def test_cross_record_copies(self):
        """Copying attachments between records (in the same model or not) adds
        wrinkles as the ACLs may diverge a lot more
        """
        # create an other unwritable record in a different model
        unwritable = self.env['res.users.log'].create({})
        with self.assertRaises(AccessError):
            unwritable.write({})  # checks unwritability
        # create a writable record in the same model
        writable = self.Attachments.create({'name': 'yes'})
        writable.name = 'canwrite'  # checks for writeability

        # can copy from a record with read permissions to one with write permissions
        copied = self.attachment.copy({'res_model': writable._name, 'res_id': writable.id})
        # can copy to self given write permission
        copied.copy()
        # can not copy back to record without write permission
        with self.assertRaises(AccessError):
            copied.copy({'res_id': self.vals['res_id']})

        # can not copy to a record without write permission
        with self.assertRaises(AccessError):
            self.attachment.copy({'res_model': unwritable._name, 'res_id': unwritable.id})
        # even from a record with write permissions
        with self.assertRaises(AccessError):
            copied.copy({'res_model': unwritable._name, 'res_id': unwritable.id})
|
||||
|
|
@ -0,0 +1,130 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import patch
|
||||
from freezegun import freeze_time
|
||||
|
||||
from odoo import fields
|
||||
from odoo.tests.common import TransactionCase, RecordCapturer, get_db_name
|
||||
|
||||
|
||||
class CronMixinCase:
    def capture_triggers(self, cron_id=None):
        """
        Return a context manager that records every ``ir.cron.trigger``
        created while the context is active. Inside the context, the
        triggers captured so far are exposed; once the context exits,
        capturing stops.

        The captured triggers are available on the ``records`` attribute
        of the returned object.

        :param cron_id: An optional cron record id (int) or xmlid (str)
                        to only capture triggers for that cron.
        """
        # an xmlid is resolved to the corresponding database id first
        if isinstance(cron_id, str):
            cron_id = self.env.ref(cron_id).id

        if cron_id:
            capture_domain = [('cron_id', '=', cron_id)]
        else:
            capture_domain = []
        trigger_model = self.env['ir.cron.trigger'].sudo()
        return RecordCapturer(model=trigger_model, domain=capture_domain)
|
||||
|
||||
|
||||
class TestIrCron(TransactionCase, CronMixinCase):
    """ Tests for ir.cron scheduling: direct triggering, inactive-cron
        trigger handling and protection against a null interval.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        # freeze time at the cursor's notion of "now" so all date
        # arithmetic in the tests below is deterministic
        freezer = freeze_time(cls.cr.now())
        cls.frozen_datetime = freezer.start()
        cls.addClassCleanup(freezer.stop)

    def setUp(self):
        super(TestIrCron, self).setUp()

        # a cron that renames the partner 'TestCronRecord' when it runs
        self.cron = self.env['ir.cron'].create({
            'name': 'TestCron',
            'model_id': self.env.ref('base.model_res_partner').id,
            'state': 'code',
            'code': 'model.search([("name", "=", "TestCronRecord")]).write({"name": "You have been CRONWNED"})',
            'interval_number': 1,
            'interval_type': 'days',
            'numbercall': -1,
            'doall': False,
        })
        # target record for the cron's code, plus a control record that
        # must stay untouched
        self.test_partner = self.env['res.partner'].create({
            'name': 'TestCronRecord'
        })
        self.test_partner2 = self.env['res.partner'].create({
            'name': 'NotTestCronRecord'
        })

    def test_cron_direct_trigger(self):
        """ method_direct_trigger runs the cron code and updates lastcall. """
        self.assertFalse(self.cron.lastcall)
        self.assertEqual(self.test_partner.name, 'TestCronRecord')
        self.assertEqual(self.test_partner2.name, 'NotTestCronRecord')

        def patched_now(*args, **kwargs):
            return '2020-10-22 08:00:00'

        # pin "now" so lastcall has a known, assertable value
        with patch('odoo.fields.Datetime.now', patched_now):
            self.cron.method_direct_trigger()

        self.assertEqual(fields.Datetime.to_string(self.cron.lastcall), '2020-10-22 08:00:00')
        self.assertEqual(self.test_partner.name, 'You have been CRONWNED')
        self.assertEqual(self.test_partner2.name, 'NotTestCronRecord')

    def test_cron_skip_unactive_triggers(self):
        """ Triggering an inactive cron neither makes it ready nor stores
            the trigger.
        """
        # Situation: an admin disables the cron and another user triggers
        # the cron to be executed *now*; the cron shouldn't be ready and
        # the trigger should not be stored.

        self.cron.active = False
        self.cron.nextcall = fields.Datetime.now() + timedelta(days=2)
        self.cron.flush_recordset()
        with self.capture_triggers() as capture:
            self.cron._trigger()

        ready_jobs = self.registry['ir.cron']._get_all_ready_jobs(self.cr)
        self.assertNotIn(self.cron.id, [job['id'] for job in ready_jobs],
                         "the cron shouldn't be ready")
        self.assertFalse(capture.records, "trigger should have been skipped")

    def test_cron_keep_future_triggers(self):
        """ A trigger scheduled while the cron is inactive is kept, and
            fires once the cron is re-enabled and its time arrives.
        """
        # Situation: yesterday an admin disabled the cron; while the
        # cron was disabled, another user triggered it to run today.
        # In case the cron has been re-enabled before "today", it should
        # run.

        # go yesterday
        self.frozen_datetime.tick(delta=timedelta(days=-1))

        # admin disables the cron
        self.cron.active = False
        self.cron.nextcall = fields.Datetime.now() + timedelta(days=10)
        self.cron.flush_recordset()

        # user triggers the cron to run tomorrow-of-yesterday (= today)
        with self.capture_triggers() as capture:
            self.cron._trigger(at=fields.Datetime.now() + timedelta(days=1))

        # admin re-enables the cron
        self.cron.active = True
        self.cron.flush_recordset()

        # go today, check the cron should run
        self.frozen_datetime.tick(delta=timedelta(days=1))
        ready_jobs = self.registry['ir.cron']._get_all_ready_jobs(self.cr)
        self.assertIn(self.cron.id, [job['id'] for job in ready_jobs],
                      "cron should be ready")
        self.assertTrue(capture.records, "trigger should have been kept")

    def test_cron_null_interval(self):
        """ A cron with a zero interval is deactivated (with an error
            logged) instead of looping forever.
        """
        self.cron.interval_number = 0
        with self.assertLogs('odoo.addons.base.models.ir_cron', 'ERROR'):
            self.cron._process_job(get_db_name(), self.env.cr, self.cron.read(load=False)[0])
        self.cron.invalidate_recordset(['active'])
        self.assertFalse(self.cron.active)
|
||||
|
|
@ -0,0 +1,157 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
class TestIrDefault(TransactionCase):
    """Tests for user-defined default values ('ir.default'):
    per-user / per-company resolution, conditions, validation,
    dangling many2one cleanup and the JSON format constraint."""

    def test_defaults(self):
        """ check the mechanism of user-defined defaults """
        companyA = self.env.company
        companyB = companyA.create({'name': 'CompanyB'})
        user1 = self.env.user
        user2 = user1.create({'name': 'u2', 'login': 'u2'})
        user3 = user1.create({'name': 'u3', 'login': 'u3',
                              'company_id': companyB.id,
                              'company_ids': companyB.ids})

        # create some default value for some model
        IrDefault1 = self.env['ir.default']
        IrDefault2 = IrDefault1.with_user(user2)
        IrDefault3 = IrDefault1.with_user(user3)

        # set a default value for all users
        IrDefault1.search([('field_id.model', '=', 'res.partner')]).unlink()
        IrDefault1.set('res.partner', 'ref', 'GLOBAL', user_id=False, company_id=False)
        self.assertEqual(IrDefault1.get_model_defaults('res.partner'), {'ref': 'GLOBAL'},
                         "Can't retrieve the created default value for all users.")
        self.assertEqual(IrDefault2.get_model_defaults('res.partner'), {'ref': 'GLOBAL'},
                         "Can't retrieve the created default value for all users.")
        self.assertEqual(IrDefault3.get_model_defaults('res.partner'), {'ref': 'GLOBAL'},
                         "Can't retrieve the created default value for all users.")

        # set a default value for current company (behavior of 'set default' from debug mode)
        IrDefault1.set('res.partner', 'ref', 'COMPANY', user_id=False, company_id=True)
        self.assertEqual(IrDefault1.get_model_defaults('res.partner'), {'ref': 'COMPANY'},
                         "Can't retrieve the created default value for company.")
        self.assertEqual(IrDefault2.get_model_defaults('res.partner'), {'ref': 'COMPANY'},
                         "Can't retrieve the created default value for company.")
        # user3 belongs to companyB, so the companyA default does not apply
        self.assertEqual(IrDefault3.get_model_defaults('res.partner'), {'ref': 'GLOBAL'},
                         "Unexpected default value for company.")

        # set a default value for current user (behavior of 'set default' from debug mode)
        IrDefault2.set('res.partner', 'ref', 'USER', user_id=True, company_id=True)
        self.assertEqual(IrDefault1.get_model_defaults('res.partner'), {'ref': 'COMPANY'},
                         "Can't retrieve the created default value for user.")
        self.assertEqual(IrDefault2.get_model_defaults('res.partner'), {'ref': 'USER'},
                         "Unexpected default value for user.")
        self.assertEqual(IrDefault3.get_model_defaults('res.partner'), {'ref': 'GLOBAL'},
                         "Unexpected default value for company.")

        # check default values on partners via the regular default_get() path
        default1 = IrDefault1.env['res.partner'].default_get(['ref']).get('ref')
        self.assertEqual(default1, 'COMPANY', "Wrong default value.")
        default2 = IrDefault2.env['res.partner'].default_get(['ref']).get('ref')
        self.assertEqual(default2, 'USER', "Wrong default value.")
        default3 = IrDefault3.env['res.partner'].default_get(['ref']).get('ref')
        self.assertEqual(default3, 'GLOBAL', "Wrong default value.")

    def test_conditions(self):
        """ check user-defined defaults with condition """
        IrDefault = self.env['ir.default']

        # default without condition: returned only for condition-less lookups
        IrDefault.search([('field_id.model', '=', 'res.partner')]).unlink()
        IrDefault.set('res.partner', 'ref', 'X')
        self.assertEqual(IrDefault.get_model_defaults('res.partner'),
                         {'ref': 'X'})
        self.assertEqual(IrDefault.get_model_defaults('res.partner', condition='name=Agrolait'),
                         {})

        # default with a condition: returned only for that exact condition
        IrDefault.search([('field_id.model', '=', 'res.partner.title')]).unlink()
        IrDefault.set('res.partner.title', 'shortcut', 'X')
        IrDefault.set('res.partner.title', 'shortcut', 'Mr', condition='name=Mister')
        self.assertEqual(IrDefault.get_model_defaults('res.partner.title'),
                         {'shortcut': 'X'})
        self.assertEqual(IrDefault.get_model_defaults('res.partner.title', condition='name=Miss'),
                         {})
        self.assertEqual(IrDefault.get_model_defaults('res.partner.title', condition='name=Mister'),
                         {'shortcut': 'Mr'})

    def test_invalid(self):
        """ check error cases with 'ir.default' """
        IrDefault = self.env['ir.default']
        # unknown model
        with self.assertRaises(ValidationError):
            IrDefault.set('unknown_model', 'unknown_field', 42)
        # unknown field
        with self.assertRaises(ValidationError):
            IrDefault.set('res.partner', 'unknown_field', 42)
        # invalid selection value
        with self.assertRaises(ValidationError):
            IrDefault.set('res.partner', 'lang', 'some_LANG')
        # non-numeric value for a float field
        with self.assertRaises(ValidationError):
            IrDefault.set('res.partner', 'partner_latitude', 'foo')
        # integer overflow (2**31 does not fit in a 32-bit column)
        with self.assertRaises(ValidationError):
            IrDefault.set('res.partner', 'color', 2147483648)

    def test_removal(self):
        """ check defaults for many2one with their value being removed """
        IrDefault = self.env['ir.default']
        IrDefault.search([('field_id.model', '=', 'res.partner')]).unlink()

        # set a record as a default value
        title = self.env['res.partner.title'].create({'name': 'President'})
        IrDefault.set('res.partner', 'title', title.id)
        self.assertEqual(IrDefault.get_model_defaults('res.partner'), {'title': title.id})

        # delete the record, and check the dangling default is dropped
        title.unlink()
        self.assertEqual(IrDefault.get_model_defaults('res.partner'), {})

    def test_multi_company_defaults(self):
        """Check defaults in multi-company environment."""
        company_a = self.env["res.company"].create({"name": "C_A"})
        company_b = self.env["res.company"].create({"name": "C_B"})
        company_a_b = (company_a + company_b)
        company_b_a = (company_b + company_a)
        multi_company_user = self.env['res.users'].create({
            'name': 'u2', 'login': 'u2',
            'company_id': company_a.id,
            'company_ids': company_a_b.ids,
        })
        IrDefault = self.env["ir.default"].with_user(multi_company_user)
        # one default per company, set while that company is active
        IrDefault.with_context(allowed_company_ids=company_a.ids).set(
            'res.partner', 'ref', 'CADefault', user_id=True, company_id=True)
        IrDefault.with_context(allowed_company_ids=company_b.ids).set(
            'res.partner', 'ref', 'CBDefault', user_id=True, company_id=True)
        self.assertEqual(
            IrDefault.get_model_defaults('res.partner')['ref'],
            'CADefault',
        )
        self.assertEqual(
            IrDefault.with_context(allowed_company_ids=company_a.ids).get_model_defaults('res.partner')['ref'],
            'CADefault',
        )
        self.assertEqual(
            IrDefault.with_context(allowed_company_ids=company_b.ids).get_model_defaults('res.partner')['ref'],
            'CBDefault',
        )
        # with several allowed companies, the first one wins
        self.assertEqual(
            IrDefault.with_context(allowed_company_ids=company_a_b.ids).get_model_defaults('res.partner')['ref'],
            'CADefault',
        )
        self.assertEqual(
            IrDefault.with_context(allowed_company_ids=company_b_a.ids).get_model_defaults('res.partner')['ref'],
            'CBDefault',
        )

    def test_json_format_invalid(self):
        """ check the _check_json_format constraint """
        IrDefault = self.env['ir.default']
        field_id = self.env['ir.model.fields'].search([('model', '=', 'res.partner'), ('name', '=', 'ref')])
        # trailing comma makes the JSON payload invalid
        with self.assertRaises(ValidationError):
            IrDefault.create({
                'field_id': field_id.id,
                'json_value': '{"name":"John", }',
            })
|
@ -0,0 +1,330 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import ast
|
||||
import logging
|
||||
|
||||
from odoo import exceptions
|
||||
from odoo.addons.base.tests.common import TransactionCaseWithUserDemo
|
||||
from odoo.tests.common import TransactionCase, ADMIN_USER_ID, tagged
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
def noid(seq):
    """Strip 'id' and 'action_id' (irrelevant for the test comparisons)
    from every dict in *seq*, in place, and return *seq*."""
    for record in seq:
        for key in ('id', 'action_id'):
            record.pop(key, None)
    return seq
||||
|
||||
|
||||
class FiltersCase(TransactionCaseWithUserDemo):
    """Base case for ir.filters tests: resolves the demo user and
    provides a helper to create records as the admin user."""

    def setUp(self):
        # modernized: zero-argument super() (Python 3 idiom)
        super().setUp()
        # name_search returns (id, display_name) pairs; keep the first match
        self.USER_NG = self.env['res.users'].name_search('demo')[0]
        self.USER_ID = self.USER_NG[0]

    def build(self, model, *args):
        """Create one record per vals dict in *args* on *model*, as admin."""
        Model = self.env[model].with_user(ADMIN_USER_ID)
        for vals in args:
            Model.create(vals)
||||
|
||||
class TestGetFilters(FiltersCase):
    """get_filters() must return the user's own filters and global ones,
    and must never expose filters private to other users."""

    def test_own_filters(self):
        # the demo user sees all of their own filters
        self.build(
            'ir.filters',
            dict(name='a', user_id=self.USER_ID, model_id='ir.filters'),
            dict(name='b', user_id=self.USER_ID, model_id='ir.filters'),
            dict(name='c', user_id=self.USER_ID, model_id='ir.filters'),
            dict(name='d', user_id=self.USER_ID, model_id='ir.filters'))

        filters = self.env['ir.filters'].with_user(self.USER_ID).get_filters('ir.filters')

        self.assertItemsEqual(noid(filters), [
            dict(name='a', is_default=False, user_id=self.USER_NG, domain='[]', context='{}', sort='[]'),
            dict(name='b', is_default=False, user_id=self.USER_NG, domain='[]', context='{}', sort='[]'),
            dict(name='c', is_default=False, user_id=self.USER_NG, domain='[]', context='{}', sort='[]'),
            dict(name='d', is_default=False, user_id=self.USER_NG, domain='[]', context='{}', sort='[]'),
        ])

    def test_global_filters(self):
        # filters with user_id=False are global: visible to everyone
        self.build(
            'ir.filters',
            dict(name='a', user_id=False, model_id='ir.filters'),
            dict(name='b', user_id=False, model_id='ir.filters'),
            dict(name='c', user_id=False, model_id='ir.filters'),
            dict(name='d', user_id=False, model_id='ir.filters'),
        )

        filters = self.env['ir.filters'].with_user(self.USER_ID).get_filters('ir.filters')

        self.assertItemsEqual(noid(filters), [
            dict(name='a', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'),
            dict(name='b', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'),
            dict(name='c', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'),
            dict(name='d', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'),
        ])

    def test_no_third_party_filters(self):
        # filters belonging to another user (admin) must not be returned
        self.build(
            'ir.filters',
            dict(name='a', user_id=False, model_id='ir.filters'),
            dict(name='b', user_id=ADMIN_USER_ID, model_id='ir.filters'),
            dict(name='c', user_id=self.USER_ID, model_id='ir.filters'),
            dict(name='d', user_id=ADMIN_USER_ID, model_id='ir.filters') )

        filters = self.env['ir.filters'].with_user(self.USER_ID).get_filters('ir.filters')

        # only the global 'a' and own 'c' filters are visible
        self.assertItemsEqual(noid(filters), [
            dict(name='a', is_default=False, user_id=False, domain='[]', context='{}', sort='[]'),
            dict(name='c', is_default=False, user_id=self.USER_NG, domain='[]', context='{}', sort='[]'),
        ])
||||
|
||||
class TestOwnDefaults(FiltersCase):
    """For user-owned filters, create_or_replace() must guarantee that at
    most one filter per model is flagged is_default, moving the flag as
    needed."""

    def test_new_no_filter(self):
        """
        When creating a @is_default filter with no existing filter, that new
        filter gets the default flag
        """
        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        Filters.create_or_replace({
            'name': 'a',
            'model_id': 'ir.filters',
            'user_id': self.USER_ID,
            'is_default': True,
        })
        filters = Filters.get_filters('ir.filters')

        self.assertItemsEqual(noid(filters), [
            dict(name='a', user_id=self.USER_NG, is_default=True,
                 domain='[]', context='{}', sort='[]')
        ])

    def test_new_filter_not_default(self):
        """
        When creating a @is_default filter with existing non-default filters,
        the new filter gets the flag
        """
        self.build(
            'ir.filters',
            dict(name='a', user_id=self.USER_ID, model_id='ir.filters'),
            dict(name='b', user_id=self.USER_ID, model_id='ir.filters'),
        )

        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        Filters.create_or_replace({
            'name': 'c',
            'model_id': 'ir.filters',
            'user_id': self.USER_ID,
            'is_default': True,
        })
        filters = Filters.get_filters('ir.filters')

        self.assertItemsEqual(noid(filters), [
            dict(name='a', user_id=self.USER_NG, is_default=False, domain='[]', context='{}', sort='[]'),
            dict(name='b', user_id=self.USER_NG, is_default=False, domain='[]', context='{}', sort='[]'),
            dict(name='c', user_id=self.USER_NG, is_default=True, domain='[]', context='{}', sort='[]'),
        ])

    def test_new_filter_existing_default(self):
        """
        When creating a @is_default filter where an existing filter is already
        @is_default, the flag should be *moved* from the old to the new filter
        """
        self.build(
            'ir.filters',
            dict(name='a', user_id=self.USER_ID, model_id='ir.filters'),
            dict(name='b', is_default=True, user_id=self.USER_ID, model_id='ir.filters'),
        )

        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        Filters.create_or_replace({
            'name': 'c',
            'model_id': 'ir.filters',
            'user_id': self.USER_ID,
            'is_default': True,
        })
        filters = Filters.get_filters('ir.filters')

        # 'b' lost the flag, the newly-created 'c' now holds it
        self.assertItemsEqual(noid(filters), [
            dict(name='a', user_id=self.USER_NG, is_default=False, domain='[]', context='{}', sort='[]'),
            dict(name='b', user_id=self.USER_NG, is_default=False, domain='[]', context='{}', sort='[]'),
            dict(name='c', user_id=self.USER_NG, is_default=True, domain='[]', context='{}', sort='[]'),
        ])

    def test_update_filter_set_default(self):
        """
        When updating an existing filter to @is_default, if an other filter
        already has the flag the flag should be moved
        """
        self.build(
            'ir.filters',
            dict(name='a', user_id=self.USER_ID, model_id='ir.filters'),
            dict(name='b', is_default=True, user_id=self.USER_ID, model_id='ir.filters'),
        )

        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        # same name 'a' -> replaces the existing filter rather than creating
        Filters.create_or_replace({
            'name': 'a',
            'model_id': 'ir.filters',
            'user_id': self.USER_ID,
            'is_default': True,
        })
        filters = Filters.get_filters('ir.filters')

        self.assertItemsEqual(noid(filters), [
            dict(name='a', user_id=self.USER_NG, is_default=True, domain='[]', context='{}', sort='[]'),
            dict(name='b', user_id=self.USER_NG, is_default=False, domain='[]', context='{}', sort='[]'),
        ])
||||
|
||||
class TestGlobalDefaults(FiltersCase):
    """For global filters (user_id=False), create_or_replace() must refuse
    to silently steal the default flag from an existing global default:
    it raises UserError instead, except when replacing that same filter."""

    def test_new_filter_not_default(self):
        """
        When creating a @is_default filter with existing non-default filters,
        the new filter gets the flag
        """
        self.build(
            'ir.filters',
            dict(name='a', user_id=False, model_id='ir.filters'),
            dict(name='b', user_id=False, model_id='ir.filters'),
        )

        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        Filters.create_or_replace({
            'name': 'c',
            'model_id': 'ir.filters',
            'user_id': False,
            'is_default': True,
        })
        filters = Filters.get_filters('ir.filters')

        self.assertItemsEqual(noid(filters), [
            dict(name='a', user_id=False, is_default=False, domain='[]', context='{}', sort='[]'),
            dict(name='b', user_id=False, is_default=False, domain='[]', context='{}', sort='[]'),
            dict(name='c', user_id=False, is_default=True, domain='[]', context='{}', sort='[]'),
        ])

    def test_new_filter_existing_default(self):
        """
        When creating a @is_default filter where an existing filter is already
        @is_default, an error should be generated
        """
        self.build(
            'ir.filters',
            dict(name='a', user_id=False, model_id='ir.filters'),
            dict(name='b', is_default=True, user_id=False, model_id='ir.filters'),
        )

        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        with self.assertRaises(exceptions.UserError):
            Filters.create_or_replace({
                'name': 'c',
                'model_id': 'ir.filters',
                'user_id': False,
                'is_default': True,
            })

    def test_update_filter_set_default(self):
        """
        When updating an existing filter to @is_default, if an other filter
        already has the flag an error should be generated
        """
        self.build(
            'ir.filters',
            dict(name='a', user_id=False, model_id='ir.filters'),
            dict(name='b', is_default=True, user_id=False, model_id='ir.filters'),
        )

        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        with self.assertRaises(exceptions.UserError):
            Filters.create_or_replace({
                'name': 'a',
                'model_id': 'ir.filters',
                'user_id': False,
                'is_default': True,
            })

    def test_update_default_filter(self):
        """
        Replacing the current default global filter should not generate any error
        """
        self.build(
            'ir.filters',
            dict(name='a', user_id=False, model_id='ir.filters'),
            dict(name='b', is_default=True, user_id=False, model_id='ir.filters'),
        )

        Filters = self.env['ir.filters'].with_user(self.USER_ID)
        context_value = "{'some_key': True}"
        # 'b' is already the global default: replacing it in place is allowed
        Filters.create_or_replace({
            'name': 'b',
            'model_id': 'ir.filters',
            'user_id': False,
            'context': context_value,
            'is_default': True,
        })
        filters = Filters.get_filters('ir.filters')

        self.assertItemsEqual(noid(filters), [
            dict(name='a', user_id=False, is_default=False, domain='[]', context='{}', sort='[]'),
            dict(name='b', user_id=False, is_default=True, domain='[]', context=context_value, sort='[]'),
        ])
|
||||
|
||||
class TestReadGroup(TransactionCase):
    """Test function read_group with groupby on a many2one field to a model
    (in test, "user_id" to "res.users") which is ordered by an inherited not stored field (in
    test, "name" inherited from "res.partners").
    """
    def test_read_group_1(self):
        Users = self.env['res.users']
        # preconditions the test relies on: ordering by a non-stored field
        self.assertEqual(Users._order, "name, login", "Model res.users must be ordered by name, login")
        self.assertFalse(Users._fields['name'].store, "Field name is not stored in res.users")

        Filters = self.env['ir.filters']
        filter_a = Filters.create(dict(name="Filter_A", model_id="ir.filters"))
        filter_b = Filters.create(dict(name="Filter_B", model_id="ir.filters"))
        # clear user_id after creation so one group has an empty many2one
        filter_b.write(dict(user_id=False))

        res = Filters.read_group([], ['name', 'user_id'], ['user_id'])
        self.assertTrue(any(val['user_id'] == False for val in res), "At least one group must contain val['user_id'] == False.")
|
||||
@tagged('post_install', '-at_install', 'migration')
class TestAllFilters(TransactionCase):
    """Sanity-check every ir.filters record in the database: its domain,
    group_by and sort must still evaluate against its model (useful after
    migrations that rename or drop fields)."""

    def check_filter(self, name, model, domain, fields, groupby, order, context):
        # Exercise the filter the way the web client would: read_group when
        # it groups, plain search when it only filters. ValueError/KeyError
        # are re-raised as test failures naming the offending filter.
        if groupby:
            try:
                self.env[model].with_context(context).read_group(domain, fields, groupby, orderby=order)
            except ValueError as e:
                raise self.failureException("Test filter '%s' failed: %s" % (name, e)) from None
            except KeyError as e:
                raise self.failureException("Test filter '%s' failed: field or aggregate %s does not exist"% (name, e)) from None
        elif domain:
            try:
                self.env[model].with_context(context).search(domain, order=order)
            except ValueError as e:
                raise self.failureException("Test filter '%s' failed: %s" % (name, e)) from None
        else:
            _logger.info("No domain or group by in filter %s with model %s and context %s", name, model, context)

    def test_filters(self):
        for filter_ in self.env['ir.filters'].search([]):
            # subTest so one broken filter does not hide the others
            with self.subTest(name=filter_.name):
                context = ast.literal_eval(filter_.context)
                groupby = context.get('group_by')
                self.check_filter(
                    name=filter_.name,
                    model=filter_.model_id,
                    domain=filter_._get_eval_domain(),
                    # 'field:granularity' specs -> bare field names
                    fields=[field.split(':')[0] for field in (groupby or [])],
                    groupby=groupby,
                    order=','.join(ast.literal_eval(filter_.sort)),
                    context=context,
                )
|
@ -0,0 +1,30 @@
|
|||
import logging
|
||||
import re
|
||||
import time
|
||||
|
||||
from odoo.tests import tagged
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@tagged('-at_install', 'post_install')
class TestIrHttpPerformances(TransactionCase):
    """Measure (and log) cold-start generation time of the ir.http
    routing map."""

    def test_routing_map_performance(self):
        # If the routing map was already generated, some compiled regexes
        # may be cached. We want to measure the cold state, as when a
        # worker just spawned, so also empty the re cache.
        # NOTE: re._cache is a private CPython detail; may break on upgrade.
        # Fix: the original cleared the routing map twice in a row; once
        # is enough. perf_counter is used instead of time.time because it
        # is monotonic and meant for measuring durations.
        self.env['ir.http']._clear_routing_map()
        re._cache.clear()

        start = time.perf_counter()
        self.env['ir.http'].routing_map()
        duration = time.perf_counter() - start
        _logger.info('Routing map web generated in %.3fs', duration)

        # generate the routing map of another website, to check if we can
        # benefit from anything computed by the previous routing map
        start = time.perf_counter()
        self.env['ir.http'].routing_map(key=1)
        duration = time.perf_counter() - start
        _logger.info('Routing map website1 generated in %.3fs', duration)
|
|
@ -0,0 +1,533 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from odoo import tools
|
||||
from odoo.addons.base.tests import test_mail_examples
|
||||
from odoo.addons.base.tests.common import MockSmtplibCase
|
||||
from odoo.tests import tagged
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools import mute_logger
|
||||
from odoo.tools import config
|
||||
|
||||
|
||||
@tagged('mail_server')
|
||||
class TestIrMailServer(TransactionCase, MockSmtplibCase):
|
||||
|
||||
def setUp(self):
|
||||
self._init_mail_config()
|
||||
self._init_mail_servers()
|
||||
|
||||
def _build_email(self, mail_from, return_path=None):
|
||||
return self.env['ir.mail_server'].build_email(
|
||||
email_from=mail_from,
|
||||
email_to='dest@example-é.com',
|
||||
subject='subject', body='body',
|
||||
headers={'Return-Path': return_path} if return_path else None
|
||||
)
|
||||
|
||||
    def test_match_from_filter(self):
        """Test the from_filter field on the "ir.mail_server"."""
        match_from_filter = self.env['ir.mail_server']._match_from_filter

        # Should match: domain match (case-insensitive), exact address,
        # empty filter, and the address part of a "Name <addr>" pair
        tests = [
            ('admin@mail.example.com', 'mail.example.com'),
            ('admin@mail.example.com', 'mail.EXAMPLE.com'),
            ('admin@mail.example.com', 'admin@mail.example.com'),
            ('admin@mail.example.com', False),
            ('"fake@test.com" <admin@mail.example.com>', 'mail.example.com'),
            ('"fake@test.com" <ADMIN@mail.example.com>', 'mail.example.com'),
        ]
        for email, from_filter in tests:
            self.assertTrue(match_from_filter(email, from_filter))

        # Should not match: different mailbox, different/lookalike domain,
        # and an address that only appears in the display-name part
        tests = [
            ('admin@mail.example.com', 'test@mail.example.com'),
            ('admin@mail.example.com', 'test.com'),
            ('admin@mail.example.com', 'mail.éxample.com'),
            ('admin@mmail.example.com', 'mail.example.com'),
            ('admin@mail.example.com', 'mmail.example.com'),
            ('"admin@mail.example.com" <fake@test.com>', 'mail.example.com'),
        ]
        for email, from_filter in tests:
            self.assertFalse(match_from_filter(email, from_filter))
||||
    def test_mail_body(self):
        """build_email() must generate a text/plain alternative part whose
        content is a sensible plain-text rendering of the HTML body."""
        bodies = [
            'content',
            '<p>content</p>',
            '<head><meta content="text/html; charset=utf-8" http-equiv="Content-Type"></head><body><p>content</p></body>',
            test_mail_examples.MISC_HTML_SOURCE,
            test_mail_examples.QUOTE_THUNDERBIRD_HTML,
        ]
        # expected plain-text alternatives, pairwise with `bodies`
        expected_list = [
            'content',
            'content',
            'content',
            "test1\n*test2*\ntest3\ntest4\ntest5\ntest6 test7\ntest8 test9\ntest10\ntest11\ntest12\ngoogle [1]\ntest link [2]\n\n\n[1] http://google.com\n[2] javascript:alert('malicious code')",
            'On 01/05/2016 10:24 AM, Raoul\nPoilvache wrote:\n\n* Test reply. The suite. *\n\n--\nRaoul Poilvache\n\nTop cool !!!\n\n--\nRaoul Poilvache',
        ]
        for body, expected in zip(bodies, expected_list):
            message = self.env['ir.mail_server'].build_email(
                'john.doe@from.example.com',
                'destinataire@to.example.com',
                body=body,
                subject='Subject',
                subtype='html',
            )
            # walk the MIME tree to find the text/plain alternative
            body_alternative = False
            for part in message.walk():
                if part.get_content_maintype() == 'multipart':
                    continue  # skip container
                if part.get_content_type() == 'text/plain':
                    if not part.get_payload():
                        continue
                    body_alternative = tools.ustr(part.get_content())
                    # remove ending new lines as it just adds noise
                    body_alternative = body_alternative.strip('\n')
            self.assertEqual(body_alternative, expected)
||||
    @mute_logger('odoo.models.unlink')
    def test_mail_server_priorities(self):
        """Test if we choose the right mail server to send an email.

        Priorities are
        1. Forced mail server (e.g.: in mass mailing)
            - If the "from_filter" of the mail server match the notification email
              use the notifications email in the "From header"
            - Otherwise spoof the "From" (because we force the mail server but we don't
              know which email use to send it)
        2. A mail server for which the "from_filter" match the "From" header
        3. A mail server for which the "from_filter" match the domain of the "From" header
        4. The mail server used for notifications
        5. A mail server without "from_filter" (and so spoof the "From" header because we
           do not know for which email address it can be used)
        """
        # sanity checks: the test needs notification/bounce emails configured
        self.assertTrue(self.env['ir.mail_server']._get_default_from_address(), 'Notifications email must be set for testing')
        self.assertTrue(self.env['ir.mail_server']._get_default_bounce_address(), 'Bounce email must be set for testing')

        # exact from_filter match -> per-user server
        mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from='specific_user@test.com')
        self.assertEqual(mail_server, self.server_user)
        self.assertEqual(mail_from, 'specific_user@test.com')

        # the address must be extracted from a "Name <addr>" pair,
        # but the full original From value is preserved
        mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from='"Name name@strange.name" <specific_user@test.com>')
        self.assertEqual(mail_server, self.server_user, 'Must extract email from full name')
        self.assertEqual(mail_from, '"Name name@strange.name" <specific_user@test.com>', 'Must keep the given mail from')

        # Should not be case sensitive
        mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from='specific_user@test.com')
        self.assertEqual(mail_server, self.server_user, 'Mail from is case insensitive')
        self.assertEqual(mail_from, 'specific_user@test.com', 'Should not change the mail from')

        # no exact match -> fall back to the domain-wide server
        mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from='unknown_email@test.com')
        self.assertEqual(mail_server, self.server_domain)
        self.assertEqual(mail_from, 'unknown_email@test.com')

        # Cover a different condition that the "email case insensitive" test
        mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from='unknown_email@TEST.COM')
        self.assertEqual(mail_server, self.server_domain, 'Domain is case insensitive')
        self.assertEqual(mail_from, 'unknown_email@TEST.COM', 'Domain is case insensitive')

        # unknown domain -> notification server, From rewritten
        mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from='"Test" <test@unknown_domain.com>')
        self.assertEqual(mail_server, self.server_notification, 'Should take the notification email')
        self.assertEqual(mail_from, 'notifications@test.com')

        # test if notification server is selected if email_from = False
        mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from=False)
        self.assertEqual(mail_server, self.server_notification,
            'Should select the notification email server if passed FROM address was False')
        self.assertEqual(mail_from, 'notifications@test.com')

        # remove the notifications email to simulate a mis-configured Odoo database
        # so we do not have the choice, we have to spoof the FROM
        # (otherwise we can not send the email)
        self.env['ir.config_parameter'].sudo().set_param('mail.catchall.domain', False)
        with mute_logger('odoo.addons.base.models.ir_mail_server'):
            mail_server, mail_from = self.env['ir.mail_server']._find_mail_server(email_from='test@unknown_domain.com')
            self.assertEqual(mail_server.from_filter, False, 'No notifications email set, must be forced to spoof the FROM')
            self.assertEqual(mail_from, 'test@unknown_domain.com')
|
||||
@mute_logger('odoo.models.unlink', 'odoo.addons.base.models.ir_mail_server')
|
||||
def test_mail_server_send_email(self):
|
||||
IrMailServer = self.env['ir.mail_server']
|
||||
default_bounce_adress = self.env['ir.mail_server']._get_default_bounce_address()
|
||||
|
||||
# A mail server is configured for the email
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='specific_user@test.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assertEqual(len(self.emails), 1)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='specific_user@test.com',
|
||||
message_from='specific_user@test.com',
|
||||
from_filter='specific_user@test.com',
|
||||
)
|
||||
|
||||
# No mail server are configured for the email address,
|
||||
# so it will use the notifications email instead and encapsulate the old email
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='"Name" <test@unknown_domain.com>')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assertEqual(len(self.emails), 1)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='notifications@test.com',
|
||||
message_from='"Name" <notifications@test.com>',
|
||||
from_filter='notifications@test.com',
|
||||
)
|
||||
|
||||
# Same situation, but the original email has no name part
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='test@unknown_domain.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assertEqual(len(self.emails), 1)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='notifications@test.com',
|
||||
message_from='"test" <notifications@test.com>',
|
||||
from_filter='notifications@test.com',
|
||||
)
|
||||
|
||||
# A mail server is configured for the entire domain name, so we can use the bounce
|
||||
# email address because the mail server supports it
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='unknown_name@test.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assertEqual(len(self.emails), 1)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='unknown_name@test.com',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
# remove the notification server
|
||||
# so <notifications@test.com> will use the <test.com> mail server
|
||||
self.server_notification.unlink()
|
||||
|
||||
# The mail server configured for the notifications email has been removed
|
||||
# but we can still use the mail server configured for test.com
|
||||
# and so we will be able to use the bounce address
|
||||
# because we use the mail server for "test.com"
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='"Name" <test@unknown_domain.com>')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assertEqual(len(self.emails), 1)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='"Name" <notifications@test.com>',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
# Test that the mail from / recipient envelop are encoded using IDNA
|
||||
self.server_domain.from_filter = 'ééééééé.com'
|
||||
self.env['ir.config_parameter'].sudo().set_param('mail.catchall.domain', 'ééééééé.com')
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='test@ééééééé.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assertEqual(len(self.emails), 1)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='bounce.test@xn--9caaaaaaa.com',
|
||||
smtp_to_list=['dest@xn--example--i1a.com'],
|
||||
message_from='test@=?utf-8?b?w6nDqcOpw6nDqcOpw6k=?=.com',
|
||||
from_filter='ééééééé.com',
|
||||
)
|
||||
|
||||
# Test the case when the "mail.default.from" contains a full email address and not just the local part
|
||||
# the domain of this default email address can be different than the catchall domain
|
||||
self.env['ir.config_parameter'].sudo().set_param('mail.default.from', 'test@custom_domain.com')
|
||||
self.server_default.from_filter = 'custom_domain.com'
|
||||
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='"Name" <test@unknown_domain.com>')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='test@custom_domain.com',
|
||||
smtp_to_list=['dest@xn--example--i1a.com'],
|
||||
message_from='"Name" <test@custom_domain.com>',
|
||||
from_filter='custom_domain.com',
|
||||
)
|
||||
|
||||
# Test when forcing the mail server and when smtp_encryption is "starttls"
|
||||
self.server_domain.smtp_encryption = "starttls"
|
||||
self.server_domain.from_filter = "test.com"
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='specific_user@test.com')
|
||||
IrMailServer.send_email(message, mail_server_id=self.server_domain.id)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='specific_user@test.com',
|
||||
message_from='specific_user@test.com',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
# miss-configured database, no mail servers from filter
|
||||
# match the user / notification email
|
||||
self.env['ir.mail_server'].search([]).from_filter = "random.domain"
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='specific_user@test.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='test@custom_domain.com',
|
||||
message_from='"specific_user" <test@custom_domain.com>',
|
||||
from_filter='random.domain',
|
||||
)
|
||||
|
||||
@mute_logger('odoo.models.unlink')
|
||||
def test_mail_server_send_email_smtp_session(self):
|
||||
"""Test all the cases when we provide the SMTP session.
|
||||
|
||||
The results must be the same as passing directly the parameter to "send_email".
|
||||
"""
|
||||
IrMailServer = self.env['ir.mail_server']
|
||||
default_bounce_adress = self.env['ir.mail_server']._get_default_bounce_address()
|
||||
|
||||
# A mail server is configured for the email
|
||||
with self.mock_smtplib_connection():
|
||||
smtp_session = IrMailServer.connect(smtp_from='specific_user@test.com')
|
||||
message = self._build_email(mail_from='specific_user@test.com')
|
||||
IrMailServer.send_email(message, smtp_session=smtp_session)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='specific_user@test.com',
|
||||
message_from='specific_user@test.com',
|
||||
from_filter='specific_user@test.com',
|
||||
)
|
||||
|
||||
# No mail server are configured for the email address,
|
||||
# so it will use the notifications email instead and encapsulate the old email
|
||||
with self.mock_smtplib_connection():
|
||||
smtp_session = IrMailServer.connect(smtp_from='"Name" <test@unknown_domain.com>')
|
||||
message = self._build_email(mail_from='"Name" <test@unknown_domain.com>')
|
||||
IrMailServer.send_email(message, smtp_session=smtp_session)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='notifications@test.com',
|
||||
message_from='"Name" <notifications@test.com>',
|
||||
from_filter='notifications@test.com',
|
||||
)
|
||||
|
||||
# A mail server is configured for the entire domain name, so we can use the bounce
|
||||
# email address because the mail server supports it
|
||||
with self.mock_smtplib_connection():
|
||||
smtp_session = IrMailServer.connect(smtp_from='unknown_name@test.com')
|
||||
message = self._build_email(mail_from='unknown_name@test.com')
|
||||
IrMailServer.send_email(message, smtp_session=smtp_session)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='unknown_name@test.com',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
# remove the notification server
|
||||
# so <notifications@test.com> will use the <test.com> mail server
|
||||
self.server_notification.unlink()
|
||||
|
||||
# The mail server configured for the notifications email has been removed
|
||||
# but we can still use the mail server configured for test.com
|
||||
with self.mock_smtplib_connection():
|
||||
smtp_session = IrMailServer.connect(smtp_from='"Name" <test@unknown_domain.com>')
|
||||
message = self._build_email(mail_from='"Name" <test@unknown_domain.com>')
|
||||
IrMailServer.send_email(message, smtp_session=smtp_session)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='"Name" <notifications@test.com>',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
@mute_logger('odoo.models.unlink')
|
||||
@patch.dict(config.options, {"from_filter": "test.com", "smtp_server": "example.com"})
|
||||
def test_mail_server_binary_arguments_domain(self):
|
||||
"""Test the configuration provided in the odoo-bin arguments.
|
||||
|
||||
This config is used when no mail server exists.
|
||||
"""
|
||||
IrMailServer = self.env['ir.mail_server']
|
||||
default_bounce_adress = self.env['ir.mail_server']._get_default_bounce_address()
|
||||
|
||||
# Remove all mail server so we will use the odoo-bin arguments
|
||||
self.env['ir.mail_server'].search([]).unlink()
|
||||
self.assertFalse(self.env['ir.mail_server'].search([]))
|
||||
|
||||
# Use an email in the domain of the "from_filter"
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='specific_user@test.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='specific_user@test.com',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
# Test if the domain name is normalized before comparison
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='specific_user@test.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='specific_user@test.com',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
# Use an email outside of the domain of the "from_filter"
|
||||
# So we will use the notifications email in the headers and the bounce address
|
||||
# in the envelop because the "from_filter" allows to use the entire domain
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='test@unknown_domain.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='"test" <notifications@test.com>',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
@mute_logger('odoo.models.unlink')
|
||||
@patch.dict(config.options, {"from_filter": "test.com", "smtp_server": "example.com"})
|
||||
def test_mail_server_binary_arguments_domain_smtp_session(self):
|
||||
"""Test the configuration provided in the odoo-bin arguments.
|
||||
|
||||
This config is used when no mail server exists.
|
||||
Use a pre-configured SMTP session.
|
||||
"""
|
||||
IrMailServer = self.env['ir.mail_server']
|
||||
default_bounce_adress = self.env['ir.mail_server']._get_default_bounce_address()
|
||||
|
||||
# Remove all mail server so we will use the odoo-bin arguments
|
||||
self.env['ir.mail_server'].search([]).unlink()
|
||||
self.assertFalse(self.env['ir.mail_server'].search([]))
|
||||
|
||||
# Use an email in the domain of the "from_filter"
|
||||
with self.mock_smtplib_connection():
|
||||
smtp_session = IrMailServer.connect(smtp_from='specific_user@test.com')
|
||||
message = self._build_email(mail_from='specific_user@test.com')
|
||||
IrMailServer.send_email(message, smtp_session=smtp_session)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='specific_user@test.com',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
# Use an email outside of the domain of the "from_filter"
|
||||
# So we will use the notifications email in the headers and the bounce address
|
||||
# in the envelop because the "from_filter" allows to use the entire domain
|
||||
with self.mock_smtplib_connection():
|
||||
smtp_session = IrMailServer.connect(smtp_from='test@unknown_domain.com')
|
||||
message = self._build_email(mail_from='test@unknown_domain.com')
|
||||
IrMailServer.send_email(message, smtp_session=smtp_session)
|
||||
|
||||
self.connect_mocked.assert_called_once()
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from=default_bounce_adress,
|
||||
message_from='"test" <notifications@test.com>',
|
||||
from_filter='test.com',
|
||||
)
|
||||
|
||||
def test_mail_server_get_email_addresses(self):
|
||||
"""Test the email used to test the mail server connection."""
|
||||
self.server_notification.from_filter = 'example_2.com'
|
||||
|
||||
self.env['ir.config_parameter'].set_param('mail.default.from', 'notifications@example.com')
|
||||
email_from = self.server_notification._get_test_email_addresses()[0]
|
||||
self.assertEqual(email_from, 'noreply@example_2.com')
|
||||
|
||||
self.env['ir.config_parameter'].set_param('mail.default.from', 'notifications')
|
||||
email_from = self.server_notification._get_test_email_addresses()[0]
|
||||
self.assertEqual(email_from, 'notifications@example_2.com')
|
||||
|
||||
self.server_notification.from_filter = 'full_email@example_2.com'
|
||||
|
||||
self.env['ir.config_parameter'].set_param('mail.default.from', 'notifications')
|
||||
email_from = self.server_notification._get_test_email_addresses()[0]
|
||||
self.assertEqual(email_from, 'full_email@example_2.com')
|
||||
|
||||
self.env['ir.config_parameter'].set_param('mail.default.from', 'notifications@example.com')
|
||||
email_from = self.server_notification._get_test_email_addresses()[0]
|
||||
self.assertEqual(email_from, 'full_email@example_2.com')
|
||||
|
||||
self.env['ir.config_parameter'].set_param('mail.default.from', 'notifications@example.com')
|
||||
self.server_notification.from_filter = 'example.com'
|
||||
email_from = self.server_notification._get_test_email_addresses()[0]
|
||||
self.assertEqual(email_from, 'notifications@example.com')
|
||||
|
||||
@mute_logger('odoo.models.unlink')
|
||||
@patch.dict(config.options, {'from_filter': 'test.com', 'smtp_server': 'example.com'})
|
||||
def test_mail_server_mail_default_from_filter(self):
|
||||
"""Test that the config parameter "mail.default.from_filter" overwrite the odoo-bin
|
||||
argument "--from-filter"
|
||||
"""
|
||||
self.env['ir.config_parameter'].sudo().set_param('mail.default.from_filter', 'example.com')
|
||||
|
||||
IrMailServer = self.env['ir.mail_server']
|
||||
|
||||
# Remove all mail server so we will use the odoo-bin arguments
|
||||
IrMailServer.search([]).unlink()
|
||||
self.assertFalse(IrMailServer.search([]))
|
||||
|
||||
# Use an email in the domain of the config parameter "mail.default.from_filter"
|
||||
with self.mock_smtplib_connection():
|
||||
message = self._build_email(mail_from='specific_user@example.com')
|
||||
IrMailServer.send_email(message)
|
||||
|
||||
self.assert_email_sent_smtp(
|
||||
smtp_from='specific_user@example.com',
|
||||
message_from='specific_user@example.com',
|
||||
from_filter='example.com',
|
||||
)
|
||||
|
||||
def test_eml_attachment_encoding(self):
|
||||
"""Test that message/rfc822 attachments are encoded using 7bit, 8bit, or binary encoding."""
|
||||
IrMailServer = self.env['ir.mail_server']
|
||||
|
||||
# Create a sample .eml file content
|
||||
eml_content = b"From: user@example.com\nTo: user2@example.com\nSubject: Test Email\n\nThis is a test email."
|
||||
attachments = [('test.eml', eml_content, 'message/rfc822')]
|
||||
|
||||
# Build the email with the .eml attachment
|
||||
message = IrMailServer.build_email(
|
||||
email_from='john.doe@from.example.com',
|
||||
email_to='destinataire@to.example.com',
|
||||
subject='Subject with .eml attachment',
|
||||
body='This email contains a .eml attachment.',
|
||||
attachments=attachments,
|
||||
)
|
||||
|
||||
# Verify that the attachment is correctly encoded
|
||||
acceptable_encodings = {'7bit', '8bit', 'binary'}
|
||||
for part in message.iter_attachments():
|
||||
if part.get_content_type() == 'message/rfc822':
|
||||
self.assertIn(
|
||||
part.get('Content-Transfer-Encoding'),
|
||||
acceptable_encodings,
|
||||
"The message/rfc822 attachment should be encoded using 7bit, 8bit, or binary encoding.",
|
||||
)
|
||||
|
|
@ -0,0 +1,445 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from psycopg2 import IntegrityError, Error as Psycopg2Error
|
||||
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tests.common import Form, TransactionCase, HttpCase, tagged
|
||||
from odoo.tools import mute_logger
|
||||
from odoo import Command
|
||||
|
||||
|
||||
class TestXMLID(TransactionCase):
    """Tests for external identifiers (XML ids) managed through
    ``_load_records``: creation, updates, the ``noupdate`` flag,
    re-creation after deletion, and validation of identifier names.
    """

    def get_data(self, xml_id):
        """ Return the 'ir.model.data' record corresponding to ``xml_id``. """
        module, suffix = xml_id.split('.', 1)
        domain = [('module', '=', module), ('name', '=', suffix)]
        return self.env['ir.model.data'].search(domain)

    def test_create(self):
        """A record loaded without ``noupdate`` is updated by later loads."""
        model = self.env['res.partner.category']
        xml_id = 'test_convert.category_foo'

        # create category (flag 'noupdate' should be False by default)
        data = dict(xml_id=xml_id, values={'name': 'Foo'})
        category = model._load_records([data])
        self.assertEqual(category, self.env.ref(xml_id, raise_if_not_found=False))
        self.assertEqual(category.name, 'Foo')
        self.assertEqual(self.get_data(xml_id).noupdate, False)

        # update category
        data = dict(xml_id=xml_id, values={'name': 'Bar'})
        category1 = model._load_records([data], update=True)
        self.assertEqual(category, category1)
        self.assertEqual(category.name, 'Bar')
        self.assertEqual(self.get_data(xml_id).noupdate, False)

        # update category
        # NOTE: passing noupdate=True on an update does not flip the existing
        # flag: the record is still updated and its flag stays False.
        data = dict(xml_id=xml_id, values={'name': 'Baz'}, noupdate=True)
        category2 = model._load_records([data], update=True)
        self.assertEqual(category, category2)
        self.assertEqual(category.name, 'Baz')
        self.assertEqual(self.get_data(xml_id).noupdate, False)

    def test_create_noupdate(self):
        """A record created with ``noupdate=True`` is never modified by updates."""
        model = self.env['res.partner.category']
        xml_id = 'test_convert.category_foo'

        # create category
        data = dict(xml_id=xml_id, values={'name': 'Foo'}, noupdate=True)
        category = model._load_records([data])
        self.assertEqual(category, self.env.ref(xml_id, raise_if_not_found=False))
        self.assertEqual(category.name, 'Foo')
        self.assertEqual(self.get_data(xml_id).noupdate, True)

        # update category
        data = dict(xml_id=xml_id, values={'name': 'Bar'}, noupdate=False)
        category1 = model._load_records([data], update=True)
        self.assertEqual(category, category1)
        self.assertEqual(category.name, 'Foo')
        self.assertEqual(self.get_data(xml_id).noupdate, True)

        # update category
        data = dict(xml_id=xml_id, values={'name': 'Baz'}, noupdate=True)
        category2 = model._load_records([data], update=True)
        self.assertEqual(category, category2)
        self.assertEqual(category.name, 'Foo')
        self.assertEqual(self.get_data(xml_id).noupdate, True)

    def test_create_noupdate_multi(self):
        """Batch creation applies the ``noupdate`` flag to every record."""
        model = self.env['res.partner.category']
        data_list = [
            dict(xml_id='test_convert.category_foo', values={'name': 'Foo'}, noupdate=True),
            dict(xml_id='test_convert.category_bar', values={'name': 'Bar'}, noupdate=True),
        ]

        # create category
        categories = model._load_records(data_list)
        foo = self.env.ref('test_convert.category_foo')
        bar = self.env.ref('test_convert.category_bar')
        self.assertEqual(categories, foo + bar)
        self.assertEqual(foo.name, 'Foo')
        self.assertEqual(bar.name, 'Bar')

        # check data
        self.assertEqual(self.get_data('test_convert.category_foo').noupdate, True)
        self.assertEqual(self.get_data('test_convert.category_bar').noupdate, True)

    def test_create_order(self):
        """``_load_records`` returns records in input order, even when some
        are skipped because of ``noupdate``."""
        model = self.env['res.partner.category']
        data_list = [
            dict(xml_id='test_convert.category_foo', values={'name': 'Foo'}),
            dict(xml_id='test_convert.category_bar', values={'name': 'Bar'}, noupdate=True),
            dict(xml_id='test_convert.category_baz', values={'name': 'Baz'}),
        ]

        # create categories
        foo = model._load_records([data_list[0]])
        bar = model._load_records([data_list[1]])
        baz = model._load_records([data_list[2]])
        self.assertEqual(foo.name, 'Foo')
        self.assertEqual(bar.name, 'Bar')
        self.assertEqual(baz.name, 'Baz')

        # update them, and check the order of result
        for data in data_list:
            data['values']['name'] += 'X'
        cats = model._load_records(data_list, update=True)
        self.assertEqual(list(cats), [foo, bar, baz])
        self.assertEqual(foo.name, 'FooX')
        self.assertEqual(bar.name, 'Bar')
        self.assertEqual(baz.name, 'BazX')

    def test_create_inherits(self):
        """Loading a delegated-inheritance model also assigns an XML id to the
        automatically created parent record (``<xml_id>_res_partner``)."""
        model = self.env['res.users']
        xml_id = 'test_convert.user_foo'
        par_xml_id = xml_id + '_res_partner'

        # create user
        user = model._load_records([dict(xml_id=xml_id, values={'name': 'Foo', 'login': 'foo'})])
        self.assertEqual(user, self.env.ref(xml_id, raise_if_not_found=False))
        self.assertEqual(user.partner_id, self.env.ref(par_xml_id, raise_if_not_found=False))
        self.assertEqual(user.name, 'Foo')
        self.assertEqual(user.login, 'foo')

    def test_recreate(self):
        """Updating a deleted record through its XML id recreates it."""
        model = self.env['res.partner.category']
        xml_id = 'test_convert.category_foo'
        data = dict(xml_id=xml_id, values={'name': 'Foo'})

        # create category
        category = model._load_records([data])
        self.assertEqual(category, self.env.ref(xml_id, raise_if_not_found=False))
        self.assertEqual(category.name, 'Foo')

        # suppress category
        category.unlink()
        self.assertFalse(self.env.ref(xml_id, raise_if_not_found=False))

        # update category, this should recreate it
        category = model._load_records([data], update=True)
        self.assertEqual(category, self.env.ref(xml_id, raise_if_not_found=False))
        self.assertEqual(category.name, 'Foo')

    def test_create_xmlids(self):
        """Batch creation on an inherits model assigns XML ids to both the
        records and their delegated parents."""
        # create users and assign them xml ids
        foo, bar = self.env['res.users']._load_records([{
            'xml_id': 'test_convert.foo',
            'values': {'name': 'Foo', 'login': 'foo'},
            'noupdate': True,
        }, {
            'xml_id': 'test_convert.bar',
            'values': {'name': 'Bar', 'login': 'bar'},
            'noupdate': True,
        }])

        self.assertEqual(foo, self.env.ref('test_convert.foo', raise_if_not_found=False))
        self.assertEqual(bar, self.env.ref('test_convert.bar', raise_if_not_found=False))

        self.assertEqual(foo.partner_id, self.env.ref('test_convert.foo_res_partner', raise_if_not_found=False))
        self.assertEqual(bar.partner_id, self.env.ref('test_convert.bar_res_partner', raise_if_not_found=False))

        self.assertEqual(self.get_data('test_convert.foo').noupdate, True)
        self.assertEqual(self.get_data('test_convert.bar').noupdate, True)

    @mute_logger('odoo.sql_db', 'odoo.addons.base.models.ir_model')
    def test_create_external_id_with_space(self):
        """An XML id containing a space is rejected by the database constraint
        ``ir_model_data_name_nospaces``."""
        model = self.env['res.partner.category']
        data_list = [{
            'xml_id': 'test_convert.category_with space',
            'values': {'name': 'Bar'},
        }]
        with self.assertRaisesRegex(IntegrityError, 'ir_model_data_name_nospaces'):
            model._load_records(data_list)
|
||||
|
||||
|
||||
class TestIrModel(TransactionCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super().setUpClass()
|
||||
|
||||
# The test mode is necessary in this case. After each test, we call
|
||||
# registry.reset_changes(), which opens a new cursor to retrieve custom
|
||||
# models and fields. A regular cursor would correspond to the state of
|
||||
# the database before setUpClass(), which is not correct. Instead, a
|
||||
# test cursor will correspond to the state of the database of cls.cr at
|
||||
# that point, i.e., before the call to setUp().
|
||||
cls.registry.enter_test_mode(cls.cr)
|
||||
cls.addClassCleanup(cls.registry.leave_test_mode)
|
||||
|
||||
# model and records for banana stages
|
||||
cls.env['ir.model'].create({
|
||||
'name': 'Banana Ripeness',
|
||||
'model': 'x_banana_ripeness',
|
||||
'field_id': [
|
||||
Command.create({'name': 'x_name', 'ttype': 'char', 'field_description': 'Name'}),
|
||||
]
|
||||
})
|
||||
# stage values are pairs (id, display_name)
|
||||
cls.ripeness_green = cls.env['x_banana_ripeness'].name_create('Green')
|
||||
cls.ripeness_okay = cls.env['x_banana_ripeness'].name_create('Okay, I guess?')
|
||||
cls.ripeness_gone = cls.env['x_banana_ripeness'].name_create('Walked away on its own')
|
||||
|
||||
# model and records for bananas
|
||||
cls.bananas_model = cls.env['ir.model'].create({
|
||||
'name': 'Bananas',
|
||||
'model': 'x_bananas',
|
||||
'field_id': [
|
||||
Command.create({'name': 'x_name', 'ttype': 'char', 'field_description': 'Name'}),
|
||||
Command.create({'name': 'x_length', 'ttype': 'float', 'field_description': 'Length'}),
|
||||
Command.create({'name': 'x_color', 'ttype': 'integer', 'field_description': 'Color'}),
|
||||
Command.create({'name': 'x_ripeness_id', 'ttype': 'many2one',
|
||||
'field_description': 'Ripeness','relation': 'x_banana_ripeness',
|
||||
'group_expand': True})
|
||||
]
|
||||
})
|
||||
# add non-stored field that is not valid in order
|
||||
cls.env['ir.model.fields'].create({
|
||||
'name': 'x_is_yellow',
|
||||
'field_description': 'Is the banana yellow?',
|
||||
'ttype': 'boolean',
|
||||
'model_id': cls.bananas_model.id,
|
||||
'store': False,
|
||||
'depends': 'x_color',
|
||||
'compute': "for banana in self:\n banana['x_is_yellow'] = banana.x_color == 9"
|
||||
})
|
||||
# default stage is ripeness_green
|
||||
cls.env['ir.default'].set('x_bananas', 'x_ripeness_id', cls.ripeness_green[0])
|
||||
cls.env['x_bananas'].create([{
|
||||
'x_name': 'Banana #1',
|
||||
'x_length': 3.14159,
|
||||
'x_color': 9,
|
||||
}, {
|
||||
'x_name': 'Banana #2',
|
||||
'x_length': 0,
|
||||
'x_color': 6,
|
||||
}, {
|
||||
'x_name': 'Banana #3',
|
||||
'x_length': 10,
|
||||
'x_color': 6,
|
||||
}])
|
||||
|
||||
def setUp(self):
|
||||
# this cleanup is necessary after each test, and must be done last
|
||||
self.addCleanup(self.registry.reset_changes)
|
||||
super().setUp()
|
||||
|
||||
def test_model_order_constraint(self):
|
||||
"""Check that the order constraint is properly enforced."""
|
||||
VALID_ORDERS = ['id', 'id desc', 'id asc, x_length', 'x_color, x_length, create_uid']
|
||||
for order in VALID_ORDERS:
|
||||
self.bananas_model.order = order
|
||||
|
||||
INVALID_ORDERS = ['', 'x_wat', 'id esc', 'create_uid,', 'id, x_is_yellow']
|
||||
for order in INVALID_ORDERS:
|
||||
with self.assertRaises(ValidationError), self.cr.savepoint():
|
||||
self.bananas_model.order = order
|
||||
|
||||
# check that the constraint is checked at model creation
|
||||
fields_value = [
|
||||
Command.create({'name': 'x_name', 'ttype': 'char', 'field_description': 'Name'}),
|
||||
Command.create({'name': 'x_length', 'ttype': 'float', 'field_description': 'Length'}),
|
||||
Command.create({'name': 'x_color', 'ttype': 'integer', 'field_description': 'Color'}),
|
||||
]
|
||||
self.env['ir.model'].create({
|
||||
'name': 'MegaBananas',
|
||||
'model': 'x_mega_bananas',
|
||||
'order': 'x_name asc, id desc', # valid order
|
||||
'field_id': fields_value,
|
||||
})
|
||||
with self.assertRaises(ValidationError):
|
||||
self.env['ir.model'].create({
|
||||
'name': 'GigaBananas',
|
||||
'model': 'x_giga_bananas',
|
||||
'order': 'x_name asc, x_wat', # invalid order
|
||||
'field_id': fields_value,
|
||||
})
|
||||
|
||||
# ensure we can order by a stored field via inherits
|
||||
user_model = self.env['ir.model'].search([('model', '=', 'res.users')])
|
||||
user_model._check_order() # must not raise
|
||||
|
||||
def test_model_order_search(self):
|
||||
"""Check that custom orders are applied when querying a model."""
|
||||
ORDERS = {
|
||||
'id asc': ['Banana #1', 'Banana #2', 'Banana #3'],
|
||||
'id desc': ['Banana #3', 'Banana #2', 'Banana #1'],
|
||||
'x_color asc, id asc': ['Banana #2', 'Banana #3', 'Banana #1'],
|
||||
'x_color asc, id desc': ['Banana #3', 'Banana #2', 'Banana #1'],
|
||||
'x_length asc, id': ['Banana #2', 'Banana #1', 'Banana #3'],
|
||||
}
|
||||
for order, names in ORDERS.items():
|
||||
self.bananas_model.order = order
|
||||
self.assertEqual(self.env['x_bananas']._order, order)
|
||||
|
||||
bananas = self.env['x_bananas'].search([])
|
||||
self.assertEqual(bananas.mapped('x_name'), names, 'failed to order by %s' % order)
|
||||
|
||||
def test_group_expansion(self):
|
||||
"""Check that the basic custom group expansion works."""
|
||||
groups = self.env['x_bananas'].read_group(domain=[],
|
||||
fields=['x_ripeness_id'],
|
||||
groupby=['x_ripeness_id'])
|
||||
expected = [{
|
||||
'x_ripeness_id': self.ripeness_green,
|
||||
'x_ripeness_id_count': 3,
|
||||
'__domain': [('x_ripeness_id', '=', self.ripeness_green[0])],
|
||||
}, {
|
||||
'x_ripeness_id': self.ripeness_okay,
|
||||
'x_ripeness_id_count': 0,
|
||||
'__domain': [('x_ripeness_id', '=', self.ripeness_okay[0])],
|
||||
}, {
|
||||
'x_ripeness_id': self.ripeness_gone,
|
||||
'x_ripeness_id_count': 0,
|
||||
'__domain': [('x_ripeness_id', '=', self.ripeness_gone[0])],
|
||||
}]
|
||||
self.assertEqual(groups, expected, 'should include 2 empty ripeness stages')
|
||||
|
||||
def test_rec_name_deletion(self):
|
||||
"""Check that deleting 'x_name' does not crash."""
|
||||
record = self.env['x_bananas'].create({'x_name': "Ifan Ben-Mezd"})
|
||||
self.assertEqual(record._rec_name, 'x_name')
|
||||
self.assertEqual(self.registry.field_depends[type(record).display_name], ('x_name',))
|
||||
self.assertEqual(record.display_name, "Ifan Ben-Mezd")
|
||||
|
||||
# unlinking x_name should fixup _rec_name and display_name
|
||||
self.env['ir.model.fields']._get('x_bananas', 'x_name').unlink()
|
||||
record = self.env['x_bananas'].browse(record.id)
|
||||
self.assertEqual(record._rec_name, None)
|
||||
self.assertEqual(self.registry.field_depends[type(record).display_name], ())
|
||||
self.assertEqual(record.display_name, f"x_bananas,{record.id}")
|
||||
|
||||
def test_new_ir_model_fields_related(self):
|
||||
"""Check that related field are handled correctly on new field"""
|
||||
with self.debug_mode():
|
||||
form = Form(
|
||||
self.env['ir.model.fields'].with_context(
|
||||
default_model_id=self.bananas_model.id
|
||||
)
|
||||
)
|
||||
form.related = 'id'
|
||||
self.assertEqual(form.ttype, 'integer')
|
||||
|
||||
def test_delete_manual_models_with_base_fields(self):
    """Manual models carrying a base-state field can still be unlinked."""
    def define_model(technical_name, label, custom_field):
        # one custom char field plus an 'active' boolean flagged state=base
        return self.env["ir.model"].create({
            "model": technical_name,
            "name": label,
            "field_id": [
                Command.create({
                    "name": custom_field,
                    "ttype": "char",
                }),
                Command.create({
                    "name": "active",
                    "ttype": "boolean",
                    "state": "base",
                }),
            ],
        })

    model = define_model("x_test_base_delete", "test base delete", "x_my_field")
    model2 = define_model("x_test_base_delete2", "test base delete2", "x_my_field2")
    self.assertTrue(model.exists())
    self.assertTrue(model2.exists())

    # deleting both models at once must remove them despite the base fields
    self.env["ir.model"].browse(model.ids + model2.ids).unlink()
    self.assertFalse(model.exists())
    self.assertFalse(model2.exists())
|
||||
|
||||
@mute_logger('odoo.sql_db')
def test_ir_model_fields_name_create(self):
    """name_create on ir.model.fields needs defaults for required columns."""
    # PostgreSQL error code for a NOT NULL constraint violation
    NotNullViolationPgCode = '23502'
    # Quick-creating an ir.model.fields record should not be possible:
    # required columns are missing, so the database rejects the insert.
    with self.assertRaises(Psycopg2Error) as error:
        self.env['ir.model.fields'].name_create("field_name")

    self.assertEqual(error.exception.pgcode, NotNullViolationPgCode)

    # But with default_* context keys filling the required columns,
    # name_create succeeds.
    self.env['ir.model.fields'].with_context(
        default_model_id=self.bananas_model.id,
        default_model=self.bananas_model.name,
        default_ttype="char"
    ).name_create("field_name")
|
||||
|
||||
|
||||
@tagged('test_eval_context')
class TestEvalContext(TransactionCase):
    """Checks on the evaluation context of computed manual fields."""

    def test_module_usage(self):
        # Create a non-stored computed field whose compute code uses the
        # time, datetime and dateutil modules.
        self.env['ir.model.fields'].create({
            'name': 'x_foo_bar_baz',
            'model_id': self.env['ir.model'].search([('model', '=', 'res.partner')]).id,
            'field_description': 'foo',
            'ttype': 'integer',
            'store': False,
            'depends': 'name',
            'compute': ("time.time()\ndatetime.datetime.now()\n"
                        "dateutil.relativedelta.relativedelta(hours=1)")
        })
        # Reading the field triggers the compute; it must not raise.
        self.env['res.partner'].create({'name': 'foo'}).x_foo_bar_baz
|
||||
|
||||
@tagged('-at_install', 'post_install')
class TestIrModelFieldsTranslation(HttpCase):
    """Check that field-description translations show up in list views."""

    def test_ir_model_fields_translation(self):
        # If not enabled (like in demo data), landing on res.config would try
        # to disable module_sale_quotation_builder and raise a warning, so
        # enable the group when the sale_management module is installed.
        group_order_template = self.env.ref('sale_management.group_sale_order_template', raise_if_not_found=False)
        if group_order_template:
            self.env.ref('base.group_user').write({"implied_ids": [(4, group_order_template.id)]})

        # modify en_US translation
        field = self.env['ir.model.fields'].search([('model_id.model', '=', 'res.users'), ('name', '=', 'login')])
        self.assertEqual(field.with_context(lang='en_US').field_description, 'Login')
        # check the name column of res.users is displayed as 'Login'
        self.start_tour("/web", 'ir_model_fields_translation_en_tour', login="admin")
        field.update_field_translations('field_description', {'en_US': 'Login2'})
        # check the name column of res.users is displayed as 'Login2'
        self.start_tour("/web", 'ir_model_fields_translation_en_tour2', login="admin")

        # modify fr_FR translation
        self.env['res.lang']._activate_lang('fr_FR')
        field = self.env['ir.model.fields'].search([('model_id.model', '=', 'res.users'), ('name', '=', 'login')])
        field.update_field_translations('field_description', {'fr_FR': 'Identifiant'})
        self.assertEqual(field.with_context(lang='fr_FR').field_description, 'Identifiant')
        admin = self.env['res.users'].search([('login', '=', 'admin')], limit=1)
        admin.lang = 'fr_FR'
        # check the name column of res.users is displayed as 'Identifiant'
        self.start_tour("/web", 'ir_model_fields_translation_fr_tour', login="admin")
        field.update_field_translations('field_description', {'fr_FR': 'Identifiant2'})
        # check the name column of res.users is displayed as 'Identifiant2'
        self.start_tour("/web", 'ir_model_fields_translation_fr_tour2', login="admin")
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools import mute_logger
|
||||
|
||||
|
||||
class IrModuleCase(TransactionCase):
    """Behaviour of ir.module.module icon_image for unusual modules."""

    @mute_logger("odoo.modules.module")
    def test_missing_module_icon(self):
        """A module with no icon on disk falls back to the base icon."""
        base_module = self.env["ir.module.module"].search([("name", "=", "base")])
        ghost_module = self.env["ir.module.module"].create({"name": "missing"})
        self.assertEqual(base_module.icon_image, ghost_module.icon_image)

    @mute_logger("odoo.modules.module")
    def test_new_module_icon(self):
        """An unsaved (new) module record computes an empty icon image."""
        draft_module = self.env["ir.module.module"].new({"name": "missing"})
        self.assertFalse(draft_module.icon_image)

    @mute_logger("odoo.modules.module")
    def test_module_wrong_icon(self):
        """A module whose icon path does not resolve yields no icon image."""
        bad_icon_module = self.env["ir.module.module"].create(
            {"name": "wrong_icon", "icon": "/not/valid.png"}
        )
        self.assertFalse(bad_icon_module.icon_image)
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
class TestModuleCategory(TransactionCase):
    """Constraints on the ir.module.category hierarchy."""

    def test_parent_circular_dependencies(self):
        """Reparenting must never create a cycle in the category tree."""
        Category = self.env['ir.module.category']

        cat_a = Category.create({'name': 'A', 'parent_id': False})
        cat_b = Category.create({'name': 'B', 'parent_id': cat_a.id})
        cat_c = Category.create({'name': 'C', 'parent_id': cat_b.id})

        # A -> B -> C already; making C the parent of A would close a loop
        with self.assertRaises(ValidationError):
            cat_a.write({'parent_id': cat_c.id})
        # a category cannot be its own parent either
        with self.assertRaises(ValidationError):
            cat_b.write({'parent_id': cat_b.id})
|
||||
|
|
@ -0,0 +1,226 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
import psycopg2
|
||||
import psycopg2.errorcodes
|
||||
|
||||
import odoo
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tests import common
|
||||
from odoo.tests.common import BaseCase
|
||||
from odoo.tools.misc import mute_logger
|
||||
|
||||
ADMIN_USER_ID = common.ADMIN_USER_ID
|
||||
|
||||
@contextmanager
def environment():
    """ Return an environment with a new cursor for the current database; the
    cursor is committed and closed after the context block.
    """
    # a fresh cursor means each `with environment()` block is its own
    # transaction, which these sequence tests rely on
    registry = odoo.registry(common.get_db_name())
    with registry.cursor() as cr:
        yield odoo.api.Environment(cr, ADMIN_USER_ID, {})
|
||||
|
||||
|
||||
def drop_sequence(code):
    """Remove every ir.sequence with the given code (test cleanup helper)."""
    with environment() as env:
        env['ir.sequence'].search([('code', '=', code)]).unlink()
|
||||
|
||||
|
||||
class TestIrSequenceStandard(BaseCase):
    """ A few tests for a 'Standard' (i.e. PostgreSQL) sequence. """

    def test_ir_sequence_create(self):
        """ Try to create a sequence object. """
        with environment() as env:
            seq = env['ir.sequence'].create({
                'code': 'test_sequence_type',
                'name': 'Test sequence',
            })
            self.assertTrue(seq)

    def test_ir_sequence_search(self):
        """ Try a search. """
        with environment() as env:
            seqs = env['ir.sequence'].search([])
            self.assertTrue(seqs)

    def test_ir_sequence_draw(self):
        """ Try to draw a number. """
        with environment() as env:
            n = env['ir.sequence'].next_by_code('test_sequence_type')
            self.assertTrue(n)

    def test_ir_sequence_draw_twice(self):
        """ Try to draw a number from two transactions. """
        # two nested environment() blocks -> two concurrent cursors;
        # standard (PostgreSQL) sequences must not block each other
        with environment() as env0:
            with environment() as env1:
                n0 = env0['ir.sequence'].next_by_code('test_sequence_type')
                self.assertTrue(n0)
                n1 = env1['ir.sequence'].next_by_code('test_sequence_type')
                self.assertTrue(n1)

    @classmethod
    def tearDownClass(cls):
        # clean up the sequence shared by the tests above
        drop_sequence('test_sequence_type')
|
||||
|
||||
|
||||
class TestIrSequenceNoGap(BaseCase):
    """ Copy of the previous tests for a 'No gap' sequence. """

    def test_ir_sequence_create_no_gap(self):
        """ Try to create a sequence object. """
        with environment() as env:
            seq = env['ir.sequence'].create({
                'code': 'test_sequence_type_2',
                'name': 'Test sequence',
                'implementation': 'no_gap',
            })
            self.assertTrue(seq)

    def test_ir_sequence_draw_no_gap(self):
        """ Try to draw a number. """
        with environment() as env:
            n = env['ir.sequence'].next_by_code('test_sequence_type_2')
            self.assertTrue(n)

    @mute_logger('odoo.sql_db')
    def test_ir_sequence_draw_twice_no_gap(self):
        """ Try to draw a number from two transactions.
        This is expected to not work.
        """
        with environment() as env0:
            with environment() as env1:
                # NOTE: The error has to be an OperationalError
                # s.t. the automatic request retry (service/model.py) works.
                with self.assertRaises(psycopg2.OperationalError) as e:
                    # the first draw locks the sequence row for this
                    # transaction; the second cursor then fails to lock it
                    n0 = env0['ir.sequence'].next_by_code('test_sequence_type_2')
                    self.assertTrue(n0)
                    n1 = env1['ir.sequence'].next_by_code('test_sequence_type_2')
                self.assertEqual(e.exception.pgcode, psycopg2.errorcodes.LOCK_NOT_AVAILABLE, msg="postgresql returned an incorrect errcode")

    @classmethod
    def tearDownClass(cls):
        drop_sequence('test_sequence_type_2')
|
||||
|
||||
|
||||
class TestIrSequenceChangeImplementation(BaseCase):
    """ Create sequence objects and change their ``implementation`` field. """

    def test_ir_sequence_1_create(self):
        """ Try to create a sequence object. """
        with environment() as env:
            # one standard sequence and one no-gap sequence
            for vals in (
                {'code': 'test_sequence_type_3', 'name': 'Test sequence'},
                {'code': 'test_sequence_type_4', 'name': 'Test sequence',
                 'implementation': 'no_gap'},
            ):
                created = env['ir.sequence'].create(vals)
                self.assertTrue(created)

    def test_ir_sequence_2_write(self):
        """Switch both sequences back and forth between implementations."""
        with environment() as env:
            matching = [('code', 'in', ['test_sequence_type_3', 'test_sequence_type_4'])]
            records = env['ir.sequence'].search(matching)
            records.write({'implementation': 'standard'})
            records.write({'implementation': 'no_gap'})

    def test_ir_sequence_3_unlink(self):
        """Delete both sequences."""
        with environment() as env:
            matching = [('code', 'in', ['test_sequence_type_3', 'test_sequence_type_4'])]
            env['ir.sequence'].search(matching).unlink()

    @classmethod
    def tearDownClass(cls):
        drop_sequence('test_sequence_type_3')
        drop_sequence('test_sequence_type_4')
|
||||
|
||||
|
||||
class TestIrSequenceGenerate(BaseCase):
    """ Create sequence objects and generate some values. """

    def test_ir_sequence_create(self):
        """ Try to create a sequence object. """
        with environment() as env:
            seq = env['ir.sequence'].create({
                'code': 'test_sequence_type_5',
                'name': 'Test sequence',
            })
            self.assertTrue(seq)

        # drawing repeatedly yields consecutive numbers starting at 1
        with environment() as env:
            for i in range(1, 10):
                n = env['ir.sequence'].next_by_code('test_sequence_type_5')
                self.assertEqual(n, str(i))

    def test_ir_sequence_create_no_gap(self):
        """ Try to create a sequence object. """
        with environment() as env:
            seq = env['ir.sequence'].create({
                'code': 'test_sequence_type_6',
                'name': 'Test sequence',
                'implementation': 'no_gap',
            })
            self.assertTrue(seq)

        # the no-gap implementation must also yield consecutive numbers
        with environment() as env:
            for i in range(1, 10):
                n = env['ir.sequence'].next_by_code('test_sequence_type_6')
                self.assertEqual(n, str(i))

    def test_ir_sequence_prefix(self):
        """ Test that drawing from a sequence with an invalid prefix raises a UserError. """

        # try to create a sequence with invalid prefix ('%u' is not a
        # supported legend placeholder)
        with environment() as env:
            seq = env['ir.sequence'].create({
                'code': 'test_sequence_type_7',
                'name': 'Test sequence',
                'prefix': '%u',
                'suffix': '',
            })
            self.assertTrue(seq)

            with self.assertRaises(UserError):
                env['ir.sequence'].next_by_code('test_sequence_type_7')

    @classmethod
    def tearDownClass(cls):
        drop_sequence('test_sequence_type_5')
        drop_sequence('test_sequence_type_6')
        # test_ir_sequence_prefix creates this sequence too; it was
        # previously leaked by the teardown
        drop_sequence('test_sequence_type_7')
|
||||
|
||||
|
||||
class TestIrSequenceInit(common.TransactionCase):
    """Reading/resetting ``number_next`` on a standard sequence."""

    def test_00(self):
        """ test whether the read method returns the right number_next value
        (from postgreSQL sequence and not ir_sequence value)
        """
        # first creation of sequence (normal)
        seq = self.env['ir.sequence'].create({
            'number_next': 1,
            'company_id': 1,
            'padding': 4,
            'number_increment': 1,
            'implementation': 'standard',
            'name': 'test-sequence-00',
        })
        # Call next() 4 times, and check the last returned value
        seq.next_by_id()
        seq.next_by_id()
        seq.next_by_id()
        n = seq.next_by_id()
        # padding=4 formats the counter on four digits
        self.assertEqual(n, "0004", 'The actual sequence value must be 4. reading : %s' % n)
        # reset sequence to 1 by write()
        seq.write({'number_next': 1})
        # Read the value of the current sequence
        n = seq.next_by_id()
        self.assertEqual(n, "0001", 'The actual sequence value must be 1. reading : %s' % n)
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from datetime import date
|
||||
|
||||
from odoo.tests.common import SingleTransactionCase
|
||||
from odoo.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
|
||||
|
||||
|
||||
class TestIrSequenceDateRangeStandard(SingleTransactionCase):
    """ A few tests for a 'Standard' (i.e. PostgreSQL) sequence. """

    def test_ir_sequence_date_range_1_create(self):
        """ Try to create a sequence object with date ranges enabled. """
        seq = self.env['ir.sequence'].create({
            'code': 'test_sequence_date_range',
            'name': 'Test sequence',
            'use_date_range': True,
        })
        self.assertTrue(seq)

    def test_ir_sequence_date_range_2_change_dates(self):
        """ Draw numbers to create a first subsequence then change its date range. Then, try to draw a new number and check a new subsequence was correctly created. """
        # use last year so today's date never falls inside the ranges
        year = date.today().year - 1
        january = lambda d: date(year, 1, d)

        seq16 = self.env['ir.sequence'].with_context(ir_sequence_date=january(16))
        n = seq16.next_by_code('test_sequence_date_range')
        self.assertEqual(n, '1')
        n = seq16.next_by_code('test_sequence_date_range')
        self.assertEqual(n, '2')

        # modify the range of date created
        domain = [('sequence_id.code', '=', 'test_sequence_date_range'), ('date_from', '=', january(1))]
        seq_date_range = self.env['ir.sequence.date_range'].search(domain)
        seq_date_range.write({'date_from': january(18)})
        # Jan 16 no longer falls in an existing range: a new subsequence
        # is created and starts over at 1
        n = seq16.next_by_code('test_sequence_date_range')
        self.assertEqual(n, '1')

        # check the newly created sequence stops at the 17th of January
        domain = [('sequence_id.code', '=', 'test_sequence_date_range'), ('date_from', '=', january(1))]
        seq_date_range = self.env['ir.sequence.date_range'].search(domain)
        self.assertEqual(seq_date_range.date_to, january(17))

    def test_ir_sequence_date_range_3_unlink(self):
        # cleanup: remove the sequence created by the tests above
        seq = self.env['ir.sequence'].search([('code', '=', 'test_sequence_date_range')])
        seq.unlink()
|
||||
|
||||
|
||||
class TestIrSequenceDateRangeNoGap(SingleTransactionCase):
    """ Copy of the previous tests for a 'No gap' sequence. """

    def test_ir_sequence_date_range_1_create_no_gap(self):
        """ Try to create a sequence object. """
        seq = self.env['ir.sequence'].create({
            'code': 'test_sequence_date_range_2',
            'name': 'Test sequence',
            'use_date_range': True,
            'implementation': 'no_gap',
        })
        self.assertTrue(seq)

    def test_ir_sequence_date_range_2_change_dates(self):
        """ Draw numbers to create a first subsequence then change its date range. Then, try to draw a new number and check a new subsequence was correctly created. """
        # use last year so today's date never falls inside the ranges
        year = date.today().year - 1
        january = lambda d: date(year, 1, d)

        # keyword form merges the key into the current context instead of
        # replacing it wholesale (consistent with the standard-sequence test)
        seq16 = self.env['ir.sequence'].with_context(ir_sequence_date=january(16))
        n = seq16.next_by_code('test_sequence_date_range_2')
        self.assertEqual(n, '1')
        n = seq16.next_by_code('test_sequence_date_range_2')
        self.assertEqual(n, '2')

        # modify the range of date created
        domain = [('sequence_id.code', '=', 'test_sequence_date_range_2'), ('date_from', '=', january(1))]
        seq_date_range = self.env['ir.sequence.date_range'].search(domain)
        seq_date_range.write({'date_from': january(18)})
        # Jan 16 is out of every existing range: a fresh subsequence
        # restarts at 1
        n = seq16.next_by_code('test_sequence_date_range_2')
        self.assertEqual(n, '1')

        # check the newly created sequence stops at the 17th of January
        domain = [('sequence_id.code', '=', 'test_sequence_date_range_2'), ('date_from', '=', january(1))]
        seq_date_range = self.env['ir.sequence.date_range'].search(domain)
        self.assertEqual(seq_date_range.date_to, january(17))

    def test_ir_sequence_date_range_3_unlink(self):
        # cleanup: remove the sequence created by the tests above
        seq = self.env['ir.sequence'].search([('code', '=', 'test_sequence_date_range_2')])
        seq.unlink()
|
||||
|
||||
|
||||
class TestIrSequenceDateRangeChangeImplementation(SingleTransactionCase):
    """ Create sequence objects and change their ``implementation`` field. """

    def test_ir_sequence_date_range_1_create(self):
        """ Try to create a sequence object. """
        seq = self.env['ir.sequence'].create({
            'code': 'test_sequence_date_range_3',
            'name': 'Test sequence',
            'use_date_range': True,
        })
        self.assertTrue(seq)

        seq = self.env['ir.sequence'].create({
            'code': 'test_sequence_date_range_4',
            'name': 'Test sequence',
            'use_date_range': True,
            'implementation': 'no_gap',
        })
        self.assertTrue(seq)

    def test_ir_sequence_date_range_2_use(self):
        """ Make some use of the sequences to create some subsequences """
        year = date.today().year - 1
        january = lambda d: date(year, 1, d)

        seq = self.env['ir.sequence']
        # keyword form merges the key into the current context instead of
        # replacing it wholesale (consistent with the other date-range tests)
        seq16 = self.env['ir.sequence'].with_context(ir_sequence_date=january(16))

        # today's subsequence and the January-16 subsequence each count
        # independently from 1, for both implementations
        for i in range(1, 5):
            n = seq.next_by_code('test_sequence_date_range_3')
            self.assertEqual(n, str(i))
        for i in range(1, 5):
            n = seq16.next_by_code('test_sequence_date_range_3')
            self.assertEqual(n, str(i))
        for i in range(1, 5):
            n = seq.next_by_code('test_sequence_date_range_4')
            self.assertEqual(n, str(i))
        for i in range(1, 5):
            n = seq16.next_by_code('test_sequence_date_range_4')
            self.assertEqual(n, str(i))

    def test_ir_sequence_date_range_3_write(self):
        """swap the implementation method on both"""
        domain = [('code', 'in', ['test_sequence_date_range_3', 'test_sequence_date_range_4'])]
        seqs = self.env['ir.sequence'].search(domain)
        seqs.write({'implementation': 'standard'})
        seqs.write({'implementation': 'no_gap'})

    def test_ir_sequence_date_range_4_unlink(self):
        # cleanup: remove both sequences created by these tests
        domain = [('code', 'in', ['test_sequence_date_range_3', 'test_sequence_date_range_4'])]
        seqs = self.env['ir.sequence'].search(domain)
        seqs.unlink()
|
||||
872
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_mail.py
Normal file
872
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_mail.py
Normal file
|
|
@ -0,0 +1,872 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
|
||||
from unittest.mock import patch
|
||||
import email.policy
|
||||
import email.message
|
||||
import re
|
||||
import threading
|
||||
|
||||
from odoo.addons.base.models.ir_mail_server import extract_rfc2822_addresses
|
||||
from odoo.tests.common import BaseCase, TransactionCase
|
||||
from odoo.tests import tagged
|
||||
from odoo.tools import (
|
||||
is_html_empty, html_to_inner_content, html_sanitize, append_content_to_html, plaintext2html,
|
||||
email_domain_normalize, email_normalize, email_split, email_split_and_format, html2plaintext,
|
||||
misc, formataddr, email_anonymize,
|
||||
prepend_html_content,
|
||||
config,
|
||||
)
|
||||
|
||||
from . import test_mail_examples
|
||||
|
||||
|
||||
class TestSanitizer(BaseCase):
|
||||
""" Test the html sanitizer that filters html to remove unwanted attributes """
|
||||
|
||||
def test_basic_sanitizer(self):
    """Plain contents must come back wrapped in paragraphs but unharmed."""
    cases = [
        ("yop", "<p>yop</p>"),  # simple
        ("lala<p>yop</p>xxx", "<p>lala</p><p>yop</p>xxx"),  # trailing text
        ("Merci à l'intérêt pour notre produit.nous vous contacterons bientôt. Merci",
         u"<p>Merci à l'intérêt pour notre produit.nous vous contacterons bientôt. Merci</p>"),  # unicode
    ]
    for source, expected in cases:
        self.assertEqual(html_sanitize(source), expected, 'html_sanitize is broken')
|
||||
|
||||
def test_evil_malicious_code(self):
    """Classic XSS payloads must be neutralized by html_sanitize."""
    # taken from https://www.owasp.org/index.php/XSS_Filter_Evasion_Cheat_Sheet#Tests
    payloads = [
        "<IMG SRC=javascript:alert('XSS')>",  # no quotes and semicolons
        "<IMG SRC=javascript:alert('XSS')>",  # UTF-8 Unicode encoding
        "<IMG SRC=javascript:alert('XSS')>",  # hex encoding
        "<IMG SRC=\"jav\rascript:alert('XSS');\">",  # embedded carriage return
        "<IMG SRC=\"jav\nascript:alert('XSS');\">",  # embedded newline
        "<IMG SRC=\"jav\tascript:alert('XSS');\">",  # embedded tab
        "<IMG SRC=\"jav\tascript:alert('XSS');\">",  # embedded encoded tab
        "<IMG SRC=\"  javascript:alert('XSS');\">",  # spaces and meta-characters
        "<IMG SRC=\"javascript:alert('XSS')\"",  # half-open html
        "<IMG \"\"\"><SCRIPT>alert(\"XSS\")</SCRIPT>\">",  # malformed tag
        "<SCRIPT/XSS SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT>",  # non-alpha-non-digits
        "<SCRIPT/SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT>",  # non-alpha-non-digits
        "<<SCRIPT>alert(\"XSS\");//<</SCRIPT>",  # extraneous open brackets
        "<SCRIPT SRC=http://ha.ckers.org/xss.js?< B >",  # non-closing script tags
        "<INPUT TYPE=\"IMAGE\" SRC=\"javascript:alert('XSS');\">",  # input image
        "<BODY BACKGROUND=\"javascript:alert('XSS')\">",  # body image
        "<IMG DYNSRC=\"javascript:alert('XSS')\">",  # img dynsrc
        "<IMG LOWSRC=\"javascript:alert('XSS')\">",  # img lowsrc
        "<TABLE BACKGROUND=\"javascript:alert('XSS')\">",  # table
        "<TABLE><TD BACKGROUND=\"javascript:alert('XSS')\">",  # td
        "<DIV STYLE=\"background-image: url(javascript:alert('XSS'))\">",  # div background
        "<DIV STYLE=\"background-image:\0075\0072\006C\0028'\006a\0061\0076\0061\0073\0063\0072\0069\0070\0074\003a\0061\006c\0065\0072\0074\0028.1027\0058.1053\0053\0027\0029'\0029\">",  # div background with unicoded exploit
        "<DIV STYLE=\"background-image: url(javascript:alert('XSS'))\">",  # div background + extra characters
        "<IMG SRC='vbscript:msgbox(\"XSS\")'>",  # VBscrip in an image
        "<BODY ONLOAD=alert('XSS')>",  # event handler
        "<BR SIZE=\"&{alert('XSS')}\\>",  # & javascript includes
        "<LINK REL=\"stylesheet\" HREF=\"javascript:alert('XSS');\">",  # style sheet
        "<LINK REL=\"stylesheet\" HREF=\"http://ha.ckers.org/xss.css\">",  # remote style sheet
        "<STYLE>@import'http://ha.ckers.org/xss.css';</STYLE>",  # remote style sheet 2
        "<META HTTP-EQUIV=\"Link\" Content=\"<http://ha.ckers.org/xss.css>; REL=stylesheet\">",  # remote style sheet 3
        "<STYLE>BODY{-moz-binding:url(\"http://ha.ckers.org/xssmoz.xml#xss\")}</STYLE>",  # remote style sheet 4
        "<IMG STYLE=\"xss:expr/*XSS*/ession(alert('XSS'))\">",  # style attribute using a comment to break up expression
    ]
    for payload in payloads:
        cleaned = html_sanitize(payload)
        self.assertNotIn('javascript', cleaned, 'html_sanitize did not remove a malicious javascript')
        self.assertTrue('ha.ckers.org' not in cleaned or 'http://ha.ckers.org/xss.css' in cleaned, 'html_sanitize did not remove a malicious code in %s (%s)' % (payload, cleaned))

    # a down-level hidden conditional-comment block must vanish entirely
    payload = "<!--[if gte IE 4]><SCRIPT>alert('XSS');</SCRIPT><![endif]-->"
    self.assertEqual(html_sanitize(payload, silent=False), '')
|
||||
|
||||
def test_html(self):
    """Sanitizing keeps structural/formatting tags and strips javascript."""
    sanitized_html = html_sanitize(test_mail_examples.MISC_HTML_SOURCE)
    kept_tags = ['<div', '<b', '<i', '<u', '<strike', '<li', '<blockquote', '<a href']
    for kept in kept_tags:
        self.assertIn(kept, sanitized_html, 'html_sanitize stripped too much of original html')
    for forbidden in ['javascript']:
        self.assertNotIn(forbidden, sanitized_html, 'html_sanitize did not remove enough unwanted attributes')
|
||||
|
||||
def test_outlook_mail_sanitize(self):
    """Outlook markup: <o:p> wrappers are dropped, the rest is preserved."""
    case = """<div class="WordSection1">
<p class="MsoNormal">Here is a test mail<o:p></o:p></p>
<p class="MsoNormal"><o:p> </o:p></p>
<p class="MsoNormal">With a break line<o:p></o:p></p>
<p class="MsoNormal"><o:p> </o:p></p>
<p class="MsoNormal"><o:p> </o:p></p>
<p class="MsoNormal">Then two<o:p></o:p></p>
<p class="MsoNormal"><o:p> </o:p></p>
<div>
<div style="border:none;border-top:solid #E1E1E1 1.0pt;padding:3.0pt 0in 0in 0in">
<p class="MsoNormal"><b>From:</b> Mitchell Admin <dummy@example.com>
<br>
<b>Sent:</b> Monday, November 20, 2023 8:34 AM<br>
<b>To:</b> test user <dummy@example.com><br>
<b>Subject:</b> test (#23)<o:p></o:p></p>
</div>
</div>"""

    # same content without the <o:p> tags; note the trailing </div>
    # added to balance the fragment
    expected = """<div class="WordSection1">
<p class="MsoNormal">Here is a test mail</p>
<p class="MsoNormal"> </p>
<p class="MsoNormal">With a break line</p>
<p class="MsoNormal"> </p>
<p class="MsoNormal"> </p>
<p class="MsoNormal">Then two</p>
<p class="MsoNormal"> </p>
<div>
<div style="border:none;border-top:solid #E1E1E1 1.0pt;padding:3.0pt 0in 0in 0in">
<p class="MsoNormal"><b>From:</b> Mitchell Admin <dummy@example.com>
<br>
<b>Sent:</b> Monday, November 20, 2023 8:34 AM<br>
<b>To:</b> test user <dummy@example.com><br>
<b>Subject:</b> test (#23)</p>
</div>
</div></div>"""

    result = html_sanitize(case)
    self.assertEqual(result, expected)
|
||||
|
||||
def test_sanitize_unescape_emails(self):
    """@-containing attribute values must not be escaped as emails."""
    not_emails = [
        '<blockquote cite="mid:CAEJSRZvWvud8c6Qp=wfNG6O1+wK3i_jb33qVrF7XyrgPNjnyUA@mail.gmail.com" type="cite">cat</blockquote>',
        '<img alt="@github-login" class="avatar" src="/web/image/pi" height="36" width="36">']
    for fragment in not_emails:
        sanitized = html_sanitize(fragment)
        # take only left part, as the sanitizer could add data information on node
        left_part = fragment.split('>')[0]
        self.assertNotIn(misc.html_escape(fragment), sanitized, 'html_sanitize stripped emails of original html')
        self.assertIn(left_part, sanitized)
|
||||
|
||||
def test_style_parsing(self):
    """Style sanitization keeps allowed properties and drops the rest."""
    # (input, substrings that must survive, substrings that must be removed)
    test_data = [
        (
            '<span style="position: fixed; top: 0px; left: 50px; width: 40%; height: 50%; background-color: red;">Coin coin </span>',
            ['background-color:red', 'Coin coin'],
            ['position', 'top', 'left']
        ), (
            """<div style='before: "Email Address; coincoin cheval: lapin";
   font-size: 30px; max-width: 100%; after: "Not sure

   this; means: anything ?#ùµ"
   ; some-property: 2px; top: 3'>youplaboum</div>""",
            ['font-size:30px', 'youplaboum'],
            ['some-property', 'top', 'cheval']
        ), (
            '<span style="width">Coincoin</span>',
            [],
            ['width']
        )
    ]

    for test, in_lst, out_lst in test_data:
        new_html = html_sanitize(test, sanitize_attributes=False, sanitize_style=True, strip_style=False, strip_classes=False)
        for text in in_lst:
            self.assertIn(text, new_html)
        for text in out_lst:
            self.assertNotIn(text, new_html)

    # style should not be sanitized if removed
    new_html = html_sanitize(test_data[0][0], sanitize_attributes=False, strip_style=True, strip_classes=False)
    self.assertEqual(new_html, u'<span>Coin coin </span>')
|
||||
|
||||
def test_style_class(self):
    """Class stripping with attribute sanitization enabled."""
    sanitized = html_sanitize(test_mail_examples.REMOVE_CLASS, sanitize_attributes=True, sanitize_style=True, strip_classes=True)
    for expected in test_mail_examples.REMOVE_CLASS_IN:
        self.assertIn(expected, sanitized)
    for removed in test_mail_examples.REMOVE_CLASS_OUT:
        self.assertNotIn(removed, sanitized)
|
||||
|
||||
def test_style_class_only(self):
    """Class stripping alone, with attribute sanitization disabled."""
    sanitized = html_sanitize(test_mail_examples.REMOVE_CLASS, sanitize_attributes=False, sanitize_style=True, strip_classes=True)
    for expected in test_mail_examples.REMOVE_CLASS_IN:
        self.assertIn(expected, sanitized)
    for removed in test_mail_examples.REMOVE_CLASS_OUT:
        self.assertNotIn(removed, sanitized)
|
||||
|
||||
def test_edi_source(self):
    """EDI-like html: valid style and images survive, stray closers go."""
    html = html_sanitize(test_mail_examples.EDI_LIKE_HTML_SOURCE)
    self.assertIn(
        'font-family: \'Lucida Grande\', Ubuntu, Arial, Verdana, sans-serif;', html,
        'html_sanitize removed valid styling')
    self.assertIn(
        'src="https://www.paypal.com/en_US/i/btn/btn_paynowCC_LG.gif"', html,
        'html_sanitize removed valid img')
    self.assertNotIn('</body></html>', html, 'html_sanitize did not remove extra closing tags')
|
||||
|
||||
def test_quote_blockquote(self):
    """Blockquote content gets flagged with data-o-mail-quote."""
    sanitized = html_sanitize(test_mail_examples.QUOTE_BLOCKQUOTE)
    for fragment in test_mail_examples.QUOTE_BLOCKQUOTE_IN:
        self.assertIn(fragment, sanitized)
    for fragment in test_mail_examples.QUOTE_BLOCKQUOTE_OUT:
        self.assertIn(u'<span data-o-mail-quote="1">%s' % misc.html_escape(fragment), sanitized)
|
||||
|
||||
def test_quote_thunderbird(self):
    """Thunderbird-style quoting gets flagged with data-o-mail-quote."""
    sanitized = html_sanitize(test_mail_examples.QUOTE_THUNDERBIRD_1)
    for fragment in test_mail_examples.QUOTE_THUNDERBIRD_1_IN:
        self.assertIn(fragment, sanitized)
    for fragment in test_mail_examples.QUOTE_THUNDERBIRD_1_OUT:
        self.assertIn(u'<span data-o-mail-quote="1">%s</span>' % misc.html_escape(fragment), sanitized)
|
||||
|
||||
def test_quote_hotmail_html(self):
    """Hotmail quoting markers survive sanitization in both samples."""
    for source, kept_in, kept_out in (
        (test_mail_examples.QUOTE_HOTMAIL_HTML,
         test_mail_examples.QUOTE_HOTMAIL_HTML_IN,
         test_mail_examples.QUOTE_HOTMAIL_HTML_OUT),
        (test_mail_examples.HOTMAIL_1,
         test_mail_examples.HOTMAIL_1_IN,
         test_mail_examples.HOTMAIL_1_OUT),
    ):
        sanitized = html_sanitize(source)
        for fragment in kept_in:
            self.assertIn(fragment, sanitized)
        for fragment in kept_out:
            self.assertIn(fragment, sanitized)
|
||||
|
||||
def test_quote_outlook_html(self):
    """Outlook-style quoted HTML fragments are all preserved by the sanitizer."""
    result = html_sanitize(test_mail_examples.QUOTE_OUTLOOK_HTML)
    for fragment in test_mail_examples.QUOTE_OUTLOOK_HTML_IN:
        self.assertIn(fragment, result)
    for fragment in test_mail_examples.QUOTE_OUTLOOK_HTML_OUT:
        self.assertIn(fragment, result)
def test_quote_thunderbird_html(self):
    """Thunderbird HTML quote fixtures are all preserved by the sanitizer."""
    result = html_sanitize(test_mail_examples.QUOTE_THUNDERBIRD_HTML)
    for fragment in test_mail_examples.QUOTE_THUNDERBIRD_HTML_IN:
        self.assertIn(fragment, result)
    for fragment in test_mail_examples.QUOTE_THUNDERBIRD_HTML_OUT:
        self.assertIn(fragment, result)
def test_quote_yahoo_html(self):
    """Yahoo HTML quote fixtures are all preserved by the sanitizer."""
    result = html_sanitize(test_mail_examples.QUOTE_YAHOO_HTML)
    for fragment in test_mail_examples.QUOTE_YAHOO_HTML_IN:
        self.assertIn(fragment, result)
    for fragment in test_mail_examples.QUOTE_YAHOO_HTML_OUT:
        self.assertIn(fragment, result)
def test_quote_basic_text(self):
    """Plain-text signatures ('--') and '>'-prefixed lines are detected as
    quotes and wrapped in data-o-mail-quote spans; regular text is kept."""
    test_data = [
        (
            """This is Sparta!\n--\nAdministrator\n+9988776655""",
            ['This is Sparta!'],
            ['\n--\nAdministrator\n+9988776655'],
        ), (
            """<p>This is Sparta!\n--\nAdministrator</p>""",
            [],
            ['\n--\nAdministrator'],
        ), (
            """<p>This is Sparta!<br/>--<br>Administrator</p>""",
            ['This is Sparta!'],
            [],
        ), (
            """This is Sparta!\n>Ah bon ?\nCertes\n> Chouette !\nClair""",
            ['This is Sparta!', 'Certes', 'Clair'],
            ['\n>Ah bon ?', '\n> Chouette !'],
        ),
    ]
    for source, kept_fragments, quoted_fragments in test_data:
        result = html_sanitize(source)
        for fragment in kept_fragments:
            self.assertIn(fragment, result)
        for fragment in quoted_fragments:
            self.assertIn('<span data-o-mail-quote="1">%s</span>' % misc.html_escape(fragment), result)
def test_quote_signature(self):
    """A '--' signature marker inside a <pre> gets the quote attribute, and so
    does the following <br>."""
    fixtures = [
        (
            """<div>Hello<pre>--<br />Administrator</pre></div>""",
            ["<pre data-o-mail-quote=\"1\">--", "<br data-o-mail-quote=\"1\">"],
        ),
    ]
    for source, expected_fragments in fixtures:
        result = html_sanitize(source)
        for fragment in expected_fragments:
            self.assertIn(fragment, result)
def test_quote_gmail(self):
    """Gmail-style quoted text is wrapped in data-o-mail-quote spans."""
    result = html_sanitize(test_mail_examples.GMAIL_1)
    for kept_fragment in test_mail_examples.GMAIL_1_IN:
        self.assertIn(kept_fragment, result)
    for quoted_fragment in test_mail_examples.GMAIL_1_OUT:
        self.assertIn('<span data-o-mail-quote="1">%s</span>' % misc.html_escape(quoted_fragment), result)
def test_quote_text(self):
    """Text-mode emails have their quoted parts wrapped in quote spans while
    the rest of the content is kept untouched."""
    fixtures = [
        (test_mail_examples.TEXT_1, test_mail_examples.TEXT_1_IN, test_mail_examples.TEXT_1_OUT),
        (test_mail_examples.TEXT_2, test_mail_examples.TEXT_2_IN, test_mail_examples.TEXT_2_OUT),
    ]
    for source, kept_fragments, quoted_fragments in fixtures:
        result = html_sanitize(source)
        for fragment in kept_fragments:
            self.assertIn(fragment, result)
        for fragment in quoted_fragments:
            self.assertIn('<span data-o-mail-quote="1">%s</span>' % misc.html_escape(fragment), result)
def test_quote_bugs(self):
    """Regression fixture: quote detection on a previously-buggy email."""
    result = html_sanitize(test_mail_examples.BUG1)
    for kept_fragment in test_mail_examples.BUG_1_IN:
        self.assertIn(kept_fragment, result)
    for quoted_fragment in test_mail_examples.BUG_1_OUT:
        self.assertIn('<span data-o-mail-quote="1">%s</span>' % misc.html_escape(quoted_fragment), result)
def test_misc(self):
    """Void input and XML/doctype prologues must not crash the sanitizer."""
    # False / void should not crash; they are passed through unchanged
    self.assertEqual(html_sanitize(''), '')
    self.assertEqual(html_sanitize(False), False)

    # Message with xml and doctype tags don't crash; the prologue and <head>
    # are stripped while the body content is preserved
    result = html_sanitize(u'<?xml version="1.0" encoding="iso-8859-1"?>\n<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"\n "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">\n <head>\n <title>404 - Not Found</title>\n </head>\n <body>\n <h1>404 - Not Found</h1>\n </body>\n</html>\n')
    self.assertNotIn('encoding', result)
    self.assertNotIn('<title>404 - Not Found</title>', result)
    self.assertIn('<h1>404 - Not Found</h1>', result)
def test_cid_with_at(self):
    """An '@' inside an image src (cid-like reference) must not be escaped
    when tag sanitization is disabled."""
    img_tag = '<img src="@">'
    sanitized = html_sanitize(img_tag, sanitize_tags=False, strip_classes=True)
    self.assertEqual(img_tag, sanitized, "img with can have cid containing @ and shouldn't be escaped")

# ms office is currently not supported, have to find a way to support it
# def test_30_email_msoffice(self):
#     new_html = html_sanitize(test_mail_examples.MSOFFICE_1, remove=True)
#     for ext in test_mail_examples.MSOFFICE_1_IN:
#         self.assertIn(ext, new_html)
#     for ext in test_mail_examples.MSOFFICE_1_OUT:
#         self.assertNotIn(ext, new_html)
class TestHtmlTools(BaseCase):
    """ Test some of our generic utility functions about html """

    def test_plaintext2html(self):
        # (content, container_tag, expected) — newlines become paragraph/<br/>
        # breaks; a falsy container_tag means no extra wrapping element.
        cases = [
            ("First \nSecond \nThird\n \nParagraph\n\r--\nSignature paragraph", 'div',
             "<div><p>First <br/>Second <br/>Third</p><p>Paragraph</p><p>--<br/>Signature paragraph</p></div>"),
            ("First<p>It should be escaped</p>\nSignature", False,
             "<p>First<p>It should be escaped</p><br/>Signature</p>")
        ]
        for content, container_tag, expected in cases:
            html = plaintext2html(content, container_tag)
            self.assertEqual(html, expected, 'plaintext2html is broken')

    def test_html_html_to_inner_content(self):
        # Tags are stripped and whitespace collapsed. NOTE(review): the second
        # case contains non-ASCII whitespace (\xa0, \u200b) in both source and
        # expected output — keep the literals byte-identical when editing.
        cases = [
            ('<div><p>First <br/>Second <br/>Third Paragraph</p><p>--<br/>Signature paragraph with a <a href="./link">link</a></p></div>',
             'First Second Third Paragraph -- Signature paragraph with a link'),
            ('<p>Now => processing entities​and extra whitespace too. </p>',
             'Now => processing\xa0entities\u200band extra whitespace too.'),
            ('<div>Look what happens with <p>unmatched tags</div>', 'Look what happens with unmatched tags'),
            ('<div>Look what happens with <p unclosed tags</div> Are we good?', 'Look what happens with Are we good?')
        ]
        for content, expected in cases:
            text = html_to_inner_content(content)
            self.assertEqual(text, expected, 'html_html_to_inner_content is broken')

    def test_append_to_html(self):
        # Samples: (html, content, plaintext_flag, preserve_flag, container_tag,
        # expected). 'plaintext_flag' treats content as raw text; 'preserve_flag'
        # keeps newlines in a <pre> block instead of running plaintext2html.
        test_samples = [
            ('<!DOCTYPE...><HTML encoding="blah">some <b>content</b></HtMl>', '--\nYours truly', True, True, False,
             '<!DOCTYPE...><html encoding="blah">some <b>content</b>\n<pre>--\nYours truly</pre>\n</html>'),
            ('<!DOCTYPE...><HTML encoding="blah">some <b>content</b></HtMl>', '--\nYours truly', True, False, False,
             '<!DOCTYPE...><html encoding="blah">some <b>content</b>\n<p>--<br/>Yours truly</p>\n</html>'),
            ('<html><body>some <b>content</b></body></html>', '--\nYours & <truly>', True, True, False,
             '<html><body>some <b>content</b>\n<pre>--\nYours & <truly></pre>\n</body></html>'),
            ('<html><body>some <b>content</b></body></html>', '<!DOCTYPE...>\n<html><body>\n<p>--</p>\n<p>Yours truly</p>\n</body>\n</html>', False, False, False,
             '<html><body>some <b>content</b>\n\n\n<p>--</p>\n<p>Yours truly</p>\n\n\n</body></html>'),
        ]
        for html, content, plaintext_flag, preserve_flag, container_tag, expected in test_samples:
            self.assertEqual(append_content_to_html(html, content, plaintext_flag, preserve_flag, container_tag), expected, 'append_content_to_html is broken')

    def test_is_html_empty(self):
        # Falsy / whitespace-only values count as empty.
        void_strings_samples = ['', False, ' ']
        for content in void_strings_samples:
            self.assertTrue(is_html_empty(content))

        # Markup that renders nothing (only breaks, empty tags, styling-only
        # containers) is also considered empty, whatever the tag soup.
        void_html_samples = [
            '<section><br /> <b><i/></b></section>',
            '<section><br /> <b><i/ ></b></section>',
            '<section><br /> <b>< i/ ></b></section>',
            '<section><br /> <b>< i / ></b></section>',
            '<p><br></p>', '<p><br> </p>', '<p><br /></p >',
            '<p style="margin: 4px"></p>',
            '<div style="margin: 4px"></div>',
            '<p class="oe_testing"><br></p>',
            '<p><span style="font-weight: bolder;"><font style="color: rgb(255, 0, 0);" class=" "></font></span><br></p>',
        ]
        for content in void_html_samples:
            self.assertTrue(is_html_empty(content), 'Failed with %s' % content)

        # Any real text content — or an icon span/i tag — makes it non-empty.
        valid_html_samples = [
            '<p><br>1</p>', '<p>1<br > </p>', '<p style="margin: 4px">Hello World</p>',
            '<div style="margin: 4px"><p>Hello World</p></div>',
            '<p><span style="font-weight: bolder;"><font style="color: rgb(255, 0, 0);" class=" ">W</font></span><br></p>',
            '<span class="fa fa-heart"></span>',
            '<i class="fas fa-home"></i>'
        ]
        for content in valid_html_samples:
            self.assertFalse(is_html_empty(content))

    def test_prepend_html_content(self):
        # The whitespace inside the fixtures is irrelevant: results are
        # compared after stripping all whitespace with re.sub below.
        body = """
<html>
<body>
<div>test</div>
</body>
</html>
"""

        content = "<span>content</span>"

        # Content is inserted right after <body> when one exists.
        result = prepend_html_content(body, content)
        result = re.sub(r'[\s\t]', '', result)
        self.assertEqual(result, "<html><body><span>content</span><div>test</div></body></html>")

        # Without a <body>, content is simply prepended.
        body = "<div>test</div>"
        content = "<span>content</span>"

        result = prepend_html_content(body, content)
        result = re.sub(r'[\s\t]', '', result)
        self.assertEqual(result, "<span>content</span><div>test</div>")

        # <body> without <html> works the same way.
        body = """
<body>
<div>test</div>
</body>
"""

        result = prepend_html_content(body, content)
        result = re.sub(r'[\s\t]', '', result)
        self.assertEqual(result, "<body><span>content</span><div>test</div></body>")

        # When the prepended content is itself a full document, only its body
        # content ends up inside the target body.
        body = """
<html>
<body>
<div>test</div>
</body>
</html>
"""

        content = """
<html>
<body>
<div>test</div>
</body>
</html>
"""
        result = prepend_html_content(body, content)
        result = re.sub(r'[\s\t]', '', result)
        self.assertEqual(result, "<html><body><div>test</div><div>test</div></body></html>")
@tagged('mail_tools')
class TestEmailTools(BaseCase):
    """ Test some of our generic utility functions for emails """

    def test_email_domain_normalize(self):
        """Domains are lowercased; anything that is not a bare domain name
        (full addresses, falsy values) normalizes to False."""
        cases = [
            # (source, expected result, assertion message)
            ("Test.Com", "test.com", "Should have normalized domain"),
            ("email@test.com", False, "Domain is not valid, should return False"),
            # fixed typo in message: 'retunr' -> 'return'
            (False, False, "Domain is not valid, should return False"),
        ]
        for source, expected, msg in cases:
            self.assertEqual(email_domain_normalize(source), expected, msg)

    def test_email_normalize(self):
        """ Test 'email_normalize'. Note that it is built on 'email_split' so
        some use cases are already managed in 'test_email_split(_and_format)'
        hence having more specific test cases here about normalization itself. """
        format_name = 'My Super Prénom'
        format_name_ascii = '=?utf-8?b?TXkgU3VwZXIgUHLDqW5vbQ==?='
        sources = [
            '"Super Déboulonneur" <deboulonneur@example.com>',  # formatted
            'Déboulonneur deboulonneur@example.com',  # wrong formatting
            'deboulonneur@example.com Déboulonneur',  # wrong formatting (happens, alas)
            '"Super Déboulonneur" <DEBOULONNEUR@example.com>, "Super Déboulonneur 2" <deboulonneur2@EXAMPLE.com>',  # multi + case
            ' Déboulonneur deboulonneur@example.com déboulonneur deboulonneur2@example.com',  # wrong formatting + wrong multi
            '"Déboulonneur 😊" <deboulonneur.😊@example.com>',  # unicode in name and email left-part
            '"Déboulonneur" <déboulonneur@examplé.com>',  # utf-8
            '"Déboulonneur" <DéBoulonneur@Examplé.com>',  # utf-8
        ]
        # normalized form: local part kept as-is (case preserved), domain lowered
        expected_list = [
            'deboulonneur@example.com',
            'deboulonneur@example.com',
            'deboulonneur@example.comdéboulonneur',
            False,
            False,  # need fix over 'getadresses'
            'deboulonneur.😊@example.com',
            'déboulonneur@examplé.com',
            'DéBoulonneur@examplé.com',
        ]
        expected_fmt_utf8_list = [
            f'"{format_name}" <deboulonneur@example.com>',
            f'"{format_name}" <deboulonneur@example.com>',
            f'"{format_name}" <deboulonneur@example.comdéboulonneur>',
            f'"{format_name}" <@>',
            f'"{format_name}" <@>',
            f'"{format_name}" <deboulonneur.😊@example.com>',
            f'"{format_name}" <déboulonneur@examplé.com>',
            f'"{format_name}" <DéBoulonneur@examplé.com>',
        ]
        # with an ascii charset the name is RFC2047-encoded and the domain
        # goes through IDNA (punycode)
        expected_fmt_ascii_list = [
            f'{format_name_ascii} <deboulonneur@example.com>',
            f'{format_name_ascii} <deboulonneur@example.com>',
            f'{format_name_ascii} <deboulonneur@example.xn--comdboulonneur-ekb>',
            f'{format_name_ascii} <@>',
            f'{format_name_ascii} <@>',
            f'{format_name_ascii} <deboulonneur.😊@example.com>',
            f'{format_name_ascii} <déboulonneur@xn--exampl-gva.com>',
            f'{format_name_ascii} <DéBoulonneur@xn--exampl-gva.com>',
        ]
        for source, expected, expected_utf8_fmt, expected_ascii_fmt in zip(sources, expected_list, expected_fmt_utf8_list, expected_fmt_ascii_list):
            with self.subTest(source=source):
                self.assertEqual(email_normalize(source, strict=True), expected)
                # standard usage of formataddr
                self.assertEqual(formataddr((format_name, (expected or '')), charset='utf-8'), expected_utf8_fmt)
                # check using INDA at format time, using ascii charset as done when
                # sending emails (see extract_rfc2822_addresses)
                self.assertEqual(formataddr((format_name, (expected or '')), charset='ascii'), expected_ascii_fmt)

    def test_email_split(self):
        """ Test 'email_split' """
        cases = [
            ("John <12345@gmail.com>", ['12345@gmail.com']),  # regular form
            ("d@x; 1@2", ['d@x', '1@2']),  # semi-colon + extra space
            ("'(ss)' <123@gmail.com>, 'foo' <foo@bar>", ['123@gmail.com', 'foo@bar']),  # comma + single-quoting
            ('"john@gmail.com"<johnny@gmail.com>', ['johnny@gmail.com']),  # double-quoting
            ('"<jg>" <johnny@gmail.com>', ['johnny@gmail.com']),  # double-quoting with brackets
            ('@gmail.com', ['@gmail.com']),  # no left-part
            # '@domain' corner cases -- all those return a '@gmail.com' (or equivalent)
            # email address when going through 'getaddresses'
            # - multi @
            ('fr@ncois.th@notgmail.com', ['fr@ncois.th']),
            ('f@r@nc.gz,ois@notgmail.com', ['r@nc.gz', 'ois@notgmail.com']),  # still failing, but differently from 'getaddresses' alone
            ('@notgmail.com esteban_gnole@coldmail.com@notgmail.com', ['esteban_gnole@coldmail.com']),
            # - multi emails (with invalid)
            (
                'Ivan@dezotos.com Cc iv.an@notgmail.com',
                ['Ivan@dezotos.com', 'iv.an@notgmail.com']
            ),
            (
                'ivan-dredi@coldmail.com ivan.dredi@notgmail.com',
                ['ivan-dredi@coldmail.com', 'ivan.dredi@notgmail.com']
            ),
            (
                '@notgmail.com ivan@coincoin.com.ar jeanine@coincoin.com.ar',
                ['ivan@coincoin.com.ar', 'jeanine@coincoin.com.ar']
            ),
            (
                '@notgmail.com whoareyou@youhou.com. ivan.dezotos@notgmail.com',
                ['whoareyou@youhou.com', 'ivan.dezotos@notgmail.com']
            ),
            (
                'francois@nc.gz CC: ois@notgmail.com ivan@dezotos.com',
                ['francois@nc.gz', 'ois@notgmail.com', 'ivan@dezotos.com']
            ),
            (
                'francois@nc.gz CC: ois@notgmail.com,ivan@dezotos.com',
                ['francois@nc.gzCC', 'ois@notgmail.com', 'ivan@dezotos.com']
            ),
            # - separated with '/''
            (
                'ivan.plein@dezotos.com / ivan.plu@notgmail.com',
                ['ivan.plein@dezotos.com', 'ivan.plu@notgmail.com']
            ),
            (
                '@notgmail.com ivan.parfois@notgmail.com/ ivan.souvent@notgmail.com',
                ['ivan.parfois@notgmail.com', 'ivan.souvent@notgmail.com']
            ),
            # - separated with '-''
            ('ivan@dezotos.com - ivan.dezotos@notgmail.com', ['ivan@dezotos.com', 'ivan.dezotos@notgmail.com']),
            (
                'car.pool@notgmail.com - co (TAMBO) Registration car.warsh@notgmail.com',
                ['car.pool@notgmail.com', 'car.warsh@notgmail.com']
            ),
        ]
        for source, expected in cases:
            with self.subTest(source=source):
                self.assertEqual(email_split(source), expected)

    def test_email_split_and_format(self):
        """ Test 'email_split_and_format', notably in case of multi encapsulation
        or multi emails. """
        sources = [
            'deboulonneur@example.com',
            '"Super Déboulonneur" <deboulonneur@example.com>',  # formatted
            # wrong formatting
            'Déboulonneur <deboulonneur@example.com',  # with a final typo
            'Déboulonneur deboulonneur@example.com',  # wrong formatting
            'deboulonneur@example.com Déboulonneur',  # wrong formatting (happens, alas)
            # multi
            'Déboulonneur, deboulonneur@example.com',  # multi-like with errors
            'deboulonneur@example.com, deboulonneur2@example.com',  # multi
            ' Déboulonneur deboulonneur@example.com déboulonneur deboulonneur2@example.com',  # wrong formatting + wrong multi
            # format / misc
            '"Déboulonneur" <"Déboulonneur Encapsulated" <deboulonneur@example.com>>',  # double formatting
            '"Super Déboulonneur" <deboulonneur@example.com>, "Super Déboulonneur 2" <deboulonneur2@example.com>',
            '"Super Déboulonneur" <deboulonneur@example.com>, wrong, ',
            '"Déboulonneur 😊" <deboulonneur@example.com>',  # unicode in name
            '"Déboulonneur 😊" <deboulonneur.😊@example.com>',  # unicode in name and email left-part
            '"Déboulonneur" <déboulonneur@examplé.com>',  # utf-8
        ]
        expected_list = [
            ['deboulonneur@example.com'],
            ['"Super Déboulonneur" <deboulonneur@example.com>'],
            # wrong formatting
            ['"Déboulonneur" <deboulonneur@example.com>'],
            ['"Déboulonneur" <deboulonneur@example.com>'],  # extra part correctly considered as a name
            ['deboulonneur@example.comDéboulonneur'],  # concatenated, not sure why
            # multi
            ['deboulonneur@example.com'],
            ['deboulonneur@example.com', 'deboulonneur2@example.com'],
            ['deboulonneur@example.com', 'deboulonneur2@example.com'],  # need fix over 'getadresses'
            # format / misc
            ['deboulonneur@example.com'],
            ['"Super Déboulonneur" <deboulonneur@example.com>', '"Super Déboulonneur 2" <deboulonneur2@example.com>'],
            ['"Super Déboulonneur" <deboulonneur@example.com>'],
            ['"Déboulonneur 😊" <deboulonneur@example.com>'],
            ['"Déboulonneur 😊" <deboulonneur.😊@example.com>'],
            ['"Déboulonneur" <déboulonneur@examplé.com>'],
        ]
        for source, expected in zip(sources, expected_list):
            with self.subTest(source=source):
                self.assertEqual(email_split_and_format(source), expected)

    def test_email_formataddr(self):
        """Names are quoted (or RFC2047-encoded under ascii); unicode domains
        are IDNA-encoded under ascii; a unicode local part stays as-is."""
        email_base = 'joe@example.com'
        email_idna = 'joe@examplé.com'
        cases = [
            # (name, address), charsets expected
            (('', email_base), ['ascii', 'utf-8'], 'joe@example.com'),
            (('joe', email_base), ['ascii', 'utf-8'], '"joe" <joe@example.com>'),
            (('joe doe', email_base), ['ascii', 'utf-8'], '"joe doe" <joe@example.com>'),
            (('joe"doe', email_base), ['ascii', 'utf-8'], '"joe\\"doe" <joe@example.com>'),
            (('joé', email_base), ['ascii'], '=?utf-8?b?am/DqQ==?= <joe@example.com>'),
            (('joé', email_base), ['utf-8'], '"joé" <joe@example.com>'),
            (('', email_idna), ['ascii'], 'joe@xn--exampl-gva.com'),
            (('', email_idna), ['utf-8'], 'joe@examplé.com'),
            (('joé', email_idna), ['ascii'], '=?utf-8?b?am/DqQ==?= <joe@xn--exampl-gva.com>'),
            (('joé', email_idna), ['utf-8'], '"joé" <joe@examplé.com>'),
            (('', 'joé@example.com'), ['ascii', 'utf-8'], 'joé@example.com'),
        ]

        for pair, charsets, expected in cases:
            for charset in charsets:
                with self.subTest(pair=pair, charset=charset):
                    self.assertEqual(formataddr(pair, charset), expected)

    def test_extract_rfc2822_addresses(self):
        """Addresses are extracted (including email-like names) and unicode
        domains are IDNA-encoded; malformed entries are dropped."""
        cases = [
            ('"Admin" <admin@example.com>', ['admin@example.com']),
            ('"Admin" <admin@example.com>, Demo <demo@test.com>', ['admin@example.com', 'demo@test.com']),
            ('admin@example.com', ['admin@example.com']),
            ('"Admin" <admin@example.com>, Demo <malformed email>', ['admin@example.com']),
            ('admin@éxample.com', ['admin@xn--xample-9ua.com']),
            # email-like names
            (
                '"admin@éxample.com" <admin@éxample.com>',
                ['admin@xn--xample-9ua.com', 'admin@xn--xample-9ua.com'],
            ),
            ('"Robert Le Grand" <robert@notgmail.com>', ['robert@notgmail.com']),
            ('"robert@notgmail.com" <robert@notgmail.com>', ['robert@notgmail.com', 'robert@notgmail.com']),
            # "@' in names
            ('"Bike @ Home" <bike@example.com>', ['bike@example.com']),
            ('"Bike@Home" <bike@example.com>', ['Bike@Home', 'bike@example.com']),
            # combo @ in names + multi email
            (
                '"Not an Email" <robert@notgmail.com>, "robert@notgmail.com" <robert@notgmail.com>',
                ['robert@notgmail.com', 'robert@notgmail.com', 'robert@notgmail.com'],
            ),
            # accents
            ('DéBoulonneur@examplé.com', ['DéBoulonneur@xn--exampl-gva.com']),
        ]
        for source, expected in cases:
            with self.subTest(source=source):
                self.assertEqual(extract_rfc2822_addresses(source), expected)

    def test_email_anonymize(self):
        """Local part is redacted after the first character(s); with
        redact_domain=True the domain label is redacted too (TLD kept)."""
        cases = [
            # examples
            ('admin@example.com', 'a****@example.com', 'a****@e******.com'),  # short
            ('portal@example.com', 'p***al@example.com', 'p***al@e******.com'),  # long

            # edge cases
            ('a@example.com', 'a@example.com', 'a@e******.com'),  # single letter
            ('joé@example.com', 'j**@example.com', 'j**@e******.com'),  # hidden unicode
            ('élise@example.com', 'é****@example.com', 'é****@e******.com'),  # visible unicode
            ('admin@[127.0.0.1]', 'a****@[127.0.0.1]', 'a****@[127.0.0.1]'),  # IPv4
            ('admin@[IPv6:::1]', 'a****@[IPv6:::1]', 'a****@[IPv6:::1]'),  # IPv6

            # bad cases, to show how the system behave
            ('', '', ''),  # empty string
            ('@example.com', '@example.com', '@e******.com'),  # missing local part
            ('john', 'j***', 'j***'),  # missing domain
            ('Jo <j@example.com>', 'J****@example.com>', 'J****@e******.com>'),  # non-normalized
            ('admin@com', 'a****@com', 'a****@com'),  # dotless domain, prohibited by icann
        ]
        for source, expected, expected_redacted_domain in cases:
            with self.subTest(source=source):
                self.assertEqual(email_anonymize(source), expected)
                self.assertEqual(
                    email_anonymize(source, redact_domain=True),
                    expected_redacted_domain,
                )
class EmailConfigCase(TransactionCase):
    @patch.dict(config.options, {"email_from": "settings@example.com"})
    def test_default_email_from(self, *args):
        """Email from setting is respected."""

        def _build_test_email():
            # Same build_email call repeated for both configurations.
            return self.env["ir.mail_server"].build_email(
                False, "recipient@example.com", "Subject",
                "The body of an email",
            )

        # The ICP values take precedence over the config-file setting.
        icp = self.env["ir.config_parameter"].sudo()
        icp.set_param("mail.catchall.domain", "example.org")
        icp.set_param("mail.default.from", "icp")
        self.assertEqual(_build_test_email()["From"], "icp@example.org")

        # Without ICP, the config file/CLI setting is used.
        icp.set_param("mail.default.from", False)
        self.assertEqual(_build_test_email()["From"], "settings@example.com")
class _FakeSMTP:
    """SMTP stub: records outgoing messages as strings instead of sending."""

    def __init__(self):
        self.messages = []
        self.from_filter = 'example.com'

    def sendmail(self, smtp_from, smtp_to_list, message_str,
                 mail_options=(), rcpt_options=()):
        # Entry point used by Python 3 before 3.7.4 (raw message string).
        self.messages.append(message_str)

    def send_message(self, message, smtp_from, smtp_to_list,
                     mail_options=(), rcpt_options=()):
        # Entry point used by Python 3.7.4+ (EmailMessage object).
        self.messages.append(message.as_string())
class TestEmailMessage(TransactionCase):
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Shared SMTP stub: captures messages instead of opening a connection.
        cls._fake_smtp = _FakeSMTP()

    def build_email(self, **kwargs):
        # Build an email through ir.mail_server with sane from/to/subject
        # defaults so tests only specify what they care about.
        kwargs.setdefault('email_from', 'from@example.com')
        kwargs.setdefault('email_to', 'to@example.com')
        kwargs.setdefault('subject', 'subject')
        return self.env['ir.mail_server'].build_email(**kwargs)

    def send_email(self, msg):
        # Send msg through the fake SMTP session and return the raw wire
        # string. The thread's 'testing' flag is forced off so send_email
        # takes the real sending code path instead of the test shortcut.
        with patch.object(threading.current_thread(), 'testing', False):
            self.env['ir.mail_server'].send_email(msg, smtp_session=self._fake_smtp)
        return self._fake_smtp.messages.pop()

    def test_bpo_34424_35805(self):
        """Ensure all email sent are bpo-34424 and bpo-35805 free"""
        msg = email.message.EmailMessage(policy=email.policy.SMTP)
        msg['From'] = '"Joé Doe" <joe@example.com>'
        msg['To'] = '"Joé Doe" <joe@example.com>'

        # Message-Id & References fields longer than 77 chars (bpo-35805)
        msg['Message-Id'] = '<929227342217024.1596730490.324691772460938-example-30661-some.reference@test-123.example.com>'
        msg['References'] = '<345227342212345.1596730777.324691772483620-example-30453-other.reference@test-123.example.com>'

        # The long header values must not be folded/damaged on the wire.
        msg_on_the_wire = self.send_email(msg)
        self.assertEqual(msg_on_the_wire,
            'From: =?utf-8?q?Jo=C3=A9?= Doe <joe@example.com>\r\n'
            'To: =?utf-8?q?Jo=C3=A9?= Doe <joe@example.com>\r\n'
            'Message-Id: <929227342217024.1596730490.324691772460938-example-30661-some.reference@test-123.example.com>\r\n'
            'References: <345227342212345.1596730777.324691772483620-example-30453-other.reference@test-123.example.com>\r\n'
            '\r\n'
        )

    def test_alternative_correct_order(self):
        """
        RFC-1521 7.2.3. The Multipart/alternative subtype
        > the alternatives appear in an order of increasing faithfulness
        > to the original content. In general, the best choice is the
        > LAST part of a type supported by the recipient system's local
        > environment.

        Also, the MIME-Version header should be present in BOTH the
        enveloppe AND the parts
        """
        msg = self.build_email(body='<p>Hello world</p>', subtype='html')
        msg_on_the_wire = self.send_email(msg)

        self.assertGreater(msg_on_the_wire.index('text/html'), msg_on_the_wire.index('text/plain'),
            "The html part should be preferred (=appear after) to the text part")
        # '=' * 15 is the MIME boundary marker prefix on the wire.
        self.assertEqual(msg_on_the_wire.count('==============='), 2 + 2,  # +2 for the header and the footer
            "There should be 2 parts: one text and one html")
        self.assertEqual(msg_on_the_wire.count('MIME-Version: 1.0'), 3,
            "There should be 3 headers MIME-Version: one on the enveloppe, "
            "one on the html part, one on the text part")

    def test_comment_malformed(self):
        # NOTE(review): this and the following tests exercise html_sanitize
        # against malformed HTML comments hiding script payloads; they look
        # misplaced in an email-message test case — confirm intended home.
        html = '''<!-- malformed-close --!> <img src='x' onerror='alert(1)'></img> --> comment <!-- normal comment --> --> out of context balise --!>'''
        html_result = html_sanitize(html)
        self.assertNotIn('alert(1)', html_result)

    def test_multiline(self):
        # A multi-line comment with the malformed '--!>' terminator must not
        # let the trailing <script> payload through.
        payload = """
<div> <!--
multi line comment
--!> </div> <script> alert(1) </script> -->
"""
        html_result = html_sanitize(payload)
        self.assertNotIn('alert(1)', html_result)

    def test_abrupt_close(self):
        # Abruptly closed comments ('<!-->', '<!--->') must not expose the
        # script payload that follows them.
        payload = """<!--> <script> alert(1) </script> -->"""
        html_result = html_sanitize(payload)
        self.assertNotIn('alert(1)', html_result)

        payload = """<!---> <script> alert(1) </script> -->"""
        html_result = html_sanitize(payload)
        self.assertNotIn('alert(1)', html_result)

    def test_abrut_malformed(self):
        # NOTE(review): method name looks like a typo of 'abrupt'; kept as-is
        # to avoid changing the test identifier.
        payload = """<!--!> <script> alert(1) </script> -->"""
        html_result = html_sanitize(payload)
        self.assertNotIn('alert(1)', html_result)

        payload = """<!---!> <script> alert(1) </script> -->"""
        html_result = html_sanitize(payload)
        self.assertNotIn('alert(1)', html_result)
class TestMailTools(BaseCase):
    """ Test mail utility methods. """

    def test_html2plaintext(self):
        # Falsy/blank inputs: False is stringified, whitespace collapses to ''.
        self.assertEqual(html2plaintext(False), 'False')
        self.assertEqual(html2plaintext('\t'), '')
        self.assertEqual(html2plaintext(' '), '')
        # Headings map to markdown-like emphasis, tables flatten to one line
        # per cell, and special characters/entities are preserved.
        self.assertEqual(html2plaintext("""<h1>Title</h1>
<h2>Sub title</h2>
<br/>
<h3>Sub sub title</h3>
<h4>Sub sub sub title</h4>
<p>Paragraph <em>with</em> <b>bold</b></p>
<table><tr><td>table element 1</td></tr><tr><td>table element 2</td></tr></table>
<p><special-chars>0 < 10 & 10 > 0</special-chars></p>"""),
            """**Title**
**Sub title**

*Sub sub title*
Sub sub sub title
Paragraph /with/ *bold*

table element 1
table element 2
0 < 10 & \N{NO-BREAK SPACE} 10 > 0""")
        # Images become a footnote-style reference list keeping the src URL.
        self.assertEqual(html2plaintext('<p><img src="/web/image/428-c064ab1b/test-image.jpg?access_token=f72b5ec5-a363-45fb-b9ad-81fc794d6d7b" class="img img-fluid o_we_custom_image"><br></p>'),
            """test-image [1]


[1] /web/image/428-c064ab1b/test-image.jpg?access_token=f72b5ec5-a363-45fb-b9ad-81fc794d6d7b""")
|
|
@ -0,0 +1,629 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
MISC_HTML_SOURCE = u"""
|
||||
<font size="2" style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; ">test1</font>
|
||||
<div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; font-style: normal; ">
|
||||
<b>test2</b></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
|
||||
<i>test3</i></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
|
||||
<u>test4</u></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; ">
|
||||
<strike>test5</strike></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; ">
|
||||
<font size="5">test6</font></div><div><ul><li><font color="#1f1f1f" face="monospace" size="2">test7</font></li><li>
|
||||
<font color="#1f1f1f" face="monospace" size="2">test8</font></li></ul><div><ol><li><font color="#1f1f1f" face="monospace" size="2">test9</font>
|
||||
</li><li><font color="#1f1f1f" face="monospace" size="2">test10</font></li></ol></div></div>
|
||||
<blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><div><div><font color="#1f1f1f" face="monospace" size="2">
|
||||
test11</font></div></div></div></blockquote><blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;">
|
||||
<blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><font color="#1f1f1f" face="monospace" size="2">
|
||||
test12</font></div><div><font color="#1f1f1f" face="monospace" size="2"><br></font></div></blockquote></blockquote>
|
||||
<font color="#1f1f1f" face="monospace" size="2"><a href="http://google.com">google</a></font>
|
||||
<a href="javascript:alert('malicious code')">test link</a>
|
||||
"""
|
||||
|
||||
EDI_LIKE_HTML_SOURCE = u"""<div style="font-family: 'Lucida Grande', Ubuntu, Arial, Verdana, sans-serif; font-size: 12px; color: rgb(34, 34, 34); background-color: #FFF; ">
|
||||
<p>Hello {{ object.partner_id.name }},</p>
|
||||
<p>A new invoice is available for you: </p>
|
||||
<p style="border-left: 1px solid #8e0000; margin-left: 30px;">
|
||||
<strong>REFERENCES</strong><br />
|
||||
Invoice number: <strong>{{ object.number }}</strong><br />
|
||||
Invoice total: <strong>{{ object.amount_total }} {{ object.currency_id.name }}</strong><br />
|
||||
Invoice date: {{ object.invoice_date }}<br />
|
||||
Order reference: {{ object.origin }}<br />
|
||||
Your contact: <a href="mailto:{{ object.user_id.email or '' }}?subject=Invoice%20{{ object.number }}">{{ object.user_id.name }}</a>
|
||||
</p>
|
||||
<br/>
|
||||
<p>It is also possible to directly pay with Paypal:</p>
|
||||
<a style="margin-left: 120px;" href="{{ object.paypal_url }}">
|
||||
<img class="oe_edi_paypal_button" src="https://www.paypal.com/en_US/i/btn/btn_paynowCC_LG.gif"/>
|
||||
</a>
|
||||
<br/>
|
||||
<p>If you have any question, do not hesitate to contact us.</p>
|
||||
<p>Thank you for choosing {{ object.company_id.name or 'us' }}!</p>
|
||||
<br/>
|
||||
<br/>
|
||||
<div style="width: 375px; margin: 0px; padding: 0px; background-color: #8E0000; border-top-left-radius: 5px 5px; border-top-right-radius: 5px 5px; background-repeat: repeat no-repeat;">
|
||||
<h3 style="margin: 0px; padding: 2px 14px; font-size: 12px; color: #DDD;">
|
||||
<strong style="text-transform:uppercase;">{{ object.company_id.name }}</strong></h3>
|
||||
</div>
|
||||
<div style="width: 347px; margin: 0px; padding: 5px 14px; line-height: 16px; background-color: #F2F2F2;">
|
||||
<span style="color: #222; margin-bottom: 5px; display: block; ">
|
||||
{{ object.company_id.street }}<br/>
|
||||
{{ object.company_id.street2 }}<br/>
|
||||
{{ object.company_id.zip }} {{ object.company_id.city }}<br/>
|
||||
{{ object.company_id.state_id and ('%s, ' % object.company_id.state_id.name) or '' }} {{ object.company_id.country_id.name or '' }}<br/>
|
||||
</span>
|
||||
<div style="margin-top: 0px; margin-right: 0px; margin-bottom: 0px; margin-left: 0px; padding-top: 0px; padding-right: 0px; padding-bottom: 0px; padding-left: 0px; ">
|
||||
Phone: {{ object.company_id.phone }}
|
||||
</div>
|
||||
<div>
|
||||
Web : <a href="{{ object.company_id.website }}">{{ object.company_id.website }}</a>
|
||||
</div>
|
||||
</div>
|
||||
</div></body></html>"""
|
||||
|
||||
|
||||
# QUOTES
|
||||
|
||||
QUOTE_BLOCKQUOTE = u"""<html>
|
||||
<head>
|
||||
<meta content="text/html; charset=utf-8" http-equiv="Content-Type">
|
||||
</head>
|
||||
<body text="#000000" bgcolor="#FFFFFF">
|
||||
<div class="moz-cite-prefix">On 05-01-16 05:52, Andreas Becker
|
||||
wrote:<br>
|
||||
</div>
|
||||
<blockquote
|
||||
cite="mid:CAEJSRZvWvud8c6Qp=wfNG6O1+wK3i_jb33qVrF7XyrgPNjnyUA@mail.gmail.com"
|
||||
type="cite"><base href="https://www.odoo.com">
|
||||
<div dir="ltr">Yep Dominique that is true, as Postgres was the
|
||||
base of all same as Odoo and MySQL etc came much later.Â
|
||||
<div><br>
|
||||
</div>
|
||||
<div>Unfortunately many customers who ask for and ERP are with
|
||||
hosters which still don't provide Postgres and MySQL is
|
||||
available everywhere. Additionally Postgres seems for many
|
||||
like a big black box while MySQL is very well documented and
|
||||
understandable and it has PHPmyAdmin which is far ahead of any
|
||||
tool managing postgres DBs.</div>
|
||||
<br>
|
||||
</div>
|
||||
</blockquote>
|
||||
<br>
|
||||
I don't care how much you are highlighting the advantages of Erpnext
|
||||
on this Odoo mailinglist, but when you start implying that Postgres
|
||||
is not well documented it really hurts.<br>
|
||||
<br>
|
||||
<pre class="moz-signature" cols="72">--
|
||||
Opener B.V. - Business solutions driven by open source collaboration
|
||||
|
||||
Stefan Rijnhart - Consultant/developer
|
||||
|
||||
mail: <a class="moz-txt-link-abbreviated" href="mailto:stefan@opener.am">stefan@opener.am</a>
|
||||
tel: +31 (0) 20 3090 139
|
||||
web: <a class="moz-txt-link-freetext" href="https://opener.am">https://opener.am</a></pre>
|
||||
</body>
|
||||
</html>"""
|
||||
|
||||
QUOTE_BLOCKQUOTE_IN = [u"""<blockquote cite="mid:CAEJSRZvWvud8c6Qp=wfNG6O1+wK3i_jb33qVrF7XyrgPNjnyUA@mail.gmail.com" type="cite" data-o-mail-quote-node="1" data-o-mail-quote="1">"""]
|
||||
QUOTE_BLOCKQUOTE_OUT = [u"""--
|
||||
Opener B.V. - Business solutions driven by open source collaboration
|
||||
|
||||
Stefan Rijnhart - Consultant/developer"""]
|
||||
|
||||
|
||||
QUOTE_THUNDERBIRD_HTML = u"""<html>
|
||||
<head>
|
||||
<meta content="text/html; charset=utf-8" http-equiv="Content-Type">
|
||||
</head>
|
||||
<body text="#000000" bgcolor="#FFFFFF">
|
||||
<div class="moz-cite-prefix">On 01/05/2016 10:24 AM, Raoul
|
||||
Poilvache wrote:<br>
|
||||
</div>
|
||||
<blockquote
|
||||
cite="mid:CAP76m_WWFH2KVrbjOxbaozvkmbzZYLWJnQ0n0sy9XpGaCWRf1g@mail.gmail.com"
|
||||
type="cite">
|
||||
<div dir="ltr"><b><i>Test reply. The suite.</i></b><br clear="all">
|
||||
<div><br>
|
||||
</div>
|
||||
-- <br>
|
||||
<div class="gmail_signature">Raoul Poilvache</div>
|
||||
</div>
|
||||
</blockquote>
|
||||
Top cool !!!<br>
|
||||
<br>
|
||||
<pre class="moz-signature" cols="72">--
|
||||
Raoul Poilvache
|
||||
</pre>
|
||||
</body>
|
||||
</html>"""
|
||||
|
||||
|
||||
QUOTE_THUNDERBIRD_HTML_IN = [u"""<blockquote cite="mid:CAP76m_WWFH2KVrbjOxbaozvkmbzZYLWJnQ0n0sy9XpGaCWRf1g@mail.gmail.com" type="cite" data-o-mail-quote-node="1" data-o-mail-quote="1">"""]
|
||||
QUOTE_THUNDERBIRD_HTML_OUT = [u"""<pre class="moz-signature" cols="72"><span data-o-mail-quote="1">--
|
||||
Raoul Poilvache
|
||||
</span></pre>"""]
|
||||
|
||||
|
||||
QUOTE_HOTMAIL_HTML = u"""
|
||||
<html>
|
||||
<head>
|
||||
<style><!--
|
||||
.hmmessage P
|
||||
{
|
||||
margin:0px=3B
|
||||
padding:0px
|
||||
}
|
||||
body.hmmessage
|
||||
{
|
||||
font-size: 12pt=3B
|
||||
font-family:Calibri
|
||||
}
|
||||
--></style></head>
|
||||
<body class='hmmessage'>
|
||||
<div dir='ltr'>I don't like that.<br><br>
|
||||
<div><hr id="stopSpelling">
|
||||
Date: Tue=2C 5 Jan 2016 10:24:48 +0100<br>
|
||||
Subject: Test from gmail<br>
|
||||
From: poilvache@example.com<br>
|
||||
To: tartelette@example.com grosbedon@example.com<br><br>
|
||||
<div dir="ltr"><b><i>Test reply. The suite.</i></b>
|
||||
<br clear="all"><div><br>
|
||||
</div>-- <br><div class="ecxgmail_signature">
|
||||
Raoul Poilvache</div>
|
||||
</div></div></div></body></html>"""
|
||||
QUOTE_HOTMAIL_HTML_IN = [u"""I don't like that.<br><br>"""]
|
||||
QUOTE_HOTMAIL_HTML_OUT = [
|
||||
u"""<hr id="stopSpelling" data-o-mail-quote="1">""",
|
||||
u"""<div dir="ltr" data-o-mail-quote="1"><b data-o-mail-quote="1"><i data-o-mail-quote="1">Test reply. The suite.</i></b>"""]
|
||||
|
||||
|
||||
QUOTE_OUTLOOK_HTML = """
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=3Diso-8859-=
|
||||
1">
|
||||
<style type="text/css" style="display:none;"> P {margin-top:0;margin-bo=
|
||||
ttom:0;}
|
||||
</style>
|
||||
</head>
|
||||
<body dir="ltr">
|
||||
<div id="mail_body">
|
||||
Reply from outlook
|
||||
</div>
|
||||
<div style="font-family: Calibri, Helvetica, sans-serif; font-size: 12pt;=
|
||||
color: rgb(0, 0, 0);">
|
||||
<br>
|
||||
</div>
|
||||
<div class="elementToProof" id="Signature">John</div>
|
||||
<div id="appendonsend"></div>
|
||||
<div style="font-family:Calibri,Helvetica,sans-serif; font-size:12pt; col=
|
||||
or:rgb(0,0,0)">
|
||||
<br>
|
||||
</div>
|
||||
<hr tabindex="-1" style="display:inline-block; width:98%">
|
||||
<div id="divRplyFwdMsg" dir="ltr">
|
||||
<font face="Calibri, sans-serif" color="#000000" style="font-size:11pt"><b>De :</b> test@example.com<br>
|
||||
<b>=C0 :</b> test@example.com <test@example.com><br>
|
||||
<b>Objet :</b> Parent message</font>
|
||||
<div> </div>
|
||||
</div>
|
||||
<div>
|
||||
<div dir="ltr">Parent email body</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
QUOTE_OUTLOOK_HTML_IN = [
|
||||
"""Reply from outlook""",
|
||||
"""<div id="mail_body">""",
|
||||
]
|
||||
QUOTE_OUTLOOK_HTML_OUT = [
|
||||
"""<div class="elementToProof" id="Signature" data-o-mail-quote-container="1" data-o-mail-quote="1">John</div>""",
|
||||
"""<div id="appendonsend" data-o-mail-quote-container="1" data-o-mail-quote="1"></div>""", # quoted when empty in case there's a signature before
|
||||
"""<hr tabindex="-1" style="display:inline-block; width:98%" data-o-mail-quote="1">""",
|
||||
"""<div data-o-mail-quote-container="1" data-o-mail-quote="1">
|
||||
<div dir="ltr" data-o-mail-quote="1">Parent email body</div>
|
||||
</div>""",
|
||||
"""<div id="divRplyFwdMsg" dir="ltr" data-o-mail-quote-container="1" data-o-mail-quote="1">""",
|
||||
]
|
||||
|
||||
|
||||
QUOTE_THUNDERBIRD_1 = u"""<div>On 11/08/2012 05:29 PM,
|
||||
<a href="mailto:dummy@example.com">dummy@example.com</a> wrote:<br></div>
|
||||
<blockquote>
|
||||
<div>I contact you about our meeting for tomorrow. Here is the
|
||||
schedule I propose:</div>
|
||||
<div>
|
||||
<ul><li>9 AM: brainstorming about our new amazing business
|
||||
app</span></li></li>
|
||||
<li>9.45 AM: summary</li>
|
||||
<li>10 AM: meeting with Fabien to present our app</li>
|
||||
</ul></div>
|
||||
<div>Is everything ok for you ?</div>
|
||||
<div>
|
||||
<p>--<br>
|
||||
Administrator</p>
|
||||
</div>
|
||||
<div>
|
||||
<p>Log in our portal at:
|
||||
<a href="http://localhost:8069#action=login&db=mail_1&token=rHdWcUART5PhEnJRaXjH">http://localhost:8069#action=login&db=mail_1&token=rHdWcUART5PhEnJRaXjH</a></p>
|
||||
</div>
|
||||
</blockquote>
|
||||
Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.<br><br>
|
||||
Did you receive my email about my new laptop, by the way ?<br><br>
|
||||
Raoul.<br><pre>--
|
||||
Raoul Grosbedonnée
|
||||
</pre>"""
|
||||
|
||||
QUOTE_THUNDERBIRD_1_IN = [
|
||||
u'<a href="mailto:dummy@example.com">dummy@example.com</a> ',
|
||||
u'<blockquote data-o-mail-quote-node="1" data-o-mail-quote="1">',
|
||||
u'Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.']
|
||||
QUOTE_THUNDERBIRD_1_OUT = [u"""--
|
||||
Raoul Grosbedonnée
|
||||
"""]
|
||||
|
||||
QUOTE_YAHOO_HTML = """
|
||||
<html>
|
||||
<head></head>
|
||||
<body>
|
||||
<div class="ydpf6e951dcyahoo-style-wrap">
|
||||
<div></div>
|
||||
<div dir="ltr" data-setdir="false">Reply from Yahoo</div>
|
||||
</div>
|
||||
<div id="yahoo_quoted_8820595126" class="yahoo_quoted">
|
||||
<div style="font-family:'Helvetica Neue', Helvetica, Arial, sans-serif;font-size:13px;color:#26282a;">
|
||||
=20
|
||||
<div>
|
||||
Bob a dit:
|
||||
</div>
|
||||
<div><br></div>
|
||||
<div><br></div>
|
||||
<div>
|
||||
<div id="yiv3215395356">
|
||||
<div dir="ltr">Parent email body</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
QUOTE_YAHOO_HTML_IN = [
|
||||
"""Reply from Yahoo""",
|
||||
"""<div dir="ltr" data-setdir="false">""",
|
||||
"""<div class="ydpf6e951dcyahoo-style-wrap">""",
|
||||
]
|
||||
QUOTE_YAHOO_HTML_OUT = [
|
||||
"""<div id="yahoo_quoted_8820595126" class="yahoo_quoted" data-o-mail-quote="1">""",
|
||||
]
|
||||
|
||||
|
||||
TEXT_1 = u"""I contact you about our meeting tomorrow. Here is the schedule I propose:
|
||||
9 AM: brainstorming about our new amazing business app
|
||||
9.45 AM: summary
|
||||
10 AM: meeting with Ignasse to present our app
|
||||
Is everything ok for you ?
|
||||
--
|
||||
MySignature"""
|
||||
|
||||
TEXT_1_IN = [u"""I contact you about our meeting tomorrow. Here is the schedule I propose:
|
||||
9 AM: brainstorming about our new amazing business app
|
||||
9.45 AM: summary
|
||||
10 AM: meeting with Ignasse to present our app
|
||||
Is everything ok for you ?"""]
|
||||
TEXT_1_OUT = [u"""
|
||||
--
|
||||
MySignature"""]
|
||||
|
||||
TEXT_2 = u"""Salut Raoul!
|
||||
Le 28 oct. 2012 à 00:02, Raoul Grosbedon a écrit :
|
||||
|
||||
> I contact you about our meeting tomorrow. Here is the schedule I propose: (quote)
|
||||
|
||||
Of course. This seems viable.
|
||||
|
||||
> 2012/10/27 Bert Tartopoils :
|
||||
>> blahblahblah (quote)?
|
||||
>>
|
||||
>> blahblahblah (quote)
|
||||
>>
|
||||
>> Bert TARTOPOILS
|
||||
>> bert.tartopoils@miam.miam
|
||||
>>
|
||||
>
|
||||
>
|
||||
> --
|
||||
> RaoulSignature
|
||||
|
||||
--
|
||||
Bert TARTOPOILS
|
||||
bert.tartopoils@miam.miam
|
||||
"""
|
||||
|
||||
TEXT_2_IN = [u"Salut Raoul!", "Of course. This seems viable."]
|
||||
TEXT_2_OUT = [u"""
|
||||
> I contact you about our meeting tomorrow. Here is the schedule I propose: (quote)""",
|
||||
"""
|
||||
> 2012/10/27 Bert Tartopoils :
|
||||
>> blahblahblah (quote)?
|
||||
>>
|
||||
>> blahblahblah (quote)
|
||||
>>
|
||||
>> Bert TARTOPOILS
|
||||
>> bert.tartopoils@miam.miam
|
||||
>>
|
||||
>
|
||||
>
|
||||
> --
|
||||
> RaoulSignature"""]
|
||||
|
||||
# MISC
|
||||
|
||||
GMAIL_1 = u"""Hello,<div><br></div><div>Ok for me. I am replying directly in gmail, with signature.</div><div><br></div><div>Kind regards,</div><div><br></div><div>Demo.<br><br>
|
||||
<div class="gmail_quote">
|
||||
<div dir="ltr" class="gmail_attr">On Thu, Nov 8, 2012 at 5:29 PM, <span><<a href="mailto:dummy@example.com">dummy@example.com</a>></span> wrote:<br>
|
||||
<blockquote class="gmail_quote"><div>I contact you about our meeting for tomorrow. Here is the schedule I propose:</div><div><ul><li>9 AM: brainstorming about our new amazing business app</span></li></li>
|
||||
<li>9.45 AM: summary</li><li>10 AM: meeting with Fabien to present our app</li></ul></div><div>Is everything ok for you?</div>
|
||||
<div><p>-- <br>Administrator</p></div>
|
||||
|
||||
<div><p>Log in our portal at: <a href="http://localhost:8069#action=login&db=mail_1&login=demo">http://localhost:8069#action=login&db=mail_1&login=demo</a></p></div>
|
||||
</blockquote>
|
||||
<div><br clear="all"></div>
|
||||
<div><br></div>
|
||||
<span class="gmail_signature_prefix">-- </span><br>
|
||||
<div dir="ltr" class="gmail_signature">
|
||||
<div dir="ltr">
|
||||
This is a test signature
|
||||
<div><br></div>
|
||||
<div>123</div>
|
||||
</div>
|
||||
</div>
|
||||
</div><br></div>"""
|
||||
|
||||
GMAIL_1_IN = [
|
||||
u'Ok for me. I am replying directly in gmail, with signature.',
|
||||
'<div class="gmail_quote" data-o-mail-quote-container="1" data-o-mail-quote="1">',
|
||||
'<div dir="ltr" class="gmail_attr" data-o-mail-quote="1">On Thu, Nov 8, 2012 at 5:29 PM',
|
||||
'<blockquote class="gmail_quote" data-o-mail-quote-container="1" data-o-mail-quote="1" data-o-mail-quote-node="1">',
|
||||
# blank spaces between signature and reply quote should be quoted too
|
||||
'<div data-o-mail-quote="1"><br clear="all" data-o-mail-quote="1"></div>\n'
|
||||
'<div data-o-mail-quote="1"><br data-o-mail-quote="1"></div>',
|
||||
]
|
||||
GMAIL_1_OUT = []
|
||||
|
||||
HOTMAIL_1 = u"""<div>
|
||||
<div dir="ltr"><br>
|
||||
I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly.
|
||||
<br><br>Kindest regards,<br>xxx<br>
|
||||
<div>
|
||||
<div id="SkyDrivePlaceholder">
|
||||
</div>
|
||||
<hr id="stopSpelling">
|
||||
Subject: Re: your OpenERP.com registration<br>From: xxx@xxx.xxx<br>To: xxx@xxx.xxx<br>Date: Wed, 27 Mar 2013 17:12:12 +0000
|
||||
<br><br>
|
||||
Hello xxx,
|
||||
<br>
|
||||
I noticed you recently created an OpenERP.com account to access OpenERP Apps.
|
||||
<br>
|
||||
You indicated that you wish to use OpenERP in your own company.
|
||||
We would like to know more about your your business needs and requirements, and see how
|
||||
we can help you. When would you be available to discuss your project ?<br>
|
||||
Best regards,<br>
|
||||
<pre>
|
||||
<a href="http://openerp.com" target="_blank">http://openerp.com</a>
|
||||
Belgium: +32.81.81.37.00
|
||||
U.S.: +1 (650) 307-6736
|
||||
India: +91 (79) 40 500 100
|
||||
</pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>"""
|
||||
HOTMAIL_1_IN = [u"""<div dir="ltr"><br>
|
||||
I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly.
|
||||
<br><br>Kindest regards,<br>xxx<br>"""]
|
||||
HOTMAIL_1_OUT = [
|
||||
u"""<hr id="stopSpelling" data-o-mail-quote="1">""",
|
||||
u"""<pre data-o-mail-quote="1">
|
||||
<a href="http://openerp.com" target="_blank" data-o-mail-quote="1">http://openerp.com</a>
|
||||
Belgium: +32.81.81.37.00
|
||||
U.S.: +1 (650) 307-6736
|
||||
India: +91 (79) 40 500 100
|
||||
</pre>"""]
|
||||
|
||||
MSOFFICE_1 = u"""
|
||||
<div>
|
||||
<div class="WordSection1">
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module.
|
||||
We are a company of 25 engineers providing product design services to clients.
|
||||
</span>
|
||||
</p>
|
||||
<p></p>
|
||||
<p></p>
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
I’ll install on a windows server and run a very limited trial to see how it works.
|
||||
If we adopt OpenERP we will probably move to Linux or look for a hosted SaaS option.
|
||||
</span>
|
||||
</p>
|
||||
<p></p>
|
||||
<p></p>
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
<br>
|
||||
I am also evaluating Adempiere and maybe others.
|
||||
</span>
|
||||
</p>
|
||||
<p></p>
|
||||
<p></p>
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
</span>
|
||||
</p>
|
||||
<p> </p>
|
||||
<p></p>
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
I expect the trial will take 2-3 months as this is not a high priority for us.
|
||||
</span>
|
||||
</p>
|
||||
<p></p>
|
||||
<p></p>
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
</span>
|
||||
</p>
|
||||
<p> </p>
|
||||
<p></p>
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
Alan
|
||||
</span>
|
||||
</p>
|
||||
<p></p>
|
||||
<p></p>
|
||||
<p class="MsoNormal">
|
||||
<span style="font-size:11.0pt;font-family:"Calibri","sans-serif";color:#1F497D">
|
||||
</span>
|
||||
</p>
|
||||
<p> </p>
|
||||
<p></p>
|
||||
<div>
|
||||
<div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0in 0in 0in">
|
||||
<p class="MsoNormal">
|
||||
<b><span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
|
||||
From:
|
||||
</span></b>
|
||||
<span style="font-size:10.0pt;font-family:"Tahoma","sans-serif"">
|
||||
OpenERP Enterprise [mailto:sales@openerp.com]
|
||||
<br><b>Sent:</b> Monday, 11 March, 2013 14:47<br><b>To:</b> Alan Widmer<br><b>Subject:</b> Re: your OpenERP.com registration
|
||||
</span>
|
||||
</p>
|
||||
<p></p>
|
||||
<p></p>
|
||||
</div>
|
||||
</div>
|
||||
<p class="MsoNormal"></p>
|
||||
<p> </p>
|
||||
<p>Hello Alan Widmer, </p>
|
||||
<p></p>
|
||||
<p>I noticed you recently downloaded OpenERP. </p>
|
||||
<p></p>
|
||||
<p>
|
||||
Uou mentioned you wish to use OpenERP in your own company. Please let me more about your
|
||||
business needs and requirements? When will you be available to discuss about your project?
|
||||
</p>
|
||||
<p></p>
|
||||
<p>Thanks for your interest in OpenERP, </p>
|
||||
<p></p>
|
||||
<p>Feel free to contact me if you have any questions, </p>
|
||||
<p></p>
|
||||
<p>Looking forward to hear from you soon. </p>
|
||||
<p></p>
|
||||
<pre><p> </p></pre>
|
||||
<pre>--<p></p></pre>
|
||||
<pre>Nicolas<p></p></pre>
|
||||
<pre><a href="http://openerp.com">http://openerp.com</a><p></p></pre>
|
||||
<pre>Belgium: +32.81.81.37.00<p></p></pre>
|
||||
<pre>U.S.: +1 (650) 307-6736<p></p></pre>
|
||||
<pre>India: +91 (79) 40 500 100<p></p></pre>
|
||||
<pre> <p></p></pre>
|
||||
</div>
|
||||
</div>"""
|
||||
|
||||
MSOFFICE_1_IN = [u'Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module.']
|
||||
MSOFFICE_1_OUT = [u'I noticed you recently downloaded OpenERP.', 'Uou mentioned you wish to use OpenERP in your own company.', 'Belgium: +32.81.81.37.00']
|
||||
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Test cases coming from bugs
|
||||
# ------------------------------------------------------------
|
||||
|
||||
# bug: read more not apparent, strange message in read more span
|
||||
BUG1 = u"""<pre>Hi Migration Team,
|
||||
|
||||
Paragraph 1, blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah.
|
||||
|
||||
Paragraph 2, blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah.
|
||||
|
||||
Paragraph 3, blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah blah blah blah blah blah blah
|
||||
blah blah blah blah blah blah blah blah.
|
||||
|
||||
Thanks.
|
||||
|
||||
Regards,
|
||||
|
||||
--
|
||||
Olivier Laurent
|
||||
Migration Manager
|
||||
OpenERP SA
|
||||
Chaussée de Namur, 40
|
||||
B-1367 Gérompont
|
||||
Tel: +32.81.81.37.00
|
||||
Web: http://www.openerp.com</pre>"""
|
||||
|
||||
BUG_1_IN = [
|
||||
u'Hi Migration Team',
|
||||
u'Paragraph 1'
|
||||
]
|
||||
BUG_1_OUT = [u"""
|
||||
--
|
||||
Olivier Laurent
|
||||
Migration Manager
|
||||
OpenERP SA
|
||||
Chaussée de Namur, 40
|
||||
B-1367 Gérompont
|
||||
Tel: +32.81.81.37.00
|
||||
Web: http://www.openerp.com"""]
|
||||
|
||||
|
||||
REMOVE_CLASS = u"""
|
||||
<div style="FONT-SIZE: 12pt; FONT-FAMILY: 'Times New Roman'; COLOR: #000000">
|
||||
<div>Hello</div>
|
||||
<div>I have just installed Odoo 9 and I've got the following error:</div>
|
||||
<div> </div>
|
||||
<div class="openerp openerp_webclient_container oe_webclient">
|
||||
<div class="oe_loading" style="DISPLAY: none"> </div>
|
||||
</div>
|
||||
<div class="modal-backdrop in"></div>
|
||||
<div role="dialog" tabindex="-1" aria-hidden="false" class="modal in" style="DISPLAY: block" data-backdrop="static">
|
||||
<div class="modal-dialog modal-lg">
|
||||
<div class="modal-content openerp">
|
||||
<div class="modal-header">
|
||||
<h4 class="modal-title">Odoo Error<span class="o_subtitle text-muted"></span></h4>
|
||||
</div>
|
||||
<div class="o_error_detail modal-body">
|
||||
<pre>An error occurred in a modal and I will send you back the html to try opening one on your end</pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
REMOVE_CLASS_IN = [
|
||||
u'<div style="font-size:12pt; font-family:\'Times New Roman\'; color:#000000">',
|
||||
u'An error occurred in a modal and I will send you back the html to try opening one on your end']
|
||||
REMOVE_CLASS_OUT = [
|
||||
u'<div class="modal-backdrop in">',
|
||||
u'<div class="modal-content openerp">',
|
||||
u'<div class="modal-header">']
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
class TestMenu(TransactionCase):
    """Tests around ``ir.ui.menu`` hierarchy maintenance."""

    def test_00_menu_deletion(self):
        """Verify that menu deletion works properly when there are child menus, and those
        are indeed made orphans"""
        MenuModel = self.env['ir.ui.menu']
        parent = MenuModel.create({'name': 'Test root'})
        first_child = MenuModel.create({'name': 'Test child 1', 'parent_id': parent.id})
        second_child = MenuModel.create({'name': 'Test child 2', 'parent_id': parent.id})
        grandchild = MenuModel.create({'name': 'Test child 2-1', 'parent_id': second_child.id})
        created_ids = [parent.id, first_child.id, second_child.id, grandchild.id]

        # Removing the root must promote its direct children to top level
        # instead of cascading the delete — cfr. explanation in menu.unlink()
        parent.unlink()

        # search() hides some menus by default; this context key disables
        # that filtering so every surviving record is visible
        MenuModel = self.env['ir.ui.menu'].with_context({'ir.ui.menu.full_list': True})

        survivors = MenuModel.search([('id', 'in', created_ids)], order="id")
        self.assertEqual([first_child.id, second_child.id, grandchild.id], survivors.ids)

        top_level = MenuModel.search([('id', 'in', created_ids), ('parent_id', '=', False)], order="id")
        self.assertEqual([first_child.id, second_child.id], top_level.ids)
|
||||
|
|
@ -0,0 +1,129 @@
|
|||
import base64
|
||||
import unittest
|
||||
|
||||
try:
|
||||
import magic
|
||||
except ImportError:
|
||||
magic = None
|
||||
|
||||
from odoo.tests.common import BaseCase
|
||||
from odoo.tools.mimetypes import get_extension, guess_mimetype
|
||||
|
||||
PNG = b'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVQI12P4//8/AAX+Av7czFnnAAAAAElFTkSuQmCC'
|
||||
GIF = b"R0lGODdhAQABAIAAAP///////ywAAAAAAQABAAACAkQBADs="
|
||||
BMP = b"""Qk1+AAAAAAAAAHoAAABsAAAAAQAAAAEAAAABABgAAAAAAAQAAAATCwAAEwsAAAAAAAAAAAAAQkdScwAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAD///8A"""
|
||||
JPG = """/9j/4AAQSkZJRgABAQEASABIAAD//gATQ3JlYXRlZCB3aXRoIEdJTVD/2wBDAP
|
||||
//////////////////////////////////////////////////////////////////////////////////////2wBDAf///////
|
||||
///////////////////////////////////////////////////////////////////////////////wgARCAABAAEDAREAAhEB
|
||||
AxEB/8QAFAABAAAAAAAAAAAAAAAAAAAAAv/EABQBAQAAAAAAAAAAAAAAAAAAAAD/2gAMAwEAAhADEAAAAUf/xAAUEAEAAAAAAAA
|
||||
AAAAAAAAAAAAA/9oACAEBAAEFAn//xAAUEQEAAAAAAAAAAAAAAAAAAAAA/9oACAEDAQE/AX//xAAUEQEAAAAAAAAAAAAAAAAAAA
|
||||
AA/9oACAECAQE/AX//xAAUEAEAAAAAAAAAAAAAAAAAAAAA/9oACAEBAAY/An//xAAUEAEAAAAAAAAAAAAAAAAAAAAA/9oACAEBA
|
||||
AE/IX//2gAMAwEAAgADAAAAEB//xAAUEQEAAAAAAAAAAAAAAAAAAAAA/9oACAEDAQE/EH//xAAUEQEAAAAAAAAAAAAAAAAAAAAA
|
||||
/9oACAECAQE/EH//xAAUEAEAAAAAAAAAAAAAAAAAAAAA/9oACAEBAAE/EH//2Q=="""
|
||||
SVG = b"""PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iaXNvLTg4NTktMSI/PjwhRE9DVFlQRSBzdmcgUFVCTElDICItLy9XM0MvL0RURCBTVkcgMjAwMDExMDIvL0VOIlxuICJodHRwOi8vd3d3LnczLm9yZy9UUi8yMDAwL0NSLVNWRy0yMDAwMTEwMi9EVEQvc3ZnLTIwMDAxMTAyLmR0ZCI+PHN2ZyB3aWR0aD0iMTAwJSIgaGVpZ2h0PSIxMDAlIj48ZyB0cmFuc2Zvcm09InRyYW5zbGF0ZSg1MCw1MCkiPjxyZWN0IHg9IjAiIHk9IjAiIHdpZHRoPSIxNTAiIGhlaWdodD0iNTAiIHN0eWxlPSJmaWxsOnJlZDsiIC8+PC9nPjwvc3ZnPg=="""
|
||||
NAMESPACED_SVG = b"""<svg:svg xmlns:svg="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
|
||||
<svg:rect x="10" y="10" width="80" height="80" fill="green" />
|
||||
</svg:svg>"""
|
||||
# minimal zip file with an empty `t.txt` file
|
||||
ZIP = b"""UEsDBBQACAAIAGFva1AAAAAAAAAAAAAAAAAFACAAdC50eHRVVA0AB5bgaF6W4GheluBoXnV4CwABBOgDAAAE6AMAAA
|
||||
MAUEsHCAAAAAACAAAAAAAAAFBLAQIUAxQACAAIAGFva1AAAAAAAgAAAAAAAAAFACAAAAAAAAAAAACkgQAAAAB0LnR4dFVUDQAHlu
|
||||
BoXpbgaF6W4GhedXgLAAEE6AMAAAToAwAAUEsFBgAAAAABAAEAUwAAAFUAAAAAAA=="""
|
||||
|
||||
XML = b"""<?xml version='1.0' encoding='utf-8'?>
|
||||
<Document xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="urn:iso:std:iso:20022:tech:xsd:pain.001.001.03">
|
||||
<CstmrCdtTrfInitn>
|
||||
<GrpHdr>
|
||||
<MsgId>123456Odoo S.A.893873733</MsgId>
|
||||
<CreDtTm>2018-11-21T09:47:32</CreDtTm>
|
||||
<NbOfTxs>0</NbOfTxs>
|
||||
<CtrlSum>0.0</CtrlSum>
|
||||
<InitgPty>
|
||||
<Nm>Odoo S.A.</Nm>
|
||||
<Id>
|
||||
<OrgId>
|
||||
<Othr>
|
||||
<Id>BE0477472701</Id>
|
||||
<Issr>KBO-BCE</Issr>
|
||||
</Othr>
|
||||
</OrgId>
|
||||
</Id>
|
||||
</InitgPty>
|
||||
</GrpHdr>
|
||||
</CstmrCdtTrfInitn>
|
||||
</Document>
|
||||
"""
|
||||
|
||||
class test_guess_mimetype(BaseCase):
    """Sanity checks for guess_mimetype() and get_extension() over a set of
    well-known payloads (PNG, BMP, JPG, GIF, SVG, ZIP, XML fixtures)."""

    def test_default_mimetype_empty(self):
        detected = guess_mimetype(b'')
        # odoo implementation returns application/octet-stream by default;
        # if available, python-magic returns application/x-empty
        self.assertIn(detected, ('application/octet-stream', 'application/x-empty'))

    def test_default_mimetype(self):
        detected = guess_mimetype(b'', default='test')
        # if available, python-magic returns application/x-empty
        self.assertIn(detected, ('test', 'application/x-empty'))

    def test_mimetype_octet_stream(self):
        # a lone NUL byte is plain binary
        self.assertEqual(guess_mimetype(b'\0'), 'application/octet-stream')

    def test_mimetype_png(self):
        payload = base64.b64decode(PNG)
        self.assertEqual(guess_mimetype(payload, default='test'), 'image/png')

    def test_mimetype_bmp(self):
        payload = base64.b64decode(BMP)
        detected = guess_mimetype(payload, default='test')
        # mimetype should match image/bmp, image/x-ms-bmp, ...
        self.assertRegex(detected, r'image/.*\bbmp')

    def test_mimetype_jpg(self):
        payload = base64.b64decode(JPG)
        self.assertEqual(guess_mimetype(payload, default='test'), 'image/jpeg')

    def test_mimetype_gif(self):
        payload = base64.b64decode(GIF)
        self.assertEqual(guess_mimetype(payload, default='test'), 'image/gif')

    def test_mimetype_svg(self):
        payload = base64.b64decode(SVG)
        self.assertTrue(guess_mimetype(payload, default='test').startswith('image/svg'))

        # namespaced root tag (<svg:svg>) must still be recognised
        self.assertTrue(guess_mimetype(NAMESPACED_SVG, default='test').startswith('image/svg'))

        # Tests that whitespace padded SVG are not detected as SVG in odoo implementation
        if not magic:
            self.assertNotIn("svg", guess_mimetype(b" " + payload, default='test'))

    def test_mimetype_zip(self):
        payload = base64.b64decode(ZIP)
        self.assertEqual(guess_mimetype(payload, default='test'), 'application/zip')

    def test_mimetype_xml(self):
        # the odoo fallback and python-magic disagree on the exact subtype
        expected_mimetype = 'application/xml' if magic is None else 'text/xml'
        self.assertEqual(guess_mimetype(XML, default='test'), expected_mimetype)

    def test_mimetype_get_extension(self):
        for filename, extension in [
            ('filename.Abc', '.abc'),
            ('filename.scss', '.scss'),
            ('filename.torrent', '.torrent'),
            ('.htaccess', ''),
            # enough to suppose that extension is present and don't suffix the filename
            ('filename.tar.gz', '.gz'),
            ('filename', ''),
            ('filename.', ''),
            ('filename.not_alnum', ''),
            ('filename.with space', ''),
            ('filename.notAnExtension', ''),
        ]:
            self.assertEqual(get_extension(filename), extension)
|
||||
519
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_misc.py
Normal file
519
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_misc.py
Normal file
|
|
@ -0,0 +1,519 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import os.path
|
||||
import pytz
|
||||
|
||||
from odoo.tools import (
|
||||
config,
|
||||
date_utils,
|
||||
file_open,
|
||||
file_path,
|
||||
merge_sequences,
|
||||
misc,
|
||||
remove_accents,
|
||||
validate_url,
|
||||
)
|
||||
from odoo.tests.common import TransactionCase, BaseCase
|
||||
|
||||
|
||||
class TestCountingStream(BaseCase):
    """Bookkeeping of misc.CountingStream's `index` attribute."""

    def test_empty_stream(self):
        # index starts at -1 and only reaches 0 once exhaustion is observed
        stream = misc.CountingStream(iter([]))
        self.assertEqual(stream.index, -1)
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 0)

    def test_single(self):
        stream = misc.CountingStream(range(1))
        self.assertEqual(stream.index, -1)
        self.assertEqual(next(stream, None), 0)
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 1)

    def test_full(self):
        # draining 42 items leaves index == 42
        stream = misc.CountingStream(range(42))
        for _value in stream:
            pass
        self.assertEqual(stream.index, 42)

    def test_repeated(self):
        """ Once the CountingStream has stopped iterating, the index should not
        increase anymore (the internal state should not be allowed to change)
        """
        stream = misc.CountingStream(iter([]))
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 0)
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 0)
|
||||
|
||||
|
||||
class TestMergeSequences(BaseCase):
    """merge_sequences() must produce one list honouring the partial order
    implied by each input sequence."""

    def test_merge_sequences(self):
        # base case: a single sequence comes back unchanged
        merged = merge_sequences(['A', 'B', 'C'])
        self.assertEqual(merged, ['A', 'B', 'C'])

        # 'Z' can be anywhere
        merged = merge_sequences(['A', 'B', 'C'], ['Z'])
        self.assertEqual(merged, ['A', 'B', 'C', 'Z'])

        # 'Y' must precede 'C';
        merged = merge_sequences(['A', 'B', 'C'], ['Y', 'C'])
        self.assertEqual(merged, ['A', 'B', 'Y', 'C'])

        # 'X' must follow 'A' and precede 'C'
        merged = merge_sequences(['A', 'B', 'C'], ['A', 'X', 'C'])
        self.assertEqual(merged, ['A', 'B', 'X', 'C'])

        # all cases combined
        merged = merge_sequences(
            ['A', 'B', 'C'],
            ['Z'],            # 'Z' can be anywhere
            ['Y', 'C'],       # 'Y' must precede 'C';
            ['A', 'X', 'Y'],  # 'X' must follow 'A' and precede 'Y'
        )
        self.assertEqual(merged, ['A', 'B', 'X', 'Y', 'C', 'Z'])
|
||||
|
||||
|
||||
class TestDateRangeFunction(BaseCase):
    """ Test on date_range generator. """

    def test_date_range_with_naive_datetimes(self):
        """ Check date_range with naive datetimes. """
        start = datetime.datetime(1985, 1, 1)
        end = datetime.datetime(1986, 1, 1)

        # the first of every month of 1985, plus the end boundary itself
        expected = [datetime.datetime(1985, month, 1) for month in range(1, 13)]
        expected.append(datetime.datetime(1986, 1, 1))

        self.assertEqual(list(date_utils.date_range(start, end)), expected)

    def test_date_range_with_timezone_aware_datetimes_other_than_utc(self):
        """ Check date_range with timezone-aware datetimes other than UTC."""
        timezone = pytz.timezone('Europe/Brussels')

        start = timezone.localize(datetime.datetime(1985, 1, 1))
        end = timezone.localize(datetime.datetime(1986, 1, 1))

        # same monthly boundaries as the naive case, localized in Brussels
        expected = [
            timezone.localize(datetime.datetime(1985, month, 1))
            for month in range(1, 13)
        ]
        expected.append(timezone.localize(datetime.datetime(1986, 1, 1)))

        self.assertEqual(expected, list(date_utils.date_range(start, end)))

    def test_date_range_with_mismatching_zones(self):
        """ Check date_range with mismatching zone should raise an exception."""
        start = pytz.timezone('Europe/Brussels').localize(datetime.datetime(1985, 1, 1))
        end = pytz.timezone('America/Recife').localize(datetime.datetime(1986, 1, 1))

        with self.assertRaises(ValueError):
            list(date_utils.date_range(start, end))

    def test_date_range_with_inconsistent_datetimes(self):
        """ Check date_range with a timezone-aware datetime and a naive one."""
        context_timezone = pytz.timezone('Europe/Brussels')

        start = datetime.datetime(1985, 1, 1)
        end = context_timezone.localize(datetime.datetime(1986, 1, 1))

        with self.assertRaises(ValueError):
            list(date_utils.date_range(start, end))

    def test_date_range_with_hour(self):
        """ Test date range with hour and naive datetime."""
        start = datetime.datetime(2018, 3, 25)
        end = datetime.datetime(2018, 3, 26)
        step = relativedelta(hours=1)

        # every hour of 2018-03-25, plus midnight of the following day
        expected = [datetime.datetime(2018, 3, 25, hour) for hour in range(24)]
        expected.append(datetime.datetime(2018, 3, 26))

        self.assertEqual(list(date_utils.date_range(start, end, step)), expected)
|
||||
|
||||
|
||||
class TestFormatLangDate(TransactionCase):
    """Checks of misc.format_date / format_datetime / format_time: accepted
    input types, language codes, custom Babel formats and timezones."""

    def test_00_accepted_types(self):
        # Formatters accept datetime/date objects, ISO strings, and falsy
        # values (which all render as the empty string).
        self.env.user.tz = 'Europe/Brussels'
        datetime_str = '2017-01-31 12:00:00'
        date_datetime = datetime.datetime.strptime(datetime_str, "%Y-%m-%d %H:%M:%S")
        date_date = date_datetime.date()
        date_str = '2017-01-31'
        time_part = datetime.time(16, 30, 22)
        t_medium = 'h:mm:ss a'
        medium = f'MMM d, YYYY, {t_medium}'

        self.assertEqual(misc.format_date(self.env, date_datetime), '01/31/2017')
        self.assertEqual(misc.format_date(self.env, date_date), '01/31/2017')
        self.assertEqual(misc.format_date(self.env, date_str), '01/31/2017')
        # falsy inputs all format to the empty string
        self.assertEqual(misc.format_date(self.env, ''), '')
        self.assertEqual(misc.format_date(self.env, False), '')
        self.assertEqual(misc.format_date(self.env, None), '')

        # naive datetimes are shifted to the user's tz (UTC+1 in January)
        self.assertEqual(misc.format_datetime(self.env, date_datetime, dt_format=medium), 'Jan 31, 2017, 1:00:00 PM')
        self.assertEqual(misc.format_datetime(self.env, datetime_str, dt_format=medium), 'Jan 31, 2017, 1:00:00 PM')
        self.assertEqual(misc.format_datetime(self.env, '', dt_format=medium), '')
        self.assertEqual(misc.format_datetime(self.env, False, dt_format=medium), '')
        self.assertEqual(misc.format_datetime(self.env, None, dt_format=medium), '')

        self.assertEqual(misc.format_time(self.env, time_part, time_format=t_medium), '4:30:22 PM')
        self.assertEqual(misc.format_time(self.env, '', time_format=t_medium), '')
        self.assertEqual(misc.format_time(self.env, False, time_format=t_medium), '')
        self.assertEqual(misc.format_time(self.env, None, time_format=t_medium), '')

    def test_01_code_and_format(self):
        # Cross-checks lang coming from the context vs the explicit
        # `lang_code` argument vs an explicit Babel format string.
        date_str = '2017-01-31'
        lang = self.env['res.lang']

        # Activate French and Simplified Chinese (test with non-ASCII characters)
        lang._activate_lang('fr_FR')
        lang._activate_lang('zh_CN')

        # -- test `date`
        # Change a single parameter
        self.assertEqual(misc.format_date(lang.with_context(lang='fr_FR').env, date_str), '31/01/2017')
        self.assertEqual(misc.format_date(lang.env, date_str, lang_code='fr_FR'), '31/01/2017')
        self.assertEqual(misc.format_date(lang.env, date_str, date_format='MMM d, y'), 'Jan 31, 2017')

        # Change 2 parameters
        self.assertEqual(misc.format_date(lang.with_context(lang='zh_CN').env, date_str, lang_code='fr_FR'), '31/01/2017')
        self.assertEqual(misc.format_date(lang.with_context(lang='zh_CN').env, date_str, date_format='MMM d, y'), u'1\u6708 31, 2017')
        self.assertEqual(misc.format_date(lang.env, date_str, lang_code='fr_FR', date_format='MMM d, y'), 'janv. 31, 2017')

        # Change 3 parameters
        self.assertEqual(misc.format_date(lang.with_context(lang='zh_CN').env, date_str, lang_code='en_US', date_format='MMM d, y'), 'Jan 31, 2017')

        # -- test `datetime`
        datetime_str = '2017-01-31 10:33:00'
        # Change languages and timezones
        datetime_us_str = misc.format_datetime(lang.with_context(lang='en_US').env, datetime_str, tz='Europe/Brussels')
        self.assertNotEqual(misc.format_datetime(lang.with_context(lang='fr_FR').env, datetime_str, tz='Europe/Brussels'), datetime_us_str)
        self.assertNotEqual(misc.format_datetime(lang.with_context(lang='zh_CN').env, datetime_str, tz='America/New_York'), datetime_us_str)

        # Change language, timezone and format
        self.assertEqual(misc.format_datetime(lang.with_context(lang='fr_FR').env, datetime_str, tz='America/New_York', dt_format='dd/MM/YYYY HH:mm'), '31/01/2017 05:33')
        self.assertEqual(misc.format_datetime(lang.with_context(lang='en_US').env, datetime_str, tz='Europe/Brussels', dt_format='MMM d, y'), 'Jan 31, 2017')

        # Check given `lang_code` overwrites context lang
        fmt_fr = 'dd MMMM YYYY à HH:mm:ss Z'
        fmt_us = "MMMM dd, YYYY 'at' hh:mm:ss a Z"
        self.assertEqual(misc.format_datetime(lang.env, datetime_str, tz='Europe/Brussels', dt_format=fmt_fr, lang_code='fr_FR'), '31 janvier 2017 à 11:33:00 +0100')
        self.assertEqual(misc.format_datetime(lang.with_context(lang='zh_CN').env, datetime_str, tz='Europe/Brussels', dt_format=fmt_us, lang_code='en_US'), 'January 31, 2017 at 11:33:00 AM +0100')

        # -- test `time`
        time_part = datetime.time(16, 30, 22)
        time_part_tz = datetime.time(16, 30, 22, tzinfo=pytz.timezone('America/New_York'))  # 4:30 PM timezoned

        self.assertEqual(misc.format_time(lang.with_context(lang='fr_FR').env, time_part, time_format='HH:mm:ss'), '16:30:22')
        self.assertEqual(misc.format_time(lang.with_context(lang='zh_CN').env, time_part, time_format="ah:m:ss"), '\u4e0b\u53484:30:22')

        # Check format in different languages
        self.assertEqual(misc.format_time(lang.with_context(lang='fr_FR').env, time_part, time_format='HH:mm'), '16:30')
        self.assertEqual(misc.format_time(lang.with_context(lang='zh_CN').env, time_part, time_format='ah:mm'), '\u4e0b\u53484:30')

        # Check timezoned time part
        self.assertEqual(misc.format_time(lang.with_context(lang='fr_FR').env, time_part_tz, time_format='HH:mm:ss Z'), '16:30:22 -0504')
        self.assertEqual(misc.format_time(lang.with_context(lang='zh_CN').env, time_part_tz, time_format='zzzz ah:mm:ss'), '\u5317\u7f8e\u4e1c\u90e8\u6807\u51c6\u65f6\u95f4\u0020\u4e0b\u53484:30:22')

        # Check timezone conversion in format_time
        self.assertEqual(misc.format_time(lang.with_context(lang='fr_FR').env, datetime_str, 'Europe/Brussels', time_format='HH:mm:ss Z'), '11:33:00 +0100')
        self.assertEqual(misc.format_time(lang.with_context(lang='fr_FR').env, datetime_str, 'America/New_York', time_format='HH:mm:ss Z'), '05:33:00 -0500')

        # Check given `lang_code` overwrites context lang
        self.assertEqual(misc.format_time(lang.with_context(lang='fr_FR').env, time_part, time_format='ah:mm', lang_code='zh_CN'), '\u4e0b\u53484:30')
        self.assertEqual(misc.format_time(lang.with_context(lang='zh_CN').env, time_part, time_format='ah:mm', lang_code='fr_FR'), 'PM4:30')

    def test_02_tz(self):
        # A naive datetime is interpreted in the user's timezone unless the
        # object itself carries a tzinfo.
        self.env.user.tz = 'Europe/Brussels'
        datetime_str = '2016-12-31 23:55:00'
        date_datetime = datetime.datetime.strptime(datetime_str, "%Y-%m-%d %H:%M:%S")

        # While London is still in 2016, Brussels is already in 2017
        self.assertEqual(misc.format_date(self.env, date_datetime), '01/01/2017')

        # Force London timezone
        date_datetime = date_datetime.replace(tzinfo=pytz.UTC)
        self.assertEqual(misc.format_date(self.env, date_datetime), '12/31/2016', "User's tz must be ignored when tz is specifed in datetime object")
|
||||
|
||||
|
||||
class TestCallbacks(BaseCase):
    """misc.Callbacks: registration order, one-shot flushing, shared `data`
    aggregation and reentrant run() calls."""

    def test_callback(self):
        trace = []
        callbacks = misc.Callbacks()

        def foo():
            trace.append("foo")

        def bar():
            trace.append("bar")

        # register foo, then bar, then foo again
        callbacks.add(foo)
        callbacks.add(bar)
        callbacks.add(foo)

        # run() invokes them in registration order: foo(), bar(), foo()
        callbacks.run()
        self.assertEqual(trace, ["foo", "bar", "foo"])

        # the queue was flushed: a second run() does nothing
        callbacks.run()
        self.assertEqual(trace, ["foo", "bar", "foo"])

    def test_aggregate(self):
        trace = []
        callbacks = misc.Callbacks()

        # register foo once
        @callbacks.add
        def foo():
            trace.append(callbacks.data["foo"])

        # aggregate data for the single invocation
        for value in (1, 2, 3):
            callbacks.data.setdefault("foo", []).append(value)

        # foo() runs once and observes everything aggregated so far
        callbacks.run()
        self.assertEqual(trace, [[1, 2, 3]])
        self.assertFalse(callbacks.data)

        callbacks.run()
        self.assertEqual(trace, [[1, 2, 3]])

    def test_reentrant(self):
        trace = []
        callbacks = misc.Callbacks()

        # foo() itself triggers run(); bar() must still execute exactly once
        @callbacks.add
        def foo():
            trace.append("foo1")
            callbacks.run()
            trace.append("foo2")

        @callbacks.add
        def bar():
            trace.append("bar")

        callbacks.run()
        self.assertEqual(trace, ["foo1", "bar", "foo2"])

        callbacks.run()
        self.assertEqual(trace, ["foo1", "bar", "foo2"])
|
||||
|
||||
|
||||
class TestRemoveAccents(BaseCase):
    """remove_accents() strips diacritics and passes falsy values through."""

    def test_empty_string(self):
        # falsy inputs come back unchanged
        self.assertEqual(remove_accents(False), False)
        self.assertEqual(remove_accents(''), '')
        self.assertEqual(remove_accents(None), None)

    def test_latin(self):
        for accented, plain in [
            ('Niño Hernández', 'Nino Hernandez'),
            ('Anaïs Clémence', 'Anais Clemence'),
        ]:
            self.assertEqual(remove_accents(accented), plain)

    def test_non_latin(self):
        # Arabic is untouched; Cyrillic и-with-breve loses its diacritic
        for accented, plain in [
            ('العربية', 'العربية'),
            ('русский алфавит', 'русскии алфавит'),
        ]:
            self.assertEqual(remove_accents(accented), plain)
|
||||
|
||||
|
||||
class TestAddonsFileAccess(BaseCase):
    """Path-resolution rules of file_path()/file_open(): addons-relative
    lookups succeed, escapes outside the configured addons paths are
    rejected, and the optional `filter_ext` whitelist is enforced."""

    def assertCannotAccess(self, path, ExceptionType=FileNotFoundError, filter_ext=None):
        # file_path() must refuse to resolve `path`
        with self.assertRaises(ExceptionType):
            file_path(path, filter_ext=filter_ext)

    def assertCanRead(self, path, needle='', mode='r', filter_ext=None):
        # file_open() must succeed and the content must contain `needle`
        with file_open(path, mode, filter_ext) as f:
            self.assertIn(needle, f.read())

    def assertCannotRead(self, path, ExceptionType=FileNotFoundError, filter_ext=None):
        # file_open() must refuse to open `path`
        with self.assertRaises(ExceptionType):
            file_open(path, filter_ext=filter_ext)

    def test_file_path(self):
        # absolute path
        self.assertEqual(__file__, file_path(__file__))
        self.assertEqual(__file__, file_path(__file__, filter_ext=None))  # means "no filter" too
        self.assertEqual(__file__, file_path(__file__, filter_ext=('.py',)))

        # directory target is ok
        self.assertEqual(os.path.dirname(__file__), file_path(os.path.join(__file__, '..')))

        # relative path
        relpath = os.path.join(*(__file__.split(os.sep)[-3:]))  # 'base/tests/test_misc.py'
        self.assertEqual(__file__, file_path(relpath))
        self.assertEqual(__file__, file_path(relpath, filter_ext=('.py',)))

        # leading 'addons/' is ignored if present
        self.assertTrue(file_path("addons/web/__init__.py"))
        relpath = os.path.join('addons', relpath)  # 'addons/base/tests/test_misc.py'
        self.assertEqual(__file__, file_path(relpath))

        # files in root_path are allowed
        self.assertTrue(file_path('tools/misc.py'))

        # errors when outside addons_paths
        self.assertCannotAccess('/doesnt/exist')
        self.assertCannotAccess('/tmp')
        self.assertCannotAccess('../../../../../../../../../tmp')
        self.assertCannotAccess(os.path.join(__file__, '../../../../../'))

        # data_dir is forbidden
        self.assertCannotAccess(config['data_dir'])

        # errors for illegal extensions
        self.assertCannotAccess(__file__, ValueError, filter_ext=('.png',))
        # file doesn't exist AND has a non-whitelisted extension: the
        # extension check wins, so ValueError (not FileNotFoundError)
        self.assertCannotAccess(__file__.replace('.py', '.foo'), ValueError, filter_ext=('.png',))

    def test_file_open(self):
        # The needle includes UTF8 so we test reading non-ASCII files at the same time.
        # This depends on the system locale and is harder to unit test, but if you manage to run the
        # test with a non-UTF8 locale (`LC_ALL=fr_FR.iso8859-1 python3...`) it should not crash ;-)
        test_needle = "A needle with non-ascii bytes: ♥"

        # absolute path
        self.assertCanRead(__file__, test_needle)
        self.assertCanRead(__file__, test_needle.encode(), mode='rb')
        self.assertCanRead(__file__, test_needle.encode(), mode='rb', filter_ext=('.py',))

        # directory target *is* an error
        with self.assertRaises(FileNotFoundError):
            file_open(os.path.join(__file__, '..'))

        # relative path
        relpath = os.path.join(*(__file__.split(os.sep)[-3:]))  # 'base/tests/test_misc.py'
        self.assertCanRead(relpath, test_needle)
        self.assertCanRead(relpath, test_needle.encode(), mode='rb')
        self.assertCanRead(relpath, test_needle.encode(), mode='rb', filter_ext=('.py',))

        # leading 'addons/' is ignored if present
        self.assertCanRead("addons/web/__init__.py", "import")
        relpath = os.path.join('addons', relpath)  # 'addons/base/tests/test_misc.py'
        self.assertCanRead(relpath, test_needle)

        # files in root_path are allowed
        self.assertCanRead('tools/misc.py')

        # errors when outside addons_paths
        self.assertCannotRead('/doesnt/exist')
        self.assertCannotRead('')
        self.assertCannotRead('/tmp')
        self.assertCannotRead('../../../../../../../../../tmp')
        self.assertCannotRead(os.path.join(__file__, '../../../../../'))

        # data_dir is forbidden
        self.assertCannotRead(config['data_dir'])

        # errors for illegal extensions
        self.assertCannotRead(__file__, ValueError, filter_ext=('.png',))
        # file doesn't exist AND has a non-whitelisted extension: ValueError
        self.assertCannotRead(__file__.replace('.py', '.foo'), ValueError, filter_ext=('.png',))
|
||||
|
||||
|
||||
class TestDictTools(BaseCase):
    def test_readonly_dict(self):
        """misc.ReadonlyDict must reject every mutation path."""
        frozen = misc.ReadonlyDict({'foo': 'bar'})
        # direct item assignment is blocked
        with self.assertRaises(TypeError):
            frozen['baz'] = 'xyz'
        # the update() method is not even exposed
        with self.assertRaises(AttributeError):
            frozen.update({'baz': 'xyz'})
        # nor can plain dict.update be applied to the instance
        with self.assertRaises(TypeError):
            dict.update(frozen, {'baz': 'xyz'})
|
||||
|
||||
|
||||
class TestUrlValidate(BaseCase):
    def test_url_validate(self):
        """validate_url() preserves full URLs and prefixes scheme-less
        domains with http://"""
        cases = [
            # full URLs should be preserved
            ('http://example.com', 'http://example.com'),
            ('http://example.com/index.html', 'http://example.com/index.html'),
            ('http://example.com?debug=1', 'http://example.com?debug=1'),
            ('http://example.com#h3', 'http://example.com#h3'),

            # URLs with a domain should get a http scheme
            ('example.com', 'http://example.com'),
            ('example.com/index.html', 'http://example.com/index.html'),
            ('example.com?debug=1', 'http://example.com?debug=1'),
            ('example.com#h3', 'http://example.com#h3'),
        ]
        for raw, expected in cases:
            with self.subTest(case=raw):
                self.assertEqual(validate_url(raw), expected)

        # broken cases, do we really want that?
        self.assertEqual(validate_url('/index.html'), 'http:///index.html')
        self.assertEqual(validate_url('?debug=1'), 'http://?debug=1')
        self.assertEqual(validate_url('#model=project.task&id=3603607'), 'http://#model=project.task&id=3603607')
|
||||
|
|
@ -0,0 +1,93 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import os.path
|
||||
import tempfile
|
||||
from os.path import join as opj
|
||||
from unittest.mock import patch
|
||||
|
||||
import odoo.addons
|
||||
from odoo.modules.module import load_manifest
|
||||
from odoo.modules.module import get_manifest
|
||||
from odoo.release import major_version
|
||||
from odoo.tests.common import BaseCase
|
||||
|
||||
|
||||
class TestModuleManifest(BaseCase):
    """load_manifest()/get_manifest() behaviour, exercised against throwaway
    module directories inside a temporary, patched addons path."""

    @classmethod
    def setUpClass(cls):
        # Use a private temp directory as the only addons path so the tests
        # cannot see or disturb real addons.
        cls._tmp_dir = tempfile.TemporaryDirectory(prefix='odoo-test-addons-')
        cls.addClassCleanup(cls._tmp_dir.cleanup)
        cls.addons_path = cls._tmp_dir.name

        patcher = patch.object(odoo.addons, '__path__', [cls.addons_path])
        cls.startClassPatcher(patcher)

    def setUp(self):
        # each test gets a fresh, uniquely-named module directory
        self.module_root = tempfile.mkdtemp(prefix='odoo-test-module-', dir=self.addons_path)
        self.module_name = os.path.basename(self.module_root)

    def test_default_manifest(self):
        # A minimal manifest (name + license) must be completed with every
        # default key, without any WARNING being logged.
        with open(opj(self.module_root, '__manifest__.py'), 'w') as file:
            file.write(str({'name': f'Temp {self.module_name}', 'license': 'MIT'}))

        with self.assertNoLogs('odoo.modules.module', 'WARNING'):
            manifest = load_manifest(self.module_name)

        self.maxDiff = None
        self.assertDictEqual(manifest, {
            'addons_path': self.addons_path,
            'application': False,
            'assets': {},
            'author': 'Odoo S.A.',
            'auto_install': False,
            'bootstrap': False,
            'category': 'Uncategorized',
            'data': [],
            'demo': [],
            'demo_xml': [],
            'depends': [],
            'description': '',
            'external_dependencies': [],
            'icon': '/base/static/description/icon.png',
            'init_xml': [],
            'installable': True,
            'images': [],
            'images_preview_theme': {},
            'license': 'MIT',
            'live_test_url': '',
            'name': f'Temp {self.module_name}',
            'post_init_hook': '',
            'post_load': '',
            'pre_init_hook': '',
            'sequence': 100,
            'snippet_lists': {},
            'summary': '',
            'test': [],
            'update_xml': [],
            'uninstall_hook': '',
            'version': f'{major_version}.1.0',
            'web': False,
            'website': '',
        })

    def test_change_manifest(self):
        # get_manifest() must hand out copies: mutating a returned dict must
        # not corrupt the cached manifest.
        module_name = 'base'
        new_manifest = get_manifest(module_name)
        orig_auto_install = new_manifest['auto_install']
        new_manifest['auto_install'] = not orig_auto_install
        self.assertNotEqual(new_manifest, get_manifest(module_name))
        self.assertEqual(orig_auto_install, get_manifest(module_name)['auto_install'])

    def test_missing_manifest(self):
        # no manifest file -> empty dict plus a DEBUG log entry
        with self.assertLogs('odoo.modules.module', 'DEBUG') as capture:
            manifest = load_manifest(self.module_name)
        self.assertEqual(manifest, {})
        self.assertIn("no manifest file found", capture.output[0])

    def test_missing_license(self):
        # a manifest without `license` falls back to LGPL-3 and warns
        with open(opj(self.module_root, '__manifest__.py'), 'w') as file:
            file.write(str({'name': f'Temp {self.module_name}'}))
        with self.assertLogs('odoo.modules.module', 'WARNING') as capture:
            manifest = load_manifest(self.module_name)
        self.assertEqual(manifest['license'], 'LGPL-3')
        self.assertIn("Missing `license` key", capture.output[0])
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.modules import neutralize
|
||||
|
||||
from odoo.tests import tagged
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
@tagged('post_install', '-at_install', 'neutralize')
class TestNeutralize(TransactionCase):
    def test_10_neutralize(self):
        """ Simply testing that none of the SQL neutralize crashes """
        modules = neutralize.get_installed_modules(self.cr)
        # execute every neutralization query; any SQL error fails the test
        for query in neutralize.get_neutralization_queries(modules):
            self.cr.execute(query)
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
from num2words import num2words
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
class TestNum2WordsAr(TransactionCase):
    """Regression tests pinning num2words output for Arabic (``lang="ar"``).

    The expected strings fix the exact wording for thousands, millions and
    billions, with and without a fractional part.
    """
    # NOTE: the original class defined a setUpClass() that only called
    # super(); that no-op boilerplate has been removed.

    def test_multiple_of_thousands(self):
        """Test num2words with integer thousands/millions/billions."""
        thousand = num2words(1234, lang="ar")
        million = num2words(1234567, lang="ar")
        billion = num2words(1234567890, lang="ar")

        self.assertEqual(thousand, "ألف و مئتان و أربعة و ثلاثون")
        self.assertEqual(
            million, "مليون و مئتان و أربعة و ثلاثون ألفاً و خمسمائة و سبعة و ستون")
        self.assertEqual(
            billion, "مليار و مئتان و أربعة و ثلاثون مليوناً و خمسمائة و سبعة و ستون ألفاً و ثمانمائة و تسعون")

    def test_decimal_multiple_of_thousands(self):
        """Test num2words with decimal thousands/millions/billions."""
        thousand = num2words(1234.1, lang="ar")
        million = num2words(1234567.23, lang="ar")
        billion = num2words(1234567890.9, lang="ar")

        self.assertEqual(thousand, "ألف و مئتان و أربعة و ثلاثون , عشر")
        self.assertEqual(
            million, "مليون و مئتان و أربعة و ثلاثون ألفاً و خمسمائة و سبعة و ستون , ثلاث و عشرون")
        self.assertEqual(
            billion, "مليار و مئتان و أربعة و ثلاثون مليوناً و خمسمائة و سبعة و ستون ألفاً و ثمانمائة و تسعون , تسعون")
|
||||
405
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_orm.py
Normal file
405
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_orm.py
Normal file
|
|
@ -0,0 +1,405 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import psycopg2
|
||||
|
||||
from odoo.exceptions import AccessError, MissingError
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools import mute_logger
|
||||
from odoo import Command
|
||||
|
||||
|
||||
class TestORM(TransactionCase):
|
||||
""" test special behaviors of ORM CRUD functions """
|
||||
|
||||
    @mute_logger('odoo.models')
    def test_access_deleted_records(self):
        """ Verify that accessing deleted records works as expected """
        c1 = self.env['res.partner.category'].create({'name': 'W'})
        c2 = self.env['res.partner.category'].create({'name': 'Y'})
        c1.unlink()

        # read() is expected to skip deleted records because our API is not
        # transactional for a sequence of search()->read() performed from the
        # client-side... a concurrent deletion could therefore cause spurious
        # exceptions even when simply opening a list view!
        # /!\ Using unprivileged user to detect former side effects of ir.rules!
        user = self.env['res.users'].create({
            'name': 'test user',
            'login': 'test2',
            'groups_id': [Command.set([self.ref('base.group_user')])],
        })
        cs = (c1 + c2).with_user(user)
        self.assertEqual([{'id': c2.id, 'name': 'Y'}], cs.read(['name']), "read() should skip deleted records")
        self.assertEqual([], cs[0].read(['name']), "read() should skip deleted records")

        # Deleting an already deleted record should be simply ignored
        self.assertTrue(c1.unlink(), "Re-deleting should be a no-op")
|
||||
|
||||
    @mute_logger('odoo.models')
    def test_access_partial_deletion(self):
        """ Check accessing a record from a recordset where another record has been deleted. """
        Model = self.env['res.country']
        # the test relies on display_name being an automatic (computed) field
        self.assertTrue(type(Model).display_name.automatic, "test assumption not satisfied")

        # access regular field when another record from the same prefetch set has been deleted
        records = Model.create([{'name': name} for name in ('Foo', 'Bar', 'Baz')])
        for record in records:
            record.name
            record.unlink()

        # access computed field when another record from the same prefetch set has been deleted
        records = Model.create([{'name': name} for name in ('Foo', 'Bar', 'Baz')])
        for record in records:
            record.display_name
            record.unlink()
||||
|
||||
    @mute_logger('odoo.models', 'odoo.addons.base.models.ir_rule')
    def test_access_filtered_records(self):
        """ Verify that accessing filtered records works as expected for non-admin user """
        p1 = self.env['res.partner'].create({'name': 'W'})
        p2 = self.env['res.partner'].create({'name': 'Y'})
        user = self.env['res.users'].create({
            'name': 'test user',
            'login': 'test2',
            'groups_id': [Command.set([self.ref('base.group_user')])],
        })

        # record rule hiding p1 from non-admin users
        # NOTE(review): the rule's name says "Y is invisible" but the domain
        # actually hides W (p1); the assertions below match the domain
        partner_model = self.env['ir.model'].search([('model','=','res.partner')])
        self.env['ir.rule'].create({
            'name': 'Y is invisible',
            'domain_force': [('id', '!=', p1.id)],
            'model_id': partner_model.id,
        })

        # search as unprivileged user
        partners = self.env['res.partner'].with_user(user).search([])
        self.assertNotIn(p1, partners, "W should not be visible...")
        self.assertIn(p2, partners, "... but Y should be visible")

        # read as unprivileged user
        with self.assertRaises(AccessError):
            p1.with_user(user).read(['name'])
        # write as unprivileged user
        with self.assertRaises(AccessError):
            p1.with_user(user).write({'name': 'foo'})
        # unlink as unprivileged user
        with self.assertRaises(AccessError):
            p1.with_user(user).unlink()

        # Prepare mixed case
        p2.unlink()
        # read mixed records: some deleted and some filtered
        with self.assertRaises(AccessError):
            (p1 + p2).with_user(user).read(['name'])
        # delete mixed records: some deleted and some filtered
        with self.assertRaises(AccessError):
            (p1 + p2).with_user(user).unlink()
||||
|
||||
def test_read(self):
|
||||
partner = self.env['res.partner'].create({'name': 'MyPartner1'})
|
||||
result = partner.read()
|
||||
self.assertIsInstance(result, list)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_search_read(self):
|
||||
partner = self.env['res.partner']
|
||||
|
||||
# simple search_read
|
||||
partner.create({'name': 'MyPartner1'})
|
||||
found = partner.search_read([('name', '=', 'MyPartner1')], ['name'])
|
||||
self.assertEqual(len(found), 1)
|
||||
self.assertEqual(found[0]['name'], 'MyPartner1')
|
||||
self.assertIn('id', found[0])
|
||||
|
||||
# search_read correct order
|
||||
partner.create({'name': 'MyPartner2'})
|
||||
found = partner.search_read([('name', 'like', 'MyPartner')], ['name'], order="name")
|
||||
self.assertEqual(len(found), 2)
|
||||
self.assertEqual(found[0]['name'], 'MyPartner1')
|
||||
self.assertEqual(found[1]['name'], 'MyPartner2')
|
||||
found = partner.search_read([('name', 'like', 'MyPartner')], ['name'], order="name desc")
|
||||
self.assertEqual(len(found), 2)
|
||||
self.assertEqual(found[0]['name'], 'MyPartner2')
|
||||
self.assertEqual(found[1]['name'], 'MyPartner1')
|
||||
|
||||
# search_read that finds nothing
|
||||
found = partner.search_read([('name', '=', 'Does not exists')], ['name'])
|
||||
self.assertEqual(len(found), 0)
|
||||
|
||||
# search_read with an empty array of fields
|
||||
found = partner.search_read([], [], limit=1)
|
||||
self.assertEqual(len(found), 1)
|
||||
self.assertTrue(field in list(found[0]) for field in ['id', 'name', 'display_name', 'email'])
|
||||
|
||||
# search_read without fields
|
||||
found = partner.search_read([], False, limit=1)
|
||||
self.assertEqual(len(found), 1)
|
||||
self.assertTrue(field in list(found[0]) for field in ['id', 'name', 'display_name', 'email'])
|
||||
|
||||
    @mute_logger('odoo.sql_db')
    def test_exists(self):
        """ exists() keeps stored and new records, and drops unknown ids. """
        partner = self.env['res.partner']

        # check that records obtained from search exist
        recs = partner.search([])
        self.assertTrue(recs)
        self.assertEqual(recs.exists(), recs)

        # check that new records exist by convention
        recs = partner.new({})
        self.assertTrue(recs.exists())

        # check that there is no record with id 0
        recs = partner.browse([0])
        self.assertFalse(recs.exists())
|
||||
    def test_groupby_date(self):
        """ Group records by day/month/year intervals with read_group(), and
        check grouping combined with ordering. """
        # one partner per date, spread over several days, months and years
        partners_data = dict(
            A='2012-11-19',
            B='2012-12-17',
            C='2012-12-31',
            D='2013-01-07',
            E='2013-01-14',
            F='2013-01-28',
            G='2013-02-11',
        )

        partner_ids = []
        partner_ids_by_day = defaultdict(list)
        partner_ids_by_month = defaultdict(list)
        partner_ids_by_year = defaultdict(list)

        partners = self.env['res.partner']
        for name, date in partners_data.items():
            p = partners.create(dict(name=name, date=date))
            partner_ids.append(p.id)
            partner_ids_by_day[date].append(p.id)
            # '2012-11-19' -> month key '2012-11', year key '2012'
            partner_ids_by_month[date.rsplit('-', 1)[0]].append(p.id)
            partner_ids_by_year[date.split('-', 1)[0]].append(p.id)

        def read_group(interval):
            # group the test partners by the given date interval, mapping each
            # group key to the recordset matching that group's domain
            domain = [('id', 'in', partner_ids)]
            result = {}
            for grp in partners.read_group(domain, ['date'], ['date:' + interval]):
                result[grp['date:' + interval]] = partners.search(grp['__domain'])
            return result

        # number of groups per interval must match the manual bucketing above
        self.assertEqual(len(read_group('day')), len(partner_ids_by_day))
        self.assertEqual(len(read_group('month')), len(partner_ids_by_month))
        self.assertEqual(len(read_group('year')), len(partner_ids_by_year))

        # non-lazy month+day grouping yields one group per partner (all dates differ)
        res = partners.read_group([('id', 'in', partner_ids)], ['date'],
                                  ['date:month', 'date:day'], lazy=False)
        self.assertEqual(len(res), len(partner_ids))

        # combine groupby and orderby
        months = ['February 2013', 'January 2013', 'December 2012', 'November 2012']
        res = partners.read_group([('id', 'in', partner_ids)], ['date'],
                                  groupby=['date:month'], orderby='date:month DESC')
        self.assertEqual([item['date:month'] for item in res], months)

        # order by date should reorder by date:month
        res = partners.read_group([('id', 'in', partner_ids)], ['date'],
                                  groupby=['date:month'], orderby='date DESC')
        self.assertEqual([item['date:month'] for item in res], months)

        # order by date should reorder by date:day
        days = ['11 Feb 2013', '28 Jan 2013', '14 Jan 2013', '07 Jan 2013',
                '31 Dec 2012', '17 Dec 2012', '19 Nov 2012']
        res = partners.read_group([('id', 'in', partner_ids)], ['date'],
                                  groupby=['date:month', 'date:day'],
                                  orderby='date DESC', lazy=False)
        self.assertEqual([item['date:day'] for item in res], days)
||||
|
||||
def test_write_duplicate(self):
|
||||
p1 = self.env['res.partner'].create({'name': 'W'})
|
||||
(p1 + p1).write({'name': 'X'})
|
||||
|
||||
    def test_m2m_store_trigger(self):
        """ Writing on the inverse side of the user/group m2m must retrigger
        the recomputation of the stored field res.users.share. """
        group_user = self.env.ref('base.group_user')

        # a user with no group at all is flagged as a "share" user
        user = self.env['res.users'].create({
            'name': 'test',
            'login': 'test_m2m_store_trigger',
            'groups_id': [Command.set([])],
        })
        self.assertTrue(user.share)

        # linking the user from the group side must recompute share -> False
        group_user.write({'users': [Command.link(user.id)]})
        self.assertFalse(user.share)

        # unlinking it from the group side must flip share back -> True
        group_user.write({'users': [Command.unlink(user.id)]})
        self.assertTrue(user.share)
||||
|
||||
    @mute_logger('odoo.models')
    def test_unlink_with_property(self):
        """ Verify that unlink removes the related ir.property as unprivileged user """
        user = self.env['res.users'].create({
            'name': 'Justine Bridou',
            'login': 'saucisson',
            'groups_id': [Command.set([self.ref('base.group_partner_manager')])],
        })
        p1 = self.env['res.partner'].with_user(user).create({'name': 'Zorro'})
        # attach a property value to the partner, then check it is readable
        self.env['ir.property'].with_user(user)._set_multi("ref", "res.partner", {p1.id: "Nain poilu"})
        p1_prop = self.env['ir.property'].with_user(user)._get("ref", "res.partner", res_id=p1.id)
        self.assertEqual(
            p1_prop, "Nain poilu", 'p1_prop should have been created')

        # Unlink with unprivileged user
        p1.unlink()

        # ir.property is deleted along with its owning record
        p1_prop = self.env['ir.property'].with_user(user)._get("ref", "res.partner", res_id=p1.id)
        self.assertEqual(
            p1_prop, False, 'p1_prop should have been deleted')
||||
|
||||
    def test_create_multi(self):
        """ create for multiple records """
        # assumption: 'res.bank' does not override 'create'
        vals_list = [{'name': name} for name in ('Foo', 'Bar', 'Baz')]
        vals_list[0]['email'] = 'foo@example.com'
        # single-record create: one record per call, values applied as given
        for vals in vals_list:
            record = self.env['res.bank'].create(vals)
            self.assertEqual(len(record), 1)
            self.assertEqual(record.name, vals['name'])
            self.assertEqual(record.email, vals.get('email', False))

        # batch create with an empty list returns an empty recordset
        records = self.env['res.bank'].create([])
        self.assertFalse(records)

        # batch create returns records in the same order as vals_list
        records = self.env['res.bank'].create(vals_list)
        self.assertEqual(len(records), len(vals_list))
        for record, vals in zip(records, vals_list):
            self.assertEqual(record.name, vals['name'])
            self.assertEqual(record.email, vals.get('email', False))

        # create countries and states (one2many Command.create in batch create)
        vals_list = [{
            'name': 'Foo',
            'state_ids': [
                Command.create({'name': 'North Foo', 'code': 'NF'}),
                Command.create({'name': 'South Foo', 'code': 'SF'}),
                Command.create({'name': 'West Foo', 'code': 'WF'}),
                Command.create({'name': 'East Foo', 'code': 'EF'}),
            ],
        }, {
            'name': 'Bar',
            'state_ids': [
                Command.create({'name': 'North Bar', 'code': 'NB'}),
                Command.create({'name': 'South Bar', 'code': 'SB'}),
            ],
        }]
        foo, bar = self.env['res.country'].create(vals_list)
        self.assertEqual(foo.name, 'Foo')
        self.assertCountEqual(foo.mapped('state_ids.code'), ['NF', 'SF', 'WF', 'EF'])
        self.assertEqual(bar.name, 'Bar')
        self.assertCountEqual(bar.mapped('state_ids.code'), ['NB', 'SB'])
||||
|
||||
|
||||
class TestInherits(TransactionCase):
    """ test the behavior of the orm for models that use _inherits;
        specifically: res.users, that inherits from res.partner
    """

    def test_default(self):
        """`default_get` cannot return a dictionary or a new id."""
        defaults = self.env['res.users'].default_get(['partner_id'])
        if 'partner_id' in defaults:
            # a default for the delegated parent must be a plain id (or False)
            value = defaults['partner_id']
            self.assertIsInstance(value, (bool, int))
||||
|
||||
def test_create(self):
|
||||
""" creating a user should automatically create a new partner """
|
||||
partners_before = self.env['res.partner'].search([])
|
||||
user_foo = self.env['res.users'].create({'name': 'Foo', 'login': 'foo'})
|
||||
|
||||
self.assertNotIn(user_foo.partner_id, partners_before)
|
||||
|
||||
    def test_create_with_ancestor(self):
        """ creating a user with a specific 'partner_id' should not create a new partner """
        partner_foo = self.env['res.partner'].create({'name': 'Foo'})
        partners_before = self.env['res.partner'].search([])
        user_foo = self.env['res.users'].create({'partner_id': partner_foo.id, 'login': 'foo'})
        partners_after = self.env['res.partner'].search([])

        # no extra partner was created by the user creation
        self.assertEqual(partners_before, partners_after)
        # delegated fields come from the given partner
        self.assertEqual(user_foo.name, 'Foo')
        self.assertEqual(user_foo.partner_id, partner_foo)
||||
|
||||
    @mute_logger('odoo.models')
    def test_read(self):
        """ inherited fields should be read without any indirection """
        user_foo = self.env['res.users'].create({'name': 'Foo', 'login': 'foo'})
        user_values, = user_foo.read()
        partner_values, = user_foo.partner_id.read()

        # 'name' is delegated to the partner: both reads must agree
        self.assertEqual(user_values['name'], partner_values['name'])
        self.assertEqual(user_foo.name, user_foo.partner_id.name)
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_copy(self):
|
||||
""" copying a user should automatically copy its partner, too """
|
||||
user_foo = self.env['res.users'].create({
|
||||
'name': 'Foo',
|
||||
'login': 'foo',
|
||||
'employee': True,
|
||||
})
|
||||
foo_before, = user_foo.read()
|
||||
del foo_before['__last_update']
|
||||
del foo_before['create_date']
|
||||
del foo_before['write_date']
|
||||
user_bar = user_foo.copy({'login': 'bar'})
|
||||
foo_after, = user_foo.read()
|
||||
del foo_after['__last_update']
|
||||
del foo_after['create_date']
|
||||
del foo_after['write_date']
|
||||
self.assertEqual(foo_before, foo_after)
|
||||
|
||||
self.assertEqual(user_bar.name, 'Foo (copy)')
|
||||
self.assertEqual(user_bar.login, 'bar')
|
||||
self.assertEqual(user_foo.employee, user_bar.employee)
|
||||
self.assertNotEqual(user_foo.id, user_bar.id)
|
||||
self.assertNotEqual(user_foo.partner_id.id, user_bar.partner_id.id)
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_copy_with_ancestor(self):
|
||||
""" copying a user with 'parent_id' in defaults should not duplicate the partner """
|
||||
user_foo = self.env['res.users'].create({'login': 'foo', 'name': 'Foo', 'signature': 'Foo'})
|
||||
partner_bar = self.env['res.partner'].create({'name': 'Bar'})
|
||||
|
||||
foo_before, = user_foo.read()
|
||||
del foo_before['__last_update']
|
||||
del foo_before['create_date']
|
||||
del foo_before['write_date']
|
||||
del foo_before['login_date']
|
||||
partners_before = self.env['res.partner'].search([])
|
||||
user_bar = user_foo.copy({'partner_id': partner_bar.id, 'login': 'bar'})
|
||||
foo_after, = user_foo.read()
|
||||
del foo_after['__last_update']
|
||||
del foo_after['create_date']
|
||||
del foo_after['write_date']
|
||||
del foo_after['login_date']
|
||||
partners_after = self.env['res.partner'].search([])
|
||||
|
||||
self.assertEqual(foo_before, foo_after)
|
||||
self.assertEqual(partners_before, partners_after)
|
||||
|
||||
self.assertNotEqual(user_foo.id, user_bar.id)
|
||||
self.assertEqual(user_bar.partner_id.id, partner_bar.id)
|
||||
self.assertEqual(user_bar.login, 'bar', "login is given from copy parameters")
|
||||
self.assertFalse(user_bar.password, "password should not be copied from original record")
|
||||
self.assertEqual(user_bar.name, 'Bar', "name is given from specific partner")
|
||||
self.assertEqual(user_bar.signature, user_foo.signature, "signature should be copied")
|
||||
|
||||
@mute_logger('odoo.models')
|
||||
def test_write_date(self):
|
||||
""" modifying inherited fields must update write_date """
|
||||
user = self.env.user
|
||||
write_date_before = user.write_date
|
||||
|
||||
# write base64 image
|
||||
user.write({'image_1920': 'R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw=='})
|
||||
write_date_after = user.write_date
|
||||
self.assertNotEqual(write_date_before, write_date_after)
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools import get_cache_key_counter
|
||||
|
||||
|
||||
class TestOrmcache(TransactionCase):
    """Behavior of the ormcache() decorator through ir.model.data lookups."""

    def test_ormcache(self):
        """ Test the effectiveness of the ormcache() decorator. """
        IMD = self.env['ir.model.data']
        XMLID = 'base.group_no_one'

        # retrieve the cache, its key and stat counter for this lookup
        cache, key, counter = get_cache_key_counter(IMD._xmlid_lookup, XMLID)
        hit = counter.hit
        miss = counter.miss

        # clearing the caches of ir.model.data must evict the key
        IMD.clear_caches()
        self.assertNotIn(key, cache)

        # the first lookup is a miss that repopulates the cache; every
        # subsequent lookup is served from the cache (one more hit each time)
        for extra_hits in (0, 1, 2):
            self.env.ref(XMLID)
            self.assertEqual(counter.hit, hit + extra_hits)
            self.assertEqual(counter.miss, miss + 1)
            self.assertIn(key, cache)
||||
101
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_osv.py
Normal file
101
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_osv.py
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import BaseCase
|
||||
from odoo.tools import Query
|
||||
|
||||
|
||||
class QueryTestCase(BaseCase):
    """Unit tests for the SQL Query builder (odoo.tools.Query): table
    registration, explicit inner/left joins, implicit joins via WHERE,
    and alias generation (including length-limited hashed aliases)."""

    def test_basic_query(self):
        """One inner join and one left join hanging off distinct base tables."""
        query = Query(None, 'product_product')
        query.add_table('product_template')
        query.add_where("product_product.template_id = product_template.id")
        # add inner join
        alias = query.join("product_template", "categ_id", "product_category", "id", "categ_id")
        self.assertEqual(alias, 'product_template__categ_id')
        # add left join
        alias = query.left_join("product_product", "user_id", "res_user", "id", "user_id")
        self.assertEqual(alias, 'product_product__user_id')

        from_clause, where_clause, where_params = query.get_sql()
        self.assertEqual(from_clause,
            '"product_product", "product_template" JOIN "product_category" AS "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") LEFT JOIN "res_user" AS "product_product__user_id" ON ("product_product"."user_id" = "product_product__user_id"."id")')
        self.assertEqual(where_clause, "product_product.template_id = product_template.id")

    def test_query_chained_explicit_joins(self):
        """A left join chained onto the alias produced by a previous join."""
        query = Query(None, 'product_product')
        query.add_table('product_template')
        query.add_where("product_product.template_id = product_template.id")
        # add inner join
        alias = query.join("product_template", "categ_id", "product_category", "id", "categ_id")
        self.assertEqual(alias, 'product_template__categ_id')
        # add CHAINED left join
        alias = query.left_join("product_template__categ_id", "user_id", "res_user", "id", "user_id")
        self.assertEqual(alias, 'product_template__categ_id__user_id')

        from_clause, where_clause, where_params = query.get_sql()
        self.assertEqual(from_clause,
            '"product_product", "product_template" JOIN "product_category" AS "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") LEFT JOIN "res_user" AS "product_template__categ_id__user_id" ON ("product_template__categ_id"."user_id" = "product_template__categ_id__user_id"."id")')
        self.assertEqual(where_clause, "product_product.template_id = product_template.id")

    def test_mixed_query_chained_explicit_implicit_joins(self):
        """Explicit chained joins combined with an implicit join added through
        add_table()/add_where()."""
        query = Query(None, 'product_product')
        query.add_table('product_template')
        query.add_where("product_product.template_id = product_template.id")
        # add inner join
        alias = query.join("product_template", "categ_id", "product_category", "id", "categ_id")
        self.assertEqual(alias, 'product_template__categ_id')
        # add CHAINED left join
        alias = query.left_join("product_template__categ_id", "user_id", "res_user", "id", "user_id")
        self.assertEqual(alias, 'product_template__categ_id__user_id')
        # additional implicit join
        query.add_table('account.account')
        query.add_where("product_category.expense_account_id = account_account.id")

        from_clause, where_clause, where_params = query.get_sql()
        self.assertEqual(from_clause,
            '"product_product", "product_template", "account.account" JOIN "product_category" AS "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") LEFT JOIN "res_user" AS "product_template__categ_id__user_id" ON ("product_template__categ_id"."user_id" = "product_template__categ_id__user_id"."id")')
        self.assertEqual(where_clause, "product_product.template_id = product_template.id AND product_category.expense_account_id = account_account.id")

    def test_raise_missing_lhs(self):
        """Joining from a table that was never registered must be rejected."""
        query = Query(None, 'product_product')
        with self.assertRaises(AssertionError):
            query.join("product_template", "categ_id", "product_category", "id", "categ_id")

    def test_long_aliases(self):
        """Aliases that would exceed the identifier length limit get a hashed
        suffix; further joins extend the hashed form deterministically."""
        query = Query(None, 'product_product')
        tmp = query.join('product_product', 'product_tmpl_id', 'product_template', 'id', 'product_tmpl_id')
        self.assertEqual(tmp, 'product_product__product_tmpl_id')
        # no hashing
        tmp_cat = query.join(tmp, 'product_category_id', 'product_category', 'id', 'product_category_id')
        self.assertEqual(tmp_cat, 'product_product__product_tmpl_id__product_category_id')
        # hashing to limit identifier length
        tmp_cat_cmp = query.join(tmp_cat, 'company_id', 'res_company', 'id', 'company_id')
        self.assertEqual(tmp_cat_cmp, 'product_product__product_tmpl_id__product_category_id__9f0ddff7')
        tmp_cat_stm = query.join(tmp_cat, 'salesteam_id', 'res_company', 'id', 'salesteam_id')
        self.assertEqual(tmp_cat_stm, 'product_product__product_tmpl_id__product_category_id__953a466f')
        # extend hashed identifiers
        tmp_cat_cmp_par = query.join(tmp_cat_cmp, 'partner_id', 'res_partner', 'id', 'partner_id')
        self.assertEqual(tmp_cat_cmp_par, 'product_product__product_tmpl_id__product_category_id__56d55687')
        tmp_cat_stm_par = query.join(tmp_cat_stm, 'partner_id', 'res_partner', 'id', 'partner_id')
        self.assertEqual(tmp_cat_stm_par, 'product_product__product_tmpl_id__product_category_id__00363fdd')

    def test_table_expression(self):
        """A table may be a plain name or an arbitrary SQL subquery; subqueries
        are wrapped in parentheses and aliased."""
        query = Query(None, 'foo')
        from_clause, where_clause, where_params = query.get_sql()
        self.assertEqual(from_clause, '"foo"')

        query = Query(None, 'bar', 'SELECT id FROM foo')
        from_clause, where_clause, where_params = query.get_sql()
        self.assertEqual(from_clause, '(SELECT id FROM foo) AS "bar"')

        query = Query(None, 'foo')
        query.add_table('bar', 'SELECT id FROM foo')
        from_clause, where_clause, where_params = query.get_sql()
        self.assertEqual(from_clause, '"foo", (SELECT id FROM foo) AS "bar"')

        query = Query(None, 'foo')
        query.join('foo', 'bar_id', 'SELECT id FROM foo', 'id', 'bar')
        from_clause, where_clause, where_params = query.get_sql()
        self.assertEqual(from_clause, '"foo" JOIN (SELECT id FROM foo) AS "foo__bar" ON ("foo"."bar_id" = "foo__bar"."id")')
||||
|
|
@ -0,0 +1,94 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools import pdf
|
||||
from odoo.modules.module import get_module_resource
|
||||
import io
|
||||
|
||||
|
||||
class TestPdf(TransactionCase):
    """ Tests on pdf. """

    def setUp(self):
        super().setUp()
        file_path = get_module_resource('base', 'tests', 'minimal.pdf')
        # BUGFIX: the original did open(file_path, 'rb').read(), leaking the
        # file handle until garbage collection; use a context manager instead
        with open(file_path, 'rb') as pdf_file:
            self.file = pdf_file.read()
        self.minimal_reader_buffer = io.BytesIO(self.file)
        self.minimal_pdf_reader = pdf.OdooPdfFileReader(self.minimal_reader_buffer)

    def test_odoo_pdf_file_reader(self):
        """Attachments added by a writer cloned from the same buffer must be
        visible through the reader's getAttachments()."""
        attachments = list(self.minimal_pdf_reader.getAttachments())
        self.assertEqual(len(attachments), 0)

        pdf_writer = pdf.PdfFileWriter()
        pdf_writer.cloneReaderDocumentRoot(self.minimal_pdf_reader)
        pdf_writer.addAttachment('test_attachment.txt', b'My awesome attachment')

        attachments = list(self.minimal_pdf_reader.getAttachments())
        self.assertEqual(len(attachments), 1)

    def test_odoo_pdf_file_writer(self):
        """Attachments added through OdooPdfFileWriter must accumulate."""
        attachments = list(self.minimal_pdf_reader.getAttachments())
        self.assertEqual(len(attachments), 0)

        pdf_writer = pdf.OdooPdfFileWriter()
        pdf_writer.cloneReaderDocumentRoot(self.minimal_pdf_reader)

        pdf_writer.addAttachment('test_attachment.txt', b'My awesome attachment')
        attachments = list(self.minimal_pdf_reader.getAttachments())
        self.assertEqual(len(attachments), 1)

        pdf_writer.addAttachment('another_attachment.txt', b'My awesome OTHER attachment')
        attachments = list(self.minimal_pdf_reader.getAttachments())
        self.assertEqual(len(attachments), 2)

    def test_odoo_pdf_file_reader_with_owner_encryption(self):
        """Attachments must remain readable from a PDF encrypted with an
        owner password only (empty user password)."""
        pdf_writer = pdf.OdooPdfFileWriter()
        pdf_writer.cloneReaderDocumentRoot(self.minimal_pdf_reader)

        pdf_writer.addAttachment('test_attachment.txt', b'My awesome attachment')
        pdf_writer.addAttachment('another_attachment.txt', b'My awesome OTHER attachment')

        # empty user password, 'foo' owner password
        pdf_writer.encrypt("", "foo")

        with io.BytesIO() as writer_buffer:
            pdf_writer.write(writer_buffer)
            encrypted_content = writer_buffer.getvalue()

        with io.BytesIO(encrypted_content) as reader_buffer:
            pdf_reader = pdf.OdooPdfFileReader(reader_buffer)
            attachments = list(pdf_reader.getAttachments())

        self.assertEqual(len(attachments), 2)

    def test_merge_pdf(self):
        """merge_pdf() must concatenate the pages of its input documents."""
        self.assertEqual(self.minimal_pdf_reader.getNumPages(), 1)

        merged_pdf = pdf.merge_pdf([self.file, self.file])
        merged_reader_buffer = io.BytesIO(merged_pdf)
        merged_pdf_reader = pdf.OdooPdfFileReader(merged_reader_buffer)
        self.assertEqual(merged_pdf_reader.getNumPages(), 2)
        merged_reader_buffer.close()

    def test_branded_file_writer(self):
        """The patched writer must stamp 'Odoo' as Producer/Creator metadata."""
        # It's not easy to create a PDF with PyPDF2, so instead we copy minimal.pdf with our custom pdf writer
        pdf_writer = pdf.PdfFileWriter()  # BrandedFileWriter
        pdf_writer.cloneReaderDocumentRoot(self.minimal_pdf_reader)
        writer_buffer = io.BytesIO()
        pdf_writer.write(writer_buffer)
        branded_content = writer_buffer.getvalue()
        writer_buffer.close()

        # Read the metadata of the newly created pdf.
        reader_buffer = io.BytesIO(branded_content)
        pdf_reader = pdf.PdfFileReader(reader_buffer)
        pdf_info = pdf_reader.getDocumentInfo()
        self.assertEqual(pdf_info['/Producer'], 'Odoo')
        self.assertEqual(pdf_info['/Creator'], 'Odoo')
        reader_buffer.close()

    def tearDown(self):
        super().tearDown()
        self.minimal_reader_buffer.close()
||||
|
|
@ -0,0 +1,667 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import sys
|
||||
import time
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from odoo.exceptions import AccessError
|
||||
from odoo.tests.common import BaseCase, TransactionCase, tagged, new_test_user
|
||||
from odoo.tools import profiler
|
||||
from odoo.tools.profiler import Profiler, ExecutionContext
|
||||
from odoo.tools.speedscope import Speedscope
|
||||
|
||||
|
||||
@tagged('post_install', '-at_install', 'profiling')
# post_install to ensure mail is already loaded if installed (new_test_user would fail otherwise because of notification_type)
class TestProfileAccess(TransactionCase):
    """Access rights on ir.profile records: admin can read, plain users cannot."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # one profile record shared by all tests of the class
        cls.test_profile = cls.env['ir.profile'].create({})

    def test_admin_has_access(self):
        # the default test environment user (admin) can search and read profiles
        self.assertEqual(self.env['ir.profile'].search([('id', '=', self.test_profile.id)]), self.test_profile)
        self.test_profile.read(['name'])

    def test_user_no_access(self):
        # a plain internal user can neither search nor read profiles
        user = new_test_user(self.env, login='noProfile', groups='base.group_user')
        with self.with_user('noProfile'), self.assertRaises(AccessError):
            self.env['ir.profile'].search([])
        with self.assertRaises(AccessError):
            self.test_profile.with_user(user).read(['name'])
||||
|
||||
@tagged('post_install', '-at_install', 'profiling')
class TestSpeedscope(BaseCase):
    def example_profile(self):
        """Return a hand-crafted profiling result in the collector's format:
        an initial stack trace plus a list of timestamped stack samples."""
        return {
            'init_stack_trace': [['/path/to/file_1.py', 135, '__main__', 'main()']],
            'result': [{ # init frame
                'start': 2.0,
                'exec_context': (),
                'stack': [
                    ['/path/to/file_1.py', 10, 'main', 'do_stuff1(test=do_tests)'],
                    ['/path/to/file_1.py', 101, 'do_stuff1', 'cr.execute(query, params)'],
                ],
            }, {
                'start': 3.0,
                'exec_context': (),
                'stack': [
                    ['/path/to/file_1.py', 10, 'main', 'do_stuff1(test=do_tests)'],
                    ['/path/to/file_1.py', 101, 'do_stuff1', 'cr.execute(query, params)'],
                    ['/path/to/sql_db.py', 650, 'execute', 'res = self._obj.execute(query, params)'],
                ],
            }, { # duplicate frame
                'start': 4.0,
                'exec_context': (),
                'stack': [
                    ['/path/to/file_1.py', 10, 'main', 'do_stuff1(test=do_tests)'],
                    ['/path/to/file_1.py', 101, 'do_stuff1', 'cr.execute(query, params)'],
                    ['/path/to/sql_db.py', 650, 'execute', 'res = self._obj.execute(query, params)'],
                ],
            }, { # other frame
                'start': 6.0,
                'exec_context': (),
                'stack': [
                    ['/path/to/file_1.py', 10, 'main', 'do_stuff1(test=do_tests)'],
                    ['/path/to/file_1.py', 101, 'do_stuff1', 'check'],
                    ['/path/to/sql_db.py', 650, 'check', 'assert x = y'],
                ],
            }, { # out of frame
                'start': 10.0,
                'exec_context': (),
                'stack': [
                    ['/path/to/file_1.py', 10, 'main', 'do_stuff1(test=do_tests)'],
                    ['/path/to/file_1.py', 101, 'do_stuff1', 'for i in range(10):'],
                ],
            }, { # final frame
                'start': 10.35,
                'exec_context': (),
                'stack': None,
            }],
        }
||||
|
||||
def test_convert_empty(self):
|
||||
Speedscope().make()
|
||||
|
||||
def test_converts_profile_simple(self):
|
||||
profile = self.example_profile()
|
||||
|
||||
sp = Speedscope(init_stack_trace=profile['init_stack_trace'])
|
||||
sp.add('profile', profile['result'])
|
||||
sp.add_output(['profile'], complete=False)
|
||||
res = sp.make()
|
||||
|
||||
frames = res['shared']['frames']
|
||||
self.assertEqual(len(frames), 4)
|
||||
|
||||
profile_combined = res['profiles'][0]
|
||||
events = [(e['type'], e['frame']) for e in profile_combined['events']]
|
||||
self.assertEqual(events, [
|
||||
('O', 0), # /main
|
||||
('O', 1), # /main/do_stuff1
|
||||
('O', 2), # /main/do_stuff1/execute
|
||||
('C', 2), # /main/do_stuff1
|
||||
('O', 3), # /main/do_stuff1/check
|
||||
('C', 3), # /main/do_stuff1
|
||||
('C', 1), # /main
|
||||
('C', 0), # /
|
||||
])
|
||||
self.assertEqual(profile_combined['events'][0]['at'], 0.0)
|
||||
self.assertEqual(profile_combined['events'][-1]['at'], 8.35)
|
||||
|
||||
def test_converts_profile_no_end(self):
|
||||
profile = self.example_profile()
|
||||
profile['result'].pop()
|
||||
|
||||
sp = Speedscope(init_stack_trace=profile['init_stack_trace'])
|
||||
sp.add('profile', profile['result'])
|
||||
sp.add_output(['profile'], complete=False)
|
||||
res = sp.make()
|
||||
profile_combined = res['profiles'][0]
|
||||
events = [(e['type'], e['frame']) for e in profile_combined['events']]
|
||||
|
||||
self.assertEqual(events, [
|
||||
('O', 0), # /main
|
||||
('O', 1), # /main/do_stuff1
|
||||
('O', 2), # /main/do_stuff1/execute
|
||||
('C', 2), # /main/do_stuff1
|
||||
('O', 3), # /main/do_stuff1/check
|
||||
('C', 3), # /main/do_stuff1
|
||||
('C', 1), # /main
|
||||
('C', 0), # /
|
||||
])
|
||||
self.assertEqual(profile_combined['events'][-1]['at'], 8)
|
||||
|
||||
def test_converts_init_stack_trace(self):
|
||||
profile = self.example_profile()
|
||||
|
||||
sp = Speedscope(init_stack_trace=profile['init_stack_trace'])
|
||||
sp.add('profile', profile['result'])
|
||||
sp.add_output(['profile'], complete=True)
|
||||
res = sp.make()
|
||||
|
||||
profile_combined = res['profiles'][0]
|
||||
events = [(e['type'], e['frame']) for e in profile_combined['events']]
|
||||
|
||||
self.assertEqual(events, [
|
||||
('O', 4), # /__main__/
|
||||
('O', 0), # /__main__/main
|
||||
('O', 1), # /__main__/main/do_stuff1
|
||||
('O', 2), # /__main__/main/do_stuff1/execute
|
||||
('C', 2), # /__main__/main/do_stuff1
|
||||
('O', 3), # /__main__/main/do_stuff1/check
|
||||
('C', 3), # /__main__/main/do_stuff1
|
||||
('C', 1), # /__main__/main
|
||||
('C', 0), # /__main__/
|
||||
('C', 4), # /
|
||||
])
|
||||
self.assertEqual(profile_combined['events'][-1]['at'], 8.35)
|
||||
|
||||
def test_end_priority(self):
|
||||
"""
|
||||
If a sample as a time (usually a query) we expect to keep the complete frame
|
||||
even if another concurent frame tics before the end of the current one:
|
||||
frame duration should always be more reliable.
|
||||
"""
|
||||
|
||||
async_profile = self.example_profile()['result']
|
||||
sql_profile = self.example_profile()['result']
|
||||
# make sql_profile a single frame from 2.5 to 5.5
|
||||
sql_profile = [sql_profile[1]]
|
||||
sql_profile[0]['start'] = 2.5
|
||||
sql_profile[0]['time'] = 3
|
||||
sql_profile[0]['query'] = 'SELECT 1'
|
||||
sql_profile[0]['full_query'] = 'SELECT 1'
|
||||
# some check to ensure the take makes sence
|
||||
self.assertEqual(async_profile[1]['start'], 3)
|
||||
self.assertEqual(async_profile[2]['start'], 4)
|
||||
|
||||
self.assertNotIn('query', async_profile[1]['stack'])
|
||||
self.assertNotIn('time', async_profile[1]['stack'])
|
||||
self.assertEqual(async_profile[1]['stack'], async_profile[2]['stack'])
|
||||
# this last assertion is not really useful but ensures that the samples
|
||||
# are consistent with the sql one, just missing tue query
|
||||
|
||||
sp = Speedscope(init_stack_trace=[])
|
||||
sp.add('sql', async_profile)
|
||||
sp.add('traces', sql_profile)
|
||||
sp.add_output(['sql', 'traces'], complete=False)
|
||||
res = sp.make()
|
||||
profile_combined = res['profiles'][0]
|
||||
events = [
|
||||
(e['at']+2, e['type'], res['shared']['frames'][e['frame']]['name'])
|
||||
for e in profile_combined['events']
|
||||
]
|
||||
self.assertEqual(events, [
|
||||
# pylint: disable=bad-continuation
|
||||
(2.0, 'O', 'main'),
|
||||
(2.0, 'O', 'do_stuff1'),
|
||||
(2.5, 'O', 'execute'),
|
||||
(2.5, 'O', "sql('SELECT 1')"),
|
||||
(5.5, 'C', "sql('SELECT 1')"), # select ends at 5.5 as expected despite another concurent frame at 3 and 4
|
||||
(5.5, 'C', 'execute'),
|
||||
(6.0, 'O', 'check'),
|
||||
(10.0, 'C', 'check'),
|
||||
(10.35, 'C', 'do_stuff1'),
|
||||
(10.35, 'C', 'main'),
|
||||
])
|
||||
|
||||
def test_converts_context(self):
|
||||
stack = [
|
||||
['file.py', 10, 'level1', 'level1'],
|
||||
['file.py', 11, 'level2', 'level2'],
|
||||
]
|
||||
profile = {
|
||||
'init_stack_trace': [['file.py', 1, 'level0', 'level0)']],
|
||||
'result': [{ # init frame
|
||||
'start': 2.0,
|
||||
'exec_context': ((2, {'a': '1'}), (3, {'b': '1'})),
|
||||
'stack': list(stack),
|
||||
}, {
|
||||
'start': 3.0,
|
||||
'exec_context': ((2, {'a': '1'}), (3, {'b': '2'})),
|
||||
'stack': list(stack),
|
||||
}, { # final frame
|
||||
'start': 10.35,
|
||||
'exec_context': (),
|
||||
'stack': None,
|
||||
}],
|
||||
}
|
||||
sp = Speedscope(init_stack_trace=profile['init_stack_trace'])
|
||||
sp.add('profile', profile['result'])
|
||||
sp.add_output(['profile'], complete=True)
|
||||
res = sp.make()
|
||||
events = [
|
||||
(e['type'], res['shared']['frames'][e['frame']]['name'])
|
||||
for e in res['profiles'][0]['events']
|
||||
]
|
||||
self.assertEqual(events, [
|
||||
# pylint: disable=bad-continuation
|
||||
('O', 'level0'),
|
||||
('O', 'a=1'),
|
||||
('O', 'level1'),
|
||||
('O', 'b=1'),
|
||||
('O', 'level2'),
|
||||
('C', 'level2'),
|
||||
('C', 'b=1'),
|
||||
('O', 'b=2'),
|
||||
('O', 'level2'),
|
||||
('C', 'level2'),
|
||||
('C', 'b=2'),
|
||||
('C', 'level1'),
|
||||
('C', 'a=1'),
|
||||
('C', 'level0'),
|
||||
])
|
||||
|
||||
def test_converts_context_nested(self):
|
||||
stack = [
|
||||
['file.py', 10, 'level1', 'level1'],
|
||||
['file.py', 11, 'level2', 'level2'],
|
||||
]
|
||||
profile = {
|
||||
'init_stack_trace': [['file.py', 1, 'level0', 'level0)']],
|
||||
'result': [{ # init frame
|
||||
'start': 2.0,
|
||||
'exec_context': ((3, {'a': '1'}), (3, {'b': '1'})), # two contexts at the same level
|
||||
'stack': list(stack),
|
||||
}, { # final frame
|
||||
'start': 10.35,
|
||||
'exec_context': (),
|
||||
'stack': None,
|
||||
}],
|
||||
}
|
||||
sp = Speedscope(init_stack_trace=profile['init_stack_trace'])
|
||||
sp.add('profile', profile['result'])
|
||||
sp.add_output(['profile'], complete=True)
|
||||
res = sp.make()
|
||||
events = [
|
||||
(e['type'], res['shared']['frames'][e['frame']]['name'])
|
||||
for e in res['profiles'][0]['events']
|
||||
]
|
||||
self.assertEqual(events, [
|
||||
# pylint: disable=bad-continuation
|
||||
('O', 'level0'),
|
||||
('O', 'level1'),
|
||||
('O', 'a=1'),
|
||||
('O', 'b=1'),
|
||||
('O', 'level2'),
|
||||
('C', 'level2'),
|
||||
('C', 'b=1'),
|
||||
('C', 'a=1'),
|
||||
('C', 'level1'),
|
||||
('C', 'level0'),
|
||||
])
|
||||
|
||||
def test_converts_context_lower(self):
|
||||
stack = [
|
||||
['file.py', 10, 'level4', 'level4'],
|
||||
['file.py', 11, 'level5', 'level5'],
|
||||
]
|
||||
profile = {
|
||||
'init_stack_trace': [
|
||||
['file.py', 1, 'level0', 'level0'],
|
||||
['file.py', 1, 'level1', 'level1'],
|
||||
['file.py', 1, 'level2', 'level2'],
|
||||
['file.py', 1, 'level3', 'level3'],
|
||||
],
|
||||
'result': [{ # init frame
|
||||
'start': 2.0,
|
||||
'exec_context': ((2, {'a': '1'}), (6, {'b': '1'})),
|
||||
'stack': list(stack),
|
||||
}, { # final frame
|
||||
'start': 10.35,
|
||||
'exec_context': (),
|
||||
'stack': None,
|
||||
}],
|
||||
}
|
||||
sp = Speedscope(init_stack_trace=profile['init_stack_trace'])
|
||||
sp.add('profile', profile['result'])
|
||||
sp.add_output(['profile'], complete=False)
|
||||
res = sp.make()
|
||||
events = [
|
||||
(e['type'], res['shared']['frames'][e['frame']]['name'])
|
||||
for e in res['profiles'][0]['events']
|
||||
]
|
||||
self.assertEqual(events, [
|
||||
# pylint: disable=bad-continuation
|
||||
('O', 'level4'),
|
||||
('O', 'b=1'),
|
||||
('O', 'level5'),
|
||||
('C', 'level5'),
|
||||
('C', 'b=1'),
|
||||
('C', 'level4'),
|
||||
])
|
||||
|
||||
def test_converts_no_context(self):
|
||||
stack = [
|
||||
['file.py', 10, 'level4', 'level4'],
|
||||
['file.py', 11, 'level5', 'level5'],
|
||||
]
|
||||
profile = {
|
||||
'init_stack_trace': [
|
||||
['file.py', 1, 'level0', 'level0'],
|
||||
['file.py', 1, 'level1', 'level1'],
|
||||
['file.py', 1, 'level2', 'level2'],
|
||||
['file.py', 1, 'level3', 'level3'],
|
||||
],
|
||||
'result': [{ # init frame
|
||||
'start': 2.0,
|
||||
'exec_context': ((2, {'a': '1'}), (6, {'b': '1'})),
|
||||
'stack': list(stack),
|
||||
}, { # final frame
|
||||
'start': 10.35,
|
||||
'exec_context': (),
|
||||
'stack': None,
|
||||
}],
|
||||
}
|
||||
sp = Speedscope(init_stack_trace=profile['init_stack_trace'])
|
||||
sp.add('profile', profile['result'])
|
||||
sp.add_output(['profile'], complete=False, use_context=False)
|
||||
res = sp.make()
|
||||
events = [
|
||||
(e['type'], res['shared']['frames'][e['frame']]['name'])
|
||||
for e in res['profiles'][0]['events']
|
||||
]
|
||||
self.assertEqual(events, [
|
||||
# pylint: disable=bad-continuation
|
||||
('O', 'level4'),
|
||||
('O', 'level5'),
|
||||
('C', 'level5'),
|
||||
('C', 'level4'),
|
||||
])
|
||||
|
||||
|
||||
@tagged('post_install', '-at_install', 'profiling')
class TestProfiling(TransactionCase):
    """Integration tests of the Profiler and its collectors against a
    real transaction/cursor."""

    def test_default_values(self):
        # With no arguments the profiler picks up the current database.
        p = Profiler()
        self.assertEqual(p.db, self.env.cr.dbname)

    def test_env_profiler_database(self):
        # The database is detected even with an empty collector list.
        p = Profiler(collectors=[])
        self.assertEqual(p.db, self.env.cr.dbname)

    def test_env_profiler_description(self):
        # The description defaults to something identifying the caller.
        with Profiler(collectors=[], db=None) as p:
            self.assertIn('test_env_profiler_description', p.description)

    def test_execution_context_save(self):
        """Each query entry records the ExecutionContext active at its stack level."""
        with Profiler(db=None, collectors=['sql']) as p:
            for letter in ('a', 'b'):
                stack_level = profiler.stack_size()
                with ExecutionContext(letter=letter):
                    self.env.cr.execute('SELECT 1')
        entries = p.collectors[0].entries
        self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}),))
        self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'b'}),))

    def test_execution_context_nested(self):
        """
        This test checks that an execution context can be nested at the same level of the stack.
        """
        with Profiler(db=None, collectors=['sql']) as p:
            stack_level = profiler.stack_size()
            with ExecutionContext(letter='a'):
                self.env.cr.execute('SELECT 1')
                with ExecutionContext(letter='b'):
                    self.env.cr.execute('SELECT 1')
                with ExecutionContext(letter='c'):
                    self.env.cr.execute('SELECT 1')
                self.env.cr.execute('SELECT 1')
        entries = p.collectors[0].entries
        # Nested contexts are appended; leaving 'b'/'c' restores 'a' alone.
        self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}),))
        self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}), (stack_level, {'letter': 'b'})))
        self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}), (stack_level, {'letter': 'c'})))
        self.assertEqual(entries.pop(0)['exec_context'], ((stack_level, {'letter': 'a'}),))

    def test_qweb_recorder(self):
        """The qweb collector records one entry per directive, with the
        number of queries each directive triggered and the archs of every
        template used."""
        template = self.env['ir.ui.view'].create({
            'name': 'test',
            'type': 'qweb',
            'key': 'root',
            'arch_db': '''<t t-name="root">
                <t t-foreach="{'a': 3, 'b': 2, 'c': 1}" t-as="item">
                    [<t t-out="item_index"/>: <t t-set="record" t-value="item"/><t t-call="base.dummy"/> <t t-out="item_value"/>]
                    <b t-out="add_one_query()"/></t>
                </t>'''
        })
        child_template = self.env['ir.ui.view'].create({
            'name': 'test',
            'type': 'qweb',
            'key': 'dummy',
            'arch_db': '<t t-name="dummy"><span t-attf-class="myclass"><t t-out="record"/> <t t-out="add_one_query()"/></span></t>'
        })
        # Register the child view as 'base.dummy' so t-call can resolve it.
        self.env.cr.execute("INSERT INTO ir_model_data(name, model, res_id, module)"
                            "VALUES ('dummy', 'ir.ui.view', %s, 'base')", [child_template.id])

        # 'add_one_query' executes exactly one SQL query per call.
        values = {'add_one_query': lambda: self.env.cr.execute('SELECT id FROM ir_ui_view LIMIT 1') or 'query'}
        result = u"""
                [0: <span class="myclass">a query</span> 3]
                <b>query</b>
                [1: <span class="myclass">b query</span> 2]
                <b>query</b>
                [2: <span class="myclass">c query</span> 1]
                <b>query</b>
            """

        # test rendering without profiling
        rendered = self.env['ir.qweb']._render(template.id, values)
        self.assertEqual(rendered.strip(), result.strip(), 'Without profiling')

        # This rendering is used to cache the compiled template method so as
        # not to have a number of requests that vary according to the modules
        # installed.
        with Profiler(description='test', collectors=['qweb'], db=None):
            self.env['ir.qweb']._render(template.id, values)

        with Profiler(description='test', collectors=['qweb'], db=None) as p:
            rendered = self.env['ir.qweb']._render(template.id, values)
            # check if qweb is ok
            self.assertEqual(rendered.strip(), result.strip())

        # check if the arch of all used templates is included in the result
        self.assertEqual(p.collectors[0].entries[0]['results']['archs'], {
            template.id: template.arch_db,
            child_template.id: child_template.arch_db,
        })

        # check all directives without duration information
        for data in p.collectors[0].entries[0]['results']['data']:
            data.pop('delay')

        data = p.collectors[0].entries[0]['results']['data']
        expected = [
            # pylint: disable=bad-whitespace
            # first template and first directive
            {'view_id': template.id, 'xpath': '/t/t', 'directive': """t-foreach="{'a': 3, 'b': 2, 'c': 1}" t-as='item'""", 'query': 0},
            # first pass in the loop
            {'view_id': template.id, 'xpath': '/t/t/t[1]', 'directive': "t-out='item_index'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/t[2]', 'directive': "t-set='record' t-value='item'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/t[3]', 'directive': "t-call='base.dummy'", 'query': 0},  # 0 because the template is in ir.ui.view cache
            # first pass in the loop: content of the child template
            {'view_id': child_template.id, 'xpath': '/t/span', 'directive': "t-attf-class='myclass'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span/t[1]', 'directive': "t-out='record'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span/t[2]', 'directive': "t-out='add_one_query()'", 'query': 1},
            {'view_id': template.id, 'xpath': '/t/t/t[4]', 'directive': "t-out='item_value'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/b', 'directive': "t-out='add_one_query()'", 'query':1},
            # second pass in the loop
            {'view_id': template.id, 'xpath': '/t/t/t[1]', 'directive': "t-out='item_index'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/t[2]', 'directive': "t-set='record' t-value='item'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/t[3]', 'directive': "t-call='base.dummy'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span', 'directive': "t-attf-class='myclass'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span/t[1]', 'directive': "t-out='record'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span/t[2]', 'directive': "t-out='add_one_query()'", 'query': 1},
            {'view_id': template.id, 'xpath': '/t/t/t[4]', 'directive': "t-out='item_value'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/b', 'directive': "t-out='add_one_query()'", 'query':1},
            # third pass in the loop
            {'view_id': template.id, 'xpath': '/t/t/t[1]', 'directive': "t-out='item_index'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/t[2]', 'directive': "t-set='record' t-value='item'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/t[3]', 'directive': "t-call='base.dummy'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span', 'directive': "t-attf-class='myclass'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span/t[1]', 'directive': "t-out='record'", 'query': 0},
            {'view_id': child_template.id, 'xpath': '/t/span/t[2]', 'directive': "t-out='add_one_query()'", 'query': 1},
            {'view_id': template.id, 'xpath': '/t/t/t[4]', 'directive': "t-out='item_value'", 'query': 0},
            {'view_id': template.id, 'xpath': '/t/t/b', 'directive': "t-out='add_one_query()'", 'query':1},
        ]
        self.assertEqual(data, expected)

    def test_default_recorders(self):
        """The default collectors record every query with its stack."""
        with Profiler(db=None) as p:
            queries_start = self.env.cr.sql_log_count
            for i in range(10):
                self.env['res.partner'].create({'name': 'snail%s' % i})
            self.env.flush_all()
            total_queries = self.env.cr.sql_log_count - queries_start

        rq = next(r for r in p.collectors if r.name == "sql").entries
        # The init stack trace ends at this very test method.
        self.assertEqual(p.init_stack_trace[-1][2], 'test_default_recorders')
        self.assertEqual(p.init_stack_trace[-1][0].split('/')[-1], 'test_profiler.py')

        self.assertEqual(len(rq), total_queries)
        first_query = rq[0]
        self.assertEqual(first_query['stack'][0][2], 'create')
        #self.assertIn("self.env['res.partner'].create({", first_query['stack'][0][3])

        self.assertGreater(first_query['time'], 0)
        # The innermost frame of a sql entry is always cursor.execute().
        self.assertEqual(first_query['stack'][-1][2], 'execute')
        self.assertEqual(first_query['stack'][-1][0].split('/')[-1], 'sql_db.py')

    def test_profiler_return(self):
        """self.profile() records entries and the result can be serialized."""
        # Enter test mode to avoid the profiler to commit the result
        self.registry.enter_test_mode(self.cr)
        self.addCleanup(self.registry.leave_test_mode)
        # Trick: patch db_connect() to make it return the registry with the current test cursor
        # See `ProfilingHttpCase`
        self.startClassPatcher(patch('odoo.sql_db.db_connect', return_value=self.registry))
        with self.profile(collectors=["sql"]) as p:
            self.env.cr.execute("SELECT 1")
        p.json()  # check we can call it
        self.assertEqual(p.collectors[0].entries[0]['query'], 'SELECT 1')
||||
def deep_call(func, depth):
    """Invoke ``func`` once, nested ``depth`` extra frames deep on the call stack.

    The recursion is deliberate: the profiler tests use it to build call
    stacks of a known depth, so it must not be rewritten iteratively.
    """
    if depth <= 0:
        func()
    else:
        deep_call(func, depth - 1)
||||
|
||||
@tagged('-standard', 'profiling_performance')
class TestPerformance(BaseCase):
    """Benchmarks of the collectors themselves (non-standard: timing
    dependent, thresholds are machine sensitive)."""

    def test_collector_max_frequency(self):
        """
        Check the creation time of an entry
        """
        collector = profiler.Collector()
        p = Profiler(collectors=[collector], db=None)

        def collect():
            collector.add()

        # collect on changing stack
        with p:
            start = time.time()
            while start + 1 > time.time():
                deep_call(collect, 20)

        self.assertGreater(len(collector.entries), 20000)  # ~40000

        # collect on identical stack
        collector = profiler.Collector()
        p = Profiler(collectors=[collector], db=None)

        def collect_1_s():
            # Hammer the collector from a fixed stack for one second.
            start = time.time()
            while start + 1 > time.time():
                collector.add()

        with p:
            deep_call(collect_1_s, 20)

        self.assertGreater(len(collector.entries), 50000)  # ~70000

    def test_frequencies_1ms_sleep(self):
        """
        Check the number of entries generated in 1s at 1kHz
        we need to artificially change the frame as often as possible to avoid
        triggering the memory optimisation skipping identical frames
        """
        def sleep_1():
            time.sleep(0.0001)

        def sleep_2():
            time.sleep(0.0001)

        with Profiler(collectors=['traces_async'], db=None) as res:
            start = time.time()
            while start + 1 > time.time():
                # Alternate between two distinct frames so no sample is
                # skipped as a duplicate.
                sleep_1()
                sleep_2()

        entry_count = len(res.collectors[0].entries)
        self.assertGreater(entry_count, 700)  # ~920

    def test_traces_async_memory_optimisation(self):
        """
        Identical frames should be saved only once.
        We should only have a few entries on a 1 second sleep.
        """
        with Profiler(collectors=['traces_async'], db=None) as res:
            time.sleep(1)
        entry_count = len(res.collectors[0].entries)
        self.assertLess(entry_count, 5)  # ~3
||||
@tagged('-standard', 'profiling')
class TestSyncRecorder(BaseCase):
    # this test was made non-standard because it can break for strange reasons
    # because of additional _remove or signal_handler frames
    def test_sync_recorder(self):
        """The sync collector (sys.settrace based) records one entry per
        call/return, mirroring the exact call tree of a()."""
        if sys.gettrace() is not None:
            self.skipTest(f'Cannot start SyncCollector, settrace already set: {sys.gettrace()}')

        def a():
            b()
            c()

        def b():
            pass

        def c():
            d()
            d()

        def d():
            pass

        with Profiler(description='test', collectors=['traces_sync'], db=None) as p:
            a()

        stacks = [r['stack'] for r in p.collectors[0].entries]

        # map stack frames to their function name, and check
        stacks_methods = [[frame[2] for frame in stack] for stack in stacks]
        self.assertEqual(stacks_methods, [
            ['a'],
            ['a', 'b'],
            ['a'],
            ['a', 'c'],
            ['a', 'c', 'd'],
            ['a', 'c'],
            ['a', 'c', 'd'],
            ['a', 'c'],
            ['a'],
            [],
            ['__exit__'],
            ['__exit__', 'stop']  # could be removed by cleaning two last frames, or removing last frames only contained in profiler.py
        ])

        # map stack frames to their line number, and check
        stacks_lines = [[frame[1] for frame in stack] for stack in stacks]
        self.assertEqual(stacks_lines[1][0] + 1, stacks_lines[3][0],
                         "Call of b() in a() should be one line before call of c()")
||||
3238
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_qweb.py
Normal file
3238
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_qweb.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,88 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from odoo.addons.base.tests.common import DISABLED_MAIL_CONTEXT
|
||||
from odoo.tests import common
|
||||
|
||||
|
||||
class TestQwebFieldTime(common.TransactionCase):
    """Rendering of float hour values through ``ir.qweb.field.time``."""

    def value_to_html(self, value, options=None):
        return self.env['ir.qweb.field.time'].value_to_html(value, options or {})

    def test_time_value_to_html(self):
        hour_fmt = {'format': 'h:mm a'}
        expectations = [
            (0, hour_fmt, "12:00 AM"),
            (11.75, hour_fmt, "11:45 AM"),
            (12, hour_fmt, "12:00 PM"),
            (14.25, hour_fmt, "2:15 PM"),
            (15.1, {'format': 'HH:mm:SS'}, "15:06:00"),
        ]
        for value, options, expected in expectations:
            self.assertEqual(self.value_to_html(value, options), expected)

        # Only positive values can be used
        with self.assertRaises(ValueError):
            self.value_to_html(-6.5)

        # Only values inferior to 24 can be used
        with self.assertRaises(ValueError):
            self.value_to_html(24)
|
||||
class TestQwebFieldInteger(common.TransactionCase):
    """Rendering of integers through ``ir.qweb.field.integer``."""

    def value_to_html(self, value, options=None):
        return self.env['ir.qweb.field.integer'].value_to_html(value, options or {})

    def test_integer_value_to_html(self):
        # Plain rendering inserts thousands separators.
        self.assertEqual(self.value_to_html(1000), "1,000")
        # Decimalized rendering shortens large numbers with a magnitude suffix,
        # keeping the requested number of precision digits.
        self.assertEqual(
            self.value_to_html(1000000, {'format_decimalized_number': True}),
            "1M")
        self.assertEqual(
            self.value_to_html(125125, {'format_decimalized_number': True, 'precision_digits': 3}),
            "125.125k")
||||
class TestQwebFieldContact(common.TransactionCase):
    """Rendering of a res.partner through ``ir.qweb.field.contact``."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Disable mail side effects while creating the shared test partner.
        cls.env = cls.env(context=dict(cls.env.context, **DISABLED_MAIL_CONTEXT))
        cls.partner = cls.env['res.partner'].create({
            'name': 'Wood Corner',
            'email': 'wood.corner26@example.com',
            'phone': '(623)-853-7197',
            'website': 'http://www.wood-corner.com',
        })

    def _render(self, field_names):
        # Render the shared partner with the requested field list.
        return self.env["ir.qweb.field.contact"].value_to_html(
            self.partner, {"fields": field_names})

    def test_value_to_html_with_website_and_phone(self):
        html = self._render(["phone", "website"])
        self.assertIn('itemprop="website"', html)
        self.assertIn(self.partner.website, html)
        self.assertIn('itemprop="telephone"', html)
        self.assertIn(self.partner.phone, html)
        self.assertNotIn('itemprop="email"', html)

    def test_value_to_html_without_phone(self):
        html = self._render(["name", "website"])
        self.assertIn('itemprop="website"', html)
        self.assertIn(self.partner.website, html)
        self.assertNotIn(self.partner.phone, html)
        self.assertIn('itemprop="telephone"', html, "Empty telephone itemprop should be added to prevent issue with iOS Safari")
||||
|
|
@ -0,0 +1,137 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import logging
|
||||
|
||||
import odoo
|
||||
import odoo.tests
|
||||
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@odoo.tests.tagged('post_install', '-at_install', 'post_install_l10n')
class TestReports(odoo.tests.TransactionCase):
    def test_reports(self):
        """Smoke-test HTML rendering of every generic qweb report.

        Reports backed by a dedicated ``report.<report_name>`` python model
        are skipped: only reports rendered through the generic machinery are
        exercised here.
        """
        invoice_domain = [('move_type', 'in', ('out_invoice', 'out_refund', 'out_receipt', 'in_invoice', 'in_refund', 'in_receipt'))]
        # Some report templates can only render a specific subset of records.
        specific_model_domains = {
            'account.report_original_vendor_bill': [('move_type', 'in', ('in_invoice', 'in_receipt'))],
            'account.report_invoice_with_payments': invoice_domain,
            'account.report_invoice': invoice_domain,
            'l10n_th.report_commercial_invoice': invoice_domain,
        }
        Report = self.env['ir.actions.report']
        for report in Report.search([('report_type', 'like', 'qweb')]):
            report_model = 'report.%s' % report.report_name
            try:
                self.env[report_model]
            except KeyError:
                # Only test the generic reports here
                _logger.info("testing report %s", report.report_name)
                report_model_domain = specific_model_domains.get(report.report_name, [])
                report_records = self.env[report.model].search(report_model_domain, limit=10)
                if not report_records:
                    _logger.info("no record found skipping report %s", report.report_name)
                    # BUGFIX: actually skip — the original fell through and
                    # attempted to render with an empty recordset despite the
                    # "skipping" log above.
                    continue

                # Test report generation
                if not report.multi:
                    for record in report_records:
                        Report._render_qweb_html(report.id, record.ids)
                else:
                    Report._render_qweb_html(report.id, report_records.ids)
            else:
                # A dedicated report model exists: not a generic report.
                continue
|
||||
|
||||
@odoo.tests.tagged('post_install', '-at_install')
class TestAggregatePdfReports(odoo.tests.HttpCase):
    """PDF generation for multiple records when some of them already have a
    stored attachment and 'Reload from attachment' is enabled."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.partners = cls.env["res.partner"].create([{
            "name": "Rodion Romanovich Raskolnikov"
        }, {
            "name": "Dmitri Prokofich Razumikhin"
        }, {
            "name": "Porfiry Petrovich"
        }])

        # The report action; the matching qweb view is created per test.
        cls.env["ir.actions.report"].create({
            "name": "test report",
            "report_name": "base.test_report",
            "model": "res.partner",
        })

    def test_aggregate_report_with_some_resources_reloaded_from_attachment(self):
        """
        Test for opw-3827700, which caused reports generated for multiple records to fail if there was a record in
        the middle that had an attachment, and 'Reload from attachment' was enabled for the report. The misbehavior was
        caused by an indexing issue.
        """
        self.env["ir.ui.view"].create({
            "type": "qweb",
            "name": "base.test_report",
            "key": "base.test_report",
            "arch": """
                <main>
                    <div t-foreach="docs" t-as="user">
                        <div class="article" data-oe-model="res.partner" t-att-data-oe-id="user.id">
                            <span t-esc="user.display_name"/>
                        </div>
                    </div>
                </main>
            """
        })
        # The middle partner gets its attachment generated first.
        self.assert_report_creation("base.test_report", self.partners, self.partners[1])

    def test_aggregate_report_with_some_resources_reloaded_from_attachment_with_multiple_page_report(self):
        """
        Same as test_aggregate_report_with_some_resources_reloaded_from_attachment, but tests the behavior for
        reports that span multiple pages per record.
        """
        self.env["ir.ui.view"].create({
            "type": "qweb",
            "name": "base.test_report",
            "key": "base.test_report",
            "arch": """
                <main>
                    <div t-foreach="docs" t-as="user">
                        <div class="article" data-oe-model="res.partner" t-att-data-oe-id="user.id" >
                            <!-- This headline helps report generation to split pdfs per record after it generates
                             the report in bulk by creating an outline. -->
                            <h1>Name</h1>
                            <!-- Make this a multipage report. -->
                            <div t-foreach="range(100)" t-as="i">
                                <span t-esc="i"/> - <span t-esc="user.display_name"/>
                            </div>
                        </div>
                    </div>
                </main>
            """
        })
        self.assert_report_creation("base.test_report", self.partners, self.partners[1])

    def assert_report_creation(self, report_ref, records, record_to_report):
        """Generate an attachment for ``record_to_report`` alone, then render
        the aggregate PDF for ``records`` and check every record ends up with
        an attachment.

        :param report_ref: xml-id/report_name of the report to render
        :param records: recordset rendered in aggregate
        :param record_to_report: record (member of ``records``) pre-rendered
            so that its attachment exists before the aggregate rendering
        """
        self.assertIn(record_to_report, records, "Record to report must be in records list")

        # force_report_rendering bypasses any dev-mode shortcuts.
        reports = self.env['ir.actions.report'].with_context(force_report_rendering=True)

        # Make sure attachments are created.
        report = reports._get_report(report_ref)
        if not report.attachment:
            report.attachment = "object.name + '.pdf'"
            report.attachment_use = True

        # Generate report for chosen record to create an attachment.
        record_report, content_type = reports._render_qweb_pdf(report_ref, res_ids=record_to_report.id)
        self.assertEqual(content_type, "pdf", "Report is not a PDF")
        self.assertTrue(record_report, "PDF not generated")

        # Make sure the attachment is created.
        report = reports._get_report(report_ref)
        self.assertTrue(report.retrieve_attachment(record_to_report), "Attachment not generated")

        # Aggregate rendering: the pre-existing attachment sits in the middle.
        aggregate_report_content, content_type = reports._render_qweb_pdf(report_ref, res_ids=records.ids)
        self.assertEqual(content_type, "pdf", "Report is not a PDF")
        self.assertTrue(aggregate_report_content, "PDF not generated")
        for record in records:
            self.assertTrue(report.retrieve_attachment(record), "Attachment not generated")
||||
|
|
@ -0,0 +1,40 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
class TestCompany(TransactionCase):

    def test_check_active(self):
        """Tests the ability to archive a company whether or not it still has active users.
        Tests an archived user in an archived company cannot be unarchived
        without changing its company to an active company."""
        company = self.env['res.company'].create({'name': 'foo'})
        user = self.env['res.users'].create({
            'name': 'foo',
            'login': 'foo',
            'company_id': company.id,
            'company_ids': company.ids,
        })

        # The company cannot be archived because it still has active users
        with self.assertRaisesRegex(ValidationError, 'The company foo cannot be archived'):
            company.action_archive()

        # The company can be archived because it has no active users
        user.action_archive()
        company.action_archive()

        # The user cannot be unarchived because its default company is archived
        with self.assertRaisesRegex(ValidationError, 'Company foo is not in the allowed companies'):
            user.action_unarchive()

        # The user can be unarchived once we set another, active, company
        main_company = self.env.ref('base.main_company')
        user.write({
            'company_id': main_company.id,
            'company_ids': main_company.ids,
        })
        user.action_unarchive()
||||
|
|
@ -0,0 +1,266 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from collections import defaultdict
|
||||
from lxml import etree
|
||||
import logging
|
||||
|
||||
from odoo import exceptions, Command
|
||||
from odoo.tests.common import Form, TransactionCase, tagged
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestResConfig(TransactionCase):
    """Tests of res.config.settings helpers (option path/name resolution,
    warning construction) and of the settings form view architecture the
    web client expects."""

    def setUp(self):
        super(TestResConfig, self).setUp()
        self.ResConfig = self.env['res.config.settings']

        # Define the test values
        self.menu_xml_id = 'base.menu_action_res_users'
        self.full_field_name = 'res.partner.lang'
        # %(field:...)s / %(menu:...)s placeholders are resolved by get_config_warning()
        self.error_msg = "WarningRedirect test string: %(field:res.partner.lang)s - %(menu:base.menu_action_res_users)s."
        self.error_msg_wo_menu = "WarningRedirect test string: %(field:res.partner.lang)s."
        # Note: see the get_config_warning() doc for a better example

        # Fetch the expected values
        menu = self.env.ref(self.menu_xml_id)

        model_name, field_name = self.full_field_name.rsplit('.', 1)

        self.expected_path = menu.complete_name
        self.expected_action_id = menu.action.id
        # human-readable label of the field, as shown by the web client
        self.expected_name = self.env[model_name].fields_get([field_name])[field_name]['string']
        self.expected_final_error_msg = self.error_msg % {
            'field:res.partner.lang': self.expected_name,
            'menu:base.menu_action_res_users': self.expected_path
        }
        self.expected_final_error_msg_wo_menu = self.error_msg_wo_menu % {
            'field:res.partner.lang': self.expected_name,
        }

    def test_00_get_option_path(self):
        """ The get_option_path() method should return a tuple containing a string and an integer """
        res = self.ResConfig.get_option_path(self.menu_xml_id)

        # Check types
        self.assertIsInstance(res, tuple)
        self.assertEqual(len(res), 2, "The result should contain 2 elements")
        self.assertIsInstance(res[0], str)
        self.assertIsInstance(res[1], int)

        # Check returned values
        self.assertEqual(res[0], self.expected_path)
        self.assertEqual(res[1], self.expected_action_id)

    def test_10_get_option_name(self):
        """ The get_option_name() method should return a string """
        res = self.ResConfig.get_option_name(self.full_field_name)

        # Check type
        self.assertIsInstance(res, str)

        # Check returned value
        self.assertEqual(res, self.expected_name)

    def test_20_get_config_warning(self):
        """ The get_config_warning() method should return a RedirectWarning """
        res = self.ResConfig.get_config_warning(self.error_msg)

        # Check type
        self.assertIsInstance(res, exceptions.RedirectWarning)

        # Check returned value: args[0] is the message, args[1] the redirect action
        self.assertEqual(res.args[0], self.expected_final_error_msg)
        self.assertEqual(res.args[1], self.expected_action_id)

    def test_30_get_config_warning_wo_menu(self):
        """ The get_config_warning() method should return a Warning exception """
        res = self.ResConfig.get_config_warning(self.error_msg_wo_menu)

        # Check type: no menu placeholder -> plain UserError, no redirect
        self.assertIsInstance(res, exceptions.UserError)

        # Check returned value
        self.assertEqual(res.args[0], self.expected_final_error_msg_wo_menu)

    def test_40_view_expected_architecture(self):
        """Tests the res.config.settings form view architecture expected by the web client.
        The res.config.settings form view is handled with a custom widget expecting a very specific
        structure. This architecture is tested extensively in Javascript unit tests.
        Here we briefly ensure the view sent by the server to the web client has the right architecture,
        the right blocks with the right classes in the right order.
        This tests is to ensure the specification/requirements are listed and tested server side, and
        if a change occurs in future development, this test will need to be adapted to specify these changes."""
        view = self.env['ir.ui.view'].create({
            'name': 'foo',
            'type': 'form',
            'model': 'res.config.settings',
            'inherit_id': self.env.ref('base.res_config_settings_view_form').id,
            'arch': """
                <xpath expr="//div[hasclass('settings')]" position="inside">
                    <t groups="base.group_system">
                        <div class="app_settings_block" data-string="Foo" string="Foo" data-key="foo">
                            <h2>Foo</h2>
                        </div>
                    </t>
                </xpath>
            """,
        })
        arch = self.env['res.config.settings'].get_view(view.id)['arch']
        tree = etree.fromstring(arch)
        # the full expected chain form > container > settings > app block
        self.assertTrue(tree.xpath("""
            //form[@class="oe_form_configuration o_base_settings"]
            /div[@class="o_setting_container"]
            /div[@class="settings"]
            /div[@class="app_settings_block"][@data-key="foo"]
        """), 'The res.config.settings form view architecture is not what is expected by the web client.')

    def test_50_view_expected_architecture_t_node_groups(self):
        """Tests the behavior of the res.config.settings form view postprocessing when a block `app_settings_block`
        is wrapped in a `<t groups="...">`, which is used when you need to display an app settings section
        only for users part of two groups at the same time."""
        view = self.env['ir.ui.view'].create({
            'name': 'foo',
            'type': 'form',
            'model': 'res.config.settings',
            'inherit_id': self.env.ref('base.res_config_settings_view_form').id,
            'arch': """
                <xpath expr="//div[hasclass('settings')]" position="inside">
                    <t groups="base.group_system">
                        <div class="app_settings_block" data-string="Foo"
                            string="Foo" data-key="foo" groups="base.group_no_one">
                            <h2>Foo</h2>
                        </div>
                    </t>
                </xpath>
            """,
        })
        # debug mode so the group_no_one-restricted block is kept in the arch
        with self.debug_mode():
            arch = self.env['res.config.settings'].get_view(view.id)['arch']
        tree = etree.fromstring(arch)
        # The <t> must be removed from the structure
        self.assertFalse(tree.xpath('//t'), 'The `<t groups="...">` block must not remain in the view')
        self.assertTrue(tree.xpath("""
            //div[@class="settings"]
            /div[@class="app_settings_block"][@data-key="foo"]
        """), 'The `class="app_settings_block"` block must be a direct child of the `class="settings"` block')
||||
|
||||
@tagged('post_install', '-at_install')
class TestResConfigExecute(TransactionCase):
    """Smoke tests run after all modules are installed: every
    ``*.config.settings`` model must be creatable and executable, and users
    holding only the settings group (plus each conditional-view group) must be
    able to open and save the settings form."""

    def test_01_execute_res_config(self):
        """
        Try to create and execute all res_config models. Target settings that can't be
        loaded or saved and avoid remaining methods `get_default_foo` or `set_foo` that
        won't be executed if foo != `fields`
        """
        all_config_settings = self.env['ir.model'].search([('name', 'like', 'config.settings')])
        for config_settings in all_config_settings:
            # lazy %-style args: formatting only happens if the record is emitted
            _logger.info("Testing %s", config_settings.name)
            self.env[config_settings.name].create({}).execute()

    def test_settings_access(self):
        """Check that settings user are able to open & save settings

        Also check that user with settings rights + any one of the groups restricting
        a conditional view inheritance of res.config.settings view is also able to
        open & save the settings (considering the added conditional content)
        """
        ResUsers = self.env['res.users']
        group_system = self.env.ref('base.group_system')
        self.settings_view = self.env.ref('base.res_config_settings_view_form')
        settings_only_user = ResUsers.create({
            'name': 'Sleepy Joe',
            'login': 'sleepy',
            'groups_id': [Command.link(group_system.id)],
        })

        # If not enabled (like in demo data), landing on res.config will try
        # to disable module_sale_quotation_builder and raise an issue
        group_order_template = self.env.ref('sale_management.group_sale_order_template', raise_if_not_found=False)
        if group_order_template:
            self.env.ref('base.group_user').write({"implied_ids": [(4, group_order_template.id)]})

        _logger.info("Testing settings access for group %s", group_system.full_name)
        forbidden_models = self._test_user_settings_fields_access(settings_only_user)
        self._test_user_settings_view_save(settings_only_user)

        for model in forbidden_models:
            _logger.warning("Settings user doesn't have read access to the model %s", model)

        settings_view_conditional_groups = self.env['ir.ui.view'].search([
            ('model', '=', 'res.config.settings'),
        ]).groups_id

        # Semi hack to recover part of the coverage lost when the groups_id
        # were moved from the views records to the view nodes (with groups attributes)
        groups_data = self.env['res.groups'].get_groups_by_application()
        for group_data in groups_data:
            if group_data[1] == 'selection' and group_data[3] != (100, 'Other'):
                # last group of a selection category is its "manager" level
                manager_group = group_data[2][-1]
                settings_view_conditional_groups += manager_group
        settings_view_conditional_groups -= group_system  # Already tested above

        for group in settings_view_conditional_groups:
            group_name = group.full_name
            _logger.info("Testing settings access for group %s", group_name)
            create_values = {
                'name': f'Test {group_name}',
                'login': group_name,
                'groups_id': [Command.link(group_system.id), Command.link(group.id)]
            }
            user = ResUsers.create(create_values)
            self._test_user_settings_view_save(user)
            forbidden_models_fields = self._test_user_settings_fields_access(user)

            for model, fields in forbidden_models_fields.items():
                _logger.warning(
                    # fixed: the two fragments previously concatenated without
                    # a separating space ("...model %slinked...")
                    "Settings + %s user doesn't have read access to the model %s "
                    "linked to settings records by the field(s) %s",
                    group_name, model, ", ".join(str(field) for field in fields)
                )

    def _test_user_settings_fields_access(self, user):
        """Verify that settings user are able to create & save settings.

        :param user: res.users record to create/save the settings as
        :return: dict mapping model name -> set of settings fields pointing to
            models the user cannot read (candidates for access warnings)
        """
        settings = self.env['res.config.settings'].with_user(user).create({})

        # Save the settings
        settings.set_values()

        # Check user has access to all models of relational fields in view
        # because the webclient makes a name_get request for all specified records
        # even if they are not shown to the user.
        settings_view_arch = etree.fromstring(settings.get_view(view_id=self.settings_view.id)['arch'])
        seen_fields = set()
        for node in settings_view_arch.iterdescendants(tag='field'):
            fname = node.get('name')
            if fname not in settings._fields:
                # fname isn't a settings fields, but the field of a model
                # linked to settings through a relational field
                continue
            seen_fields.add(fname)

        # group the relational settings fields by their target model
        models_to_check = defaultdict(set)
        for field_name in seen_fields:
            field = settings._fields[field_name]
            if field.relational:
                models_to_check[field.comodel_name].add(field)

        forbidden_models_fields = defaultdict(set)
        for model in models_to_check:
            has_read_access = self.env[model].with_user(user).check_access_rights(
                'read', raise_exception=False)
            if not has_read_access:
                forbidden_models_fields[model] = models_to_check[model]

        return forbidden_models_fields

    def _test_user_settings_view_save(self, user):
        """Verify that settings user are able to save the settings form."""
        ResConfigSettings = self.env['res.config.settings'].with_user(user)

        # Form() replays the onchanges exactly like the web client would
        settings_form = Form(ResConfigSettings)
        settings_form.save()
|
@ -0,0 +1,68 @@
|
|||
from odoo.tests import TransactionCase, tagged
|
||||
|
||||
|
||||
@tagged('-at_install', 'post_install')
class TestResCountryState(TransactionCase):
    # Post-install so every installed module's name_search overrides apply.
    def test_find_by_name(self):
        """It should be possible to find a state by its display name
        """
        glorious_arstotzka = self.env['res.country'].create({
            'name': 'Arstotzka',
            'code': 'AA',
        })
        altan = self.env['res.country.state'].create({
            'country_id': glorious_arstotzka.id,
            'code': 'AL',
            'name': 'Altan',
        })

        # exact match: bare name, display name, and variants with the country
        # code or country name in parentheses (with or without spacing)
        for name in [
            altan.name,
            altan.display_name,
            'Altan(AA)',
            'Altan ( AA )',
            'Altan (Arstotzka)',
            'Altan (Arst)',  # dubious
        ]:
            with self.subTest(name):
                self.assertEqual(
                    self.env['res.country.state'].name_search(name, operator='='),
                    [(altan.id, altan.display_name)]
                )

        # imitates basque provinces
        vescillo = self.env['res.country.state'].create({
            'country_id': glorious_arstotzka.id,
            'code': 'VE',
            'name': "Vescillo (Vesilo)",
        })
        for name in [
            vescillo.name,
            vescillo.display_name,
            "vescillo",
            "vesilo",
            "vescillo (AA)",
            "vesilo (AA)",
            "vesilo (Arstotzka)",
        ]:
            with self.subTest(name):
                # note operator for more flexible state name matching
                self.assertEqual(
                    self.env['res.country.state'].name_search(name, operator='ilike'),
                    [(vescillo.id, vescillo.display_name)]
                )

        # search in state list
        for name in [
            [altan.name],
            [altan.display_name],
            ['Altan(AA)'],
            ['Altan ( AA )'],
            ['Altan (Arstotzka)'],
            ['Altan (Arst)'],
        ]:
            with self.subTest(name):
                self.assertEqual(
                    self.env['res.country.state'].name_search(name, operator='in'),
                    [(altan.id, altan.display_name)]
                )
|
@ -0,0 +1,40 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from lxml import etree
|
||||
from odoo.tests.common import TransactionCase
|
||||
|
||||
|
||||
class TestResConfig(TransactionCase):
    """Tests on res.currency / res.currency.rate views and search behavior."""

    def test_view_company_rate_label(self):
        """Tests the label of the company_rate and inverse_company_rate fields
        are well set according to the company currency in the currency form view and the currency rate list view.
        e.g. in the currency rate list view of a company using EUR, the company_rate label must be `Unit per EUR`"""
        company_foo, company_bar = self.env['res.company'].create([
            {'name': 'foo', 'currency_id': self.env.ref('base.EUR').id},
            {'name': 'bar', 'currency_id': self.env.ref('base.USD').id},
        ])
        for company, expected_currency in [(company_foo, 'EUR'), (company_bar, 'USD')]:
            for model, view_type in [('res.currency', 'form'), ('res.currency.rate', 'tree')]:
                # the label depends on the company active in the context
                arch = self.env[model].with_company(company).get_view(view_type=view_type)['arch']
                tree = etree.fromstring(arch)
                node_company_rate = tree.xpath('//field[@name="company_rate"]')[0]
                node_inverse_company_rate = tree.xpath('//field[@name="inverse_company_rate"]')[0]
                self.assertEqual(node_company_rate.get('string'), f'Unit per {expected_currency}')
                self.assertEqual(node_inverse_company_rate.get('string'), f'{expected_currency} per Unit')

    def test_res_currency_name_search(self):
        """Searching currencies through the one2many ``rate_ids`` with a raw
        string value must not crash on fields whose type cannot hold the value
        (see the per-assertion comments below)."""
        currency_A, currency_B = self.env["res.currency"].create([
            {"name": "cuA", "symbol": "A"},
            {"name": "cuB", "symbol": "B"},
        ])
        self.env["res.currency.rate"].create([
            {"name": "1971-01-01", "rate": 2.0, "currency_id": currency_A.id},
            {"name": "1971-01-01", "rate": 1.5, "currency_id": currency_B.id},
            {"name": "1972-01-01", "rate": 0.69, "currency_id": currency_B.id},
        ])
        # should not try to match field 'rate' (float field)
        self.assertEqual(self.env["res.currency"].search_count([["rate_ids", "=", "1971-01-01"]]), 2)
        # should not try to match field 'name' (date field)
        self.assertEqual(self.env["res.currency"].search_count([["rate_ids", "=", "0.69"]]), 1)
        # should not try to match any of 'name' and 'rate'
        self.assertEqual(self.env["res.currency"].search_count([["rate_ids", "=", "irrelevant"]]), 0)
|
@ -0,0 +1,61 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
class test_res_lang(TransactionCase):
    """Tests of res.lang helpers (digit grouping) and deactivation rules."""

    def test_00_intersperse(self):
        """Check the digit-grouping helper ``intersperse``.

        ``intersperse(text, separations, separator)`` returns a tuple
        ``(grouped_text, inserted_count)``.

        Rewritten from bare ``assert`` statements to ``self.assertEqual``:
        plain asserts are stripped under ``python -O`` and give no diagnostic
        on failure, whereas assertEqual always runs and reports both values.
        """
        from odoo.addons.base.models.res_lang import intersperse

        # empty separations list -> text returned unchanged, nothing inserted
        self.assertEqual(intersperse("", []), ("", 0))
        self.assertEqual(intersperse("0", []), ("0", 0))
        self.assertEqual(intersperse("012", []), ("012", 0))
        self.assertEqual(intersperse("1", []), ("1", 0))
        self.assertEqual(intersperse("12", []), ("12", 0))
        self.assertEqual(intersperse("123", []), ("123", 0))
        self.assertEqual(intersperse("1234", []), ("1234", 0))
        self.assertEqual(intersperse("123456789", []), ("123456789", 0))
        self.assertEqual(intersperse("&ab%#@1", []), ("&ab%#@1", 0))

        # single character: no group boundary can fall inside it
        self.assertEqual(intersperse("0", []), ("0", 0))
        self.assertEqual(intersperse("0", [1]), ("0", 0))
        self.assertEqual(intersperse("0", [2]), ("0", 0))
        self.assertEqual(intersperse("0", [200]), ("0", 0))

        # group sizes are applied from the right end of the string;
        # a trailing 0 repeats the previous size, a negative size stops grouping
        self.assertEqual(intersperse("12345678", [1], '.'), ('1234567.8', 1))
        self.assertEqual(intersperse("12345678", [1], '.'), ('1234567.8', 1))
        self.assertEqual(intersperse("12345678", [2], '.'), ('123456.78', 1))
        self.assertEqual(intersperse("12345678", [2, 1], '.'), ('12345.6.78', 2))
        self.assertEqual(intersperse("12345678", [2, 0], '.'), ('12.34.56.78', 3))
        self.assertEqual(intersperse("12345678", [-1, 2], '.'), ('12345678', 0))
        self.assertEqual(intersperse("12345678", [2, -1], '.'), ('123456.78', 1))
        self.assertEqual(intersperse("12345678", [2, 0, 1], '.'), ('12.34.56.78', 3))
        self.assertEqual(intersperse("12345678", [2, 0, 0], '.'), ('12.34.56.78', 3))
        self.assertEqual(intersperse("12345678", [2, 0, -1], '.'), ('12.34.56.78', 3))
        self.assertEqual(intersperse("12345678", [3, 3, 3, 3], '.'), ('12.345.678', 2))

        # mixed alphanumeric input: grouping is still counted from the end
        self.assertEqual(intersperse("abc1234567xy", [2], '.'), ('abc1234567.xy', 1))
        self.assertEqual(intersperse("abc1234567xy8", [2], '.'), ('abc1234567x.y8', 1))  # ... w.r.t. here.
        self.assertEqual(intersperse("abc12", [3], '.'), ('abc12', 0))
        self.assertEqual(intersperse("abc12", [2], '.'), ('abc12', 0))
        self.assertEqual(intersperse("abc12", [1], '.'), ('abc1.2', 1))

    def test_inactive_users_lang_deactivation(self):
        """A language assigned to any user — even an archived one — must not
        be deactivatable."""
        # activate the language en_GB
        language = self.env['res.lang']._activate_lang('en_GB')

        # assign it to an inactive (new) user
        user = self.env['res.users'].create({
            'name': 'Foo',
            'login': 'foo@example.com',
            'lang': 'en_GB',
            'active': False,
        })

        # make sure it is only used by that user
        self.assertEqual(self.env['res.users'].with_context(active_test=False).search([('lang', '=', 'en_GB')]), user)

        # deactivation must be refused while the language is still in use
        with self.assertRaises(UserError):
            language.active = False
|
@ -0,0 +1,331 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.addons.base.models.ir_mail_server import extract_rfc2822_addresses
|
||||
from odoo.tests import Form
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.exceptions import AccessError, UserError
|
||||
from odoo.tests import tagged
|
||||
|
||||
|
||||
@tagged('res_partner')
|
||||
class TestPartner(TransactionCase):
|
||||
|
||||
    def test_email_formatted(self):
        """ Test various combinations of name / email, notably to check result
        of email_formatted field. """
        # multi create
        new_partners = self.env['res.partner'].create([{
            'name': "Vlad the Impaler",
            'email': f'vlad.the.impaler.{idx:02d}@example.com',
        } for idx in range(2)])
        self.assertEqual(
            sorted(new_partners.mapped('email_formatted')),
            sorted([f'"Vlad the Impaler" <vlad.the.impaler.{idx:02d}@example.com>' for idx in range(2)]),
            'Email formatted should be "name" <email>'
        )

        # test name_create with formatting / multi emails
        # each tuple: (input string, (expected name, email, email_formatted))
        for source, (exp_name, exp_email, exp_email_formatted) in [
            (
                'Balázs <vlad.the.negociator@example.com>, vlad.the.impaler@example.com',
                ("Balázs", "vlad.the.negociator@example.com", '"Balázs" <vlad.the.negociator@example.com>')
            ),
            (
                'Balázs <vlad.the.impaler@example.com>',
                ("Balázs", "vlad.the.impaler@example.com", '"Balázs" <vlad.the.impaler@example.com>')
            ),
        ]:
            with self.subTest(source=source):
                new_partner_id = self.env['res.partner'].name_create(source)[0]
                new_partner = self.env['res.partner'].browse(new_partner_id)
                self.assertEqual(new_partner.name, exp_name)
                self.assertEqual(new_partner.email, exp_email)
                self.assertEqual(
                    new_partner.email_formatted, exp_email_formatted,
                    'Name_create should take first found email'
                )

        # check name updates and extract_rfc2822_addresses
        # NOTE: new_partner is intentionally reused from the loop above
        for source, exp_email_formatted, exp_addr in [
            (
                'Vlad the Impaler',
                '"Vlad the Impaler" <vlad.the.impaler@example.com>',
                ['vlad.the.impaler@example.com']
            ), (
                'Balázs', '"Balázs" <vlad.the.impaler@example.com>',
                ['vlad.the.impaler@example.com']
            ),
            # check with '@' in name
            (
                'Bike@Home', '"Bike@Home" <vlad.the.impaler@example.com>',
                ['Bike@Home', 'vlad.the.impaler@example.com']
            ), (
                'Bike @ Home@Home', '"Bike @ Home@Home" <vlad.the.impaler@example.com>',
                ['Home@Home', 'vlad.the.impaler@example.com']
            ), (
                'Balázs <email.in.name@example.com>',
                '"Balázs <email.in.name@example.com>" <vlad.the.impaler@example.com>',
                ['email.in.name@example.com', 'vlad.the.impaler@example.com']
            ),
        ]:
            with self.subTest(source=source):
                new_partner.write({'name': source})
                self.assertEqual(new_partner.email_formatted, exp_email_formatted)
                self.assertEqual(extract_rfc2822_addresses(new_partner.email_formatted), exp_addr)

        # check email updates
        new_partner.write({'name': 'Balázs'})
        for source, exp_email_formatted in [
            # encapsulated email
            (
                "Vlad the Impaler <vlad.the.impaler@example.com>",
                '"Balázs" <vlad.the.impaler@example.com>'
            ), (
                '"Balázs" <balazs@adam.hu>',
                '"Balázs" <balazs@adam.hu>'
            ),
            # multi email
            (
                "vlad.the.impaler@example.com, vlad.the.dragon@example.com",
                '"Balázs" <vlad.the.impaler@example.com,vlad.the.dragon@example.com>'
            ), (
                "vlad.the.impaler.com, vlad.the.dragon@example.com",
                '"Balázs" <vlad.the.dragon@example.com>'
            ), (
                'vlad.the.impaler.com, "Vlad the Dragon" <vlad.the.dragon@example.com>',
                '"Balázs" <vlad.the.dragon@example.com>'
            ),
            # falsy emails
            (False, False),
            ('', False),
            (' ', '"Balázs" <@ >'),
            ('notanemail', '"Balázs" <@notanemail>'),
        ]:
            with self.subTest(source=source):
                new_partner.write({'email': source})
                self.assertEqual(new_partner.email_formatted, exp_email_formatted)
||||
    def test_name_search(self):
        """ Check name_search on partner, especially with domain based on auto_join
        user_ids field. Check specific SQL of name_search correctly handle joined tables. """
        test_partner = self.env['res.partner'].create({'name': 'Vlad the Impaler'})
        test_user = self.env['res.users'].create({'name': 'Vlad the Impaler', 'login': 'vlad', 'email': 'vlad.the.impaler@example.com'})

        # without extra args: both the plain partner and the user's partner match
        ns_res = self.env['res.partner'].name_search('Vlad', operator='ilike')
        self.assertEqual(set(i[0] for i in ns_res), set((test_partner | test_user.partner_id).ids))

        # domain on the joined res_users table narrows results to the user's partner
        ns_res = self.env['res.partner'].name_search('Vlad', args=[('user_ids.email', 'ilike', 'vlad')])
        self.assertEqual(set(i[0] for i in ns_res), set(test_user.partner_id.ids))

        # Check a partner may be searched when current user has no access but sudo is used
        public_user = self.env.ref('base.public_user')
        with self.assertRaises(AccessError):
            test_partner.with_user(public_user).check_access_rule('read')
        ns_res = self.env['res.partner'].with_user(public_user).sudo().name_search('Vlad', args=[('user_ids.email', 'ilike', 'vlad')])
        self.assertEqual(set(i[0] for i in ns_res), set(test_user.partner_id.ids))
|
||||
    def test_name_get(self):
        """ Check name_get on partner, especially with different context
        Check name_get correctly return name with context. """
        test_partner_jetha = self.env['res.partner'].create({'name': 'Jethala', 'street': 'Powder gali', 'street2': 'Gokuldham Society'})
        test_partner_bhide = self.env['res.partner'].create({'name': 'Atmaram Bhide'})

        # show_address=1: address appended on separate lines
        res_jetha = test_partner_jetha.with_context(show_address=1).name_get()
        self.assertEqual(res_jetha[0][1], "Jethala\nPowder gali\nGokuldham Society", "name should contain comma separated name and address")
        res_bhide = test_partner_bhide.with_context(show_address=1).name_get()
        self.assertEqual(res_bhide[0][1], "Atmaram Bhide", "name should contain only name if address is not available, without extra commas")

        # address_inline=1: address joined on one line with commas
        res_jetha = test_partner_jetha.with_context(show_address=1, address_inline=1).name_get()
        self.assertEqual(res_jetha[0][1], "Jethala, Powder gali, Gokuldham Society", "name should contain comma separated name and address")
        res_bhide = test_partner_bhide.with_context(show_address=1, address_inline=1).name_get()
        self.assertEqual(res_bhide[0][1], "Atmaram Bhide", "name should contain only name if address is not available, without extra commas")
|
||||
    def test_company_change_propagation(self):
        """ Check propagation of company_id across children """
        User = self.env['res.users']
        Partner = self.env['res.partner']
        Company = self.env['res.company']

        company_1 = Company.create({'name': 'company_1'})
        company_2 = Company.create({'name': 'company_2'})

        test_partner_company = Partner.create({'name': 'This company'})
        # user belongs to company_1 only; its partner becomes a child of the company partner
        test_user = User.create({'name': 'This user', 'login': 'thisu', 'email': 'this.user@example.com', 'company_id': company_1.id, 'company_ids': [company_1.id]})
        test_user.partner_id.write({'parent_id': test_partner_company.id})

        test_partner_company.write({'company_id': company_1.id})
        self.assertEqual(test_user.partner_id.company_id.id, company_1.id, "The new company_id of the partner company should be propagated to its children")

        # clearing company_id must propagate too
        test_partner_company.write({'company_id': False})
        self.assertFalse(test_user.partner_id.company_id.id, "If the company_id is deleted from the partner company, it should be propagated to its children")

        # savepoint keeps the transaction usable after the expected failure
        with self.assertRaises(UserError, msg="You should not be able to update the company_id of the partner company if the linked user of a child partner is not an allowed to be assigned to that company"), self.cr.savepoint():
            test_partner_company.write({'company_id': company_2.id})
|
||||
    def test_commercial_field_sync(self):
        """Check if commercial fields are synced properly: testing with VAT field"""
        Partner = self.env['res.partner']
        company_1 = Partner.create({'name': 'company 1', 'is_company': True, 'vat': 'BE0123456789'})
        company_2 = Partner.create({'name': 'company 2', 'is_company': True, 'vat': 'BE9876543210'})

        partner = Partner.create({'name': 'someone', 'is_company': False, 'parent_id': company_1.id})
        # flush so the commercial-field sync is applied before reading back
        Partner.flush_recordset()
        self.assertEqual(partner.vat, company_1.vat, "VAT should be inherited from the company 1")

        # create a delivery address for the partner
        delivery = Partner.create({'name': 'somewhere', 'type': 'delivery', 'parent_id': partner.id})
        self.assertEqual(delivery.commercial_partner_id.id, company_1.id, "Commercial partner should be recomputed")
        self.assertEqual(delivery.vat, company_1.vat, "VAT should be inherited from the company 1")

        # move the partner to another company
        partner.write({'parent_id': company_2.id})
        partner.flush_recordset()
        # the whole subtree (partner + its delivery child) must follow
        self.assertEqual(partner.commercial_partner_id.id, company_2.id, "Commercial partner should be recomputed")
        self.assertEqual(partner.vat, company_2.vat, "VAT should be inherited from the company 2")
        self.assertEqual(delivery.commercial_partner_id.id, company_2.id, "Commercial partner should be recomputed on delivery")
        self.assertEqual(delivery.vat, company_2.vat, "VAT should be inherited from the company 2 to delivery")
||||
    def test_lang_computation_code(self):
        """ Check computation of lang: coming from installed languages, forced
        default value and propagation from parent."""
        default_lang_info = self.env['res.lang'].get_installed()[0]
        default_lang_code = default_lang_info[0]
        self.assertNotEqual(default_lang_code, 'de_DE')  # should not be the case, just to ease test
        self.assertNotEqual(default_lang_code, 'fr_FR')  # should not be the case, just to ease test

        # default is installed lang
        partner = self.env['res.partner'].create({'name': "Test Company"})
        self.assertEqual(partner.lang, default_lang_code)

        # check propagation of parent to child
        child = self.env['res.partner'].create({'name': 'First Child', 'parent_id': partner.id})
        self.assertEqual(child.lang, default_lang_code)

        # activate another languages to test language propagation when being in multi-lang
        self.env['res.lang']._activate_lang('de_DE')
        self.env['res.lang']._activate_lang('fr_FR')

        # default from context > default from installed
        partner = self.env['res.partner'].with_context(default_lang='de_DE').create({'name': "Test Company"})
        self.assertEqual(partner.lang, 'de_DE')
        # first child created while parent lang is de_DE, second after the
        # parent switched to fr_FR: each child keeps the lang at creation time
        first_child = self.env['res.partner'].create({'name': 'First Child', 'parent_id': partner.id})
        partner.write({'lang': 'fr_FR'})
        second_child = self.env['res.partner'].create({'name': 'Second Child', 'parent_id': partner.id})

        # check user input is kept
        self.assertEqual(partner.lang, 'fr_FR')
        self.assertEqual(first_child.lang, 'de_DE')
        self.assertEqual(second_child.lang, 'fr_FR')
||||
    def test_lang_computation_form_view(self):
        """ Check computation of lang: coming from installed languages, forced
        default value and propagation from parent."""
        default_lang_info = self.env['res.lang'].get_installed()[0]
        default_lang_code = default_lang_info[0]
        # The default installed lang must differ from the languages activated
        # below, otherwise the propagation checks would be meaningless.
        self.assertNotEqual(default_lang_code, 'de_DE')  # should not be the case, just to ease test
        self.assertNotEqual(default_lang_code, 'fr_FR')  # should not be the case, just to ease test

        # default is installed lang
        partner_form = Form(self.env['res.partner'], 'base.view_partner_form')
        partner_form.name = "Test Company"
        self.assertEqual(partner_form.lang, default_lang_code, "New partner's lang should be default one")
        partner = partner_form.save()
        self.assertEqual(partner.lang, default_lang_code)

        # check propagation of parent to child
        with partner_form.child_ids.new() as child:
            child.name = "First Child"
            self.assertEqual(child.lang, default_lang_code, "Child contact's lang should have the same as its parent")
        partner = partner_form.save()
        self.assertEqual(partner.child_ids.lang, default_lang_code)

        # activate other languages to test language propagation when being in multi-lang
        self.env['res.lang']._activate_lang('de_DE')
        self.env['res.lang']._activate_lang('fr_FR')

        # default from context > default from installed
        partner_form = Form(
            self.env['res.partner'].with_context(default_lang='de_DE'),
            'base.view_partner_form'
        )
        # setting company_type triggers the is_company onchange, mirroring the view:
        # <field name="is_company" invisible="1"/>
        # <field name="company_type" widget="radio" options="{'horizontal': true}"/>
        # @api.onchange('company_type')
        # def onchange_company_type(self):
        #     self.is_company = (self.company_type == 'company')
        partner_form.company_type = 'company'
        partner_form.name = "Test Company"
        self.assertEqual(partner_form.lang, 'de_DE', "New partner's lang should take default from context")
        with partner_form.child_ids.new() as child:
            child.name = "First Child"
            self.assertEqual(child.lang, 'de_DE', "Child contact's lang should be the same as its parent.")
        partner_form.lang = 'fr_FR'
        self.assertEqual(partner_form.lang, 'fr_FR', "New partner's lang should take user input")
        with partner_form.child_ids.new() as child:
            child.name = "Second Child"
            self.assertEqual(child.lang, 'fr_FR', "Child contact's lang should be the same as its parent.")
        partner = partner_form.save()

        # check final values (kept from form input)
        self.assertEqual(partner.lang, 'fr_FR')
        self.assertEqual(partner.child_ids.filtered(lambda p: p.name == "First Child").lang, 'de_DE')
        self.assertEqual(partner.child_ids.filtered(lambda p: p.name == "Second Child").lang, 'fr_FR')
||||
    def test_partner_merge_wizard_dst_partner_id(self):
        """ Check that dst_partner_id in merge wizard displays id along with partner name """
        test_partner = self.env['res.partner'].create({'name': 'Radu the Handsome'})
        # expected display name is "<name> (<db id>)"
        expected_partner_name = '%s (%s)' % (test_partner.name, test_partner.id)

        # partner_show_db_id in the context enables the id suffix in name_get()
        partner_merge_wizard = self.env['base.partner.merge.automatic.wizard'].with_context(
            {'partner_show_db_id': True, 'default_dst_partner_id': test_partner}).new()
        self.assertEqual(
            partner_merge_wizard.dst_partner_id.name_get(),
            [(test_partner.id, expected_partner_name)],
            "'Destination Contact' name should contain db ID in brackets"
        )
||||
    def test_partner_is_public(self):
        """ Check that base.partner_user is a public partner."""
        # both the public user and its partner ship archived
        self.assertFalse(self.env.ref('base.public_user').active)
        self.assertFalse(self.env.ref('base.public_partner').active)
        self.assertTrue(self.env.ref('base.public_partner').is_public)
||||
    def test_onchange_parent_sync_user(self):
        """Setting a parent on a new contact propagates the parent's
        salesperson (user_id) through the onchange."""
        company_1 = self.env['res.company'].create({'name': 'company_1'})
        test_user = self.env['res.users'].create({
            'name': 'This user',
            'login': 'thisu',
            'email': 'this.user@example.com',
            'company_id': company_1.id,
            'company_ids': [company_1.id],
        })
        test_parent_partner = self.env['res.partner'].create({
            'company_type': 'company',
            'name': 'Micheline',
            'user_id': test_user.id,
        })
        with Form(self.env['res.partner']) as partner_form:
            partner_form.parent_id = test_parent_partner
            partner_form.company_type = 'person'
            partner_form.name = 'Philip'
            self.assertEqual(partner_form.user_id, test_parent_partner.user_id)
||||
    def test_display_address_missing_key(self):
        """ Check _display_address when some keys are missing. As a defaultdict is used, missing keys should be
        filled with empty strings. """
        country = self.env["res.country"].create({"name": "TestCountry", "address_format": "%(city)s %(zip)s"})
        partner = self.env["res.partner"].create({
            "name": "TestPartner",
            "country_id": country.id,
            "city": "TestCity",
            "zip": "12345",
        })
        before = partner._display_address()
        # Manually update the country address_format because placeholders are checked by create
        # ('%%' escapes '%' for psycopg2 so the stored value keeps single-'%'
        # placeholders, including the bogus %(nothing)s one)
        self.env.cr.execute(
            "UPDATE res_country SET address_format ='%%(city)s %%(zip)s %%(nothing)s' WHERE id=%s",
            [country.id]
        )
        # drop the ORM cache so the raw-SQL change is re-read from the database
        self.env["res.country"].invalidate_model()
        self.assertEqual(before, partner._display_address().strip())
|
@ -0,0 +1,51 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
|
||||
|
||||
from odoo.addons.base.tests.common import SavepointCaseWithUserDemo
|
||||
|
||||
|
||||
class TestResPartnerBank(SavepointCaseWithUserDemo):
    """Tests acc_number storage and sanitized search behaviour."""

    def test_sanitized_acc_number(self):
        """A bank account is searchable by its raw number (with dashes,
        spaces and surrounding blanks) as well as by its sanitized form,
        case-insensitively; the raw value itself is stored unchanged."""
        partner_bank_model = self.env['res.partner.bank']
        acc_number = " BE-001 2518823 03 "
        vals = partner_bank_model.search([('acc_number', '=', acc_number)])
        self.assertEqual(0, len(vals))
        partner_bank = partner_bank_model.create({
            'acc_number': acc_number,
            'partner_id': self.env['res.partner'].create({'name': 'Pepper Test'}).id,
            'acc_type': 'bank',
        })
        # searching with the raw (unsanitized) number finds the record,
        # both with '=' and 'in' operators
        vals = partner_bank_model.search([('acc_number', '=', acc_number)])
        self.assertEqual(1, len(vals))
        self.assertEqual(partner_bank, vals[0])
        vals = partner_bank_model.search([('acc_number', 'in', [acc_number])])
        self.assertEqual(1, len(vals))
        self.assertEqual(partner_bank, vals[0])

        # the stored acc_number keeps the user's original formatting
        self.assertEqual(partner_bank.acc_number, acc_number)

        # sanitize the acc_number: searching on the sanitized form matches too
        sanitized_acc_number = 'BE001251882303'
        vals = partner_bank_model.search(
            [('acc_number', '=', sanitized_acc_number)])
        self.assertEqual(1, len(vals))
        self.assertEqual(partner_bank, vals[0])
        vals = partner_bank_model.search(
            [('acc_number', 'in', [sanitized_acc_number])])
        self.assertEqual(1, len(vals))
        self.assertEqual(partner_bank, vals[0])
        self.assertEqual(partner_bank.sanitized_acc_number,
                         sanitized_acc_number)

        # search is case insensitive
        vals = partner_bank_model.search(
            [('acc_number', '=', sanitized_acc_number.lower())])
        self.assertEqual(1, len(vals))
        vals = partner_bank_model.search(
            [('acc_number', '=', acc_number.lower())])
        self.assertEqual(1, len(vals))
|
@ -0,0 +1,569 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
from odoo.addons.base.models.res_users import is_selection_groups, get_selection_groups, name_selection_groups
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
from odoo.tests.common import TransactionCase, Form, tagged, new_test_user
|
||||
from odoo.tools import mute_logger
|
||||
|
||||
|
||||
class TestUsers(TransactionCase):
    """Unit tests for res.users: name_search, partner/company
    synchronisation, portal-account deactivation, home action
    restrictions and context_get() lang resolution."""

    def test_name_search(self):
        """ Check name_search on user. """
        User = self.env['res.users']

        test_user = User.create({'name': 'Flad the Impaler', 'login': 'vlad'})
        like_user = User.create({'name': 'Wlad the Impaler', 'login': 'vladi'})
        other_user = User.create({'name': 'Nothing similar', 'login': 'nothing similar'})
        all_users = test_user | like_user | other_user

        # intersect results with all_users to ignore pre-existing demo users
        res = User.name_search('vlad', operator='ilike')
        self.assertEqual(User.browse(i[0] for i in res) & all_users, test_user)

        res = User.name_search('vlad', operator='not ilike')
        self.assertEqual(User.browse(i[0] for i in res) & all_users, all_users)

        # empty needle: 'ilike' matches everything, 'not ilike' matches nothing
        res = User.name_search('', operator='ilike')
        self.assertEqual(User.browse(i[0] for i in res) & all_users, all_users)

        res = User.name_search('', operator='not ilike')
        self.assertEqual(User.browse(i[0] for i in res) & all_users, User)

        res = User.name_search('lad', operator='ilike')
        self.assertEqual(User.browse(i[0] for i in res) & all_users, test_user | like_user)

        res = User.name_search('lad', operator='not ilike')
        self.assertEqual(User.browse(i[0] for i in res) & all_users, other_user)

    def test_user_partner(self):
        """ Check that the user partner is well created """

        User = self.env['res.users']
        Partner = self.env['res.partner']
        Company = self.env['res.company']

        company_1 = Company.create({'name': 'company_1'})
        company_2 = Company.create({'name': 'company_2'})

        partner = Partner.create({
            'name': 'Bob Partner',
            'company_id': company_2.id
        })

        # case 1 : the user has no partner
        test_user = User.create({
            'name': 'John Smith',
            'login': 'jsmith',
            'company_ids': [company_1.id],
            'company_id': company_1.id
        })

        self.assertFalse(
            test_user.partner_id.company_id,
            "The partner_id linked to a user should be created without any company_id")

        # case 2 : the user has a partner
        test_user = User.create({
            'name': 'Bob Smith',
            'login': 'bsmith',
            'company_ids': [company_1.id],
            'company_id': company_1.id,
            'partner_id': partner.id
        })

        self.assertEqual(
            test_user.partner_id.company_id,
            company_1,
            "If the partner_id of a user has already a company, it is replaced by the user company"
        )

    def test_change_user_company(self):
        """ Check the partner company update when the user company is changed """

        User = self.env['res.users']
        Company = self.env['res.company']

        test_user = User.create({'name': 'John Smith', 'login': 'jsmith'})
        company_1 = Company.create({'name': 'company_1'})
        company_2 = Company.create({'name': 'company_2'})

        test_user.company_ids += company_1
        test_user.company_ids += company_2

        # 1: the partner has no company_id, no modification
        test_user.write({
            'company_id': company_1.id
        })

        self.assertFalse(
            test_user.partner_id.company_id,
            "On user company change, if its partner_id has no company_id,"
            "the company_id of the partner_id shall NOT be updated")

        # 2: the partner has a company_id different from the new one, update it
        test_user.partner_id.write({
            'company_id': company_1.id
        })

        test_user.write({
            'company_id': company_2.id
        })

        self.assertEqual(
            test_user.partner_id.company_id,
            company_2,
            "On user company change, if its partner_id has already a company_id,"
            "the company_id of the partner_id shall be updated"
        )

    @mute_logger('odoo.sql_db')
    def test_deactivate_portal_users_access(self):
        """Test that only a portal users can deactivate his account."""
        user_internal = self.env['res.users'].create({
            'name': 'Internal',
            'login': 'user_internal',
            'password': 'password',
            'groups_id': [self.env.ref('base.group_user').id],
        })

        with self.assertRaises(UserError, msg='Internal users should not be able to deactivate their account'):
            user_internal._deactivate_portal_user()

    @mute_logger('odoo.sql_db')
    def test_deactivate_portal_users_archive_and_remove(self):
        """Test that if the account can not be removed, it's archived instead
        and sensitive information are removed.

        In this test, the deletion of "portal_user" will succeed,
        but the deletion of "portal_user_2" will fail.
        """
        User = self.env['res.users']
        portal_user = User.create({
            'name': 'Portal',
            'login': 'portal_user',
            'password': 'password',
            'groups_id': [self.env.ref('base.group_portal').id],
        })
        portal_partner = portal_user.partner_id

        portal_user_2 = User.create({
            'name': 'Portal',
            'login': 'portal_user_2',
            'password': 'password',
            'groups_id': [self.env.ref('base.group_portal').id],
        })
        portal_partner_2 = portal_user_2.partner_id

        (portal_user | portal_user_2)._deactivate_portal_user()

        # deactivation first archives the user and anonymizes the login
        self.assertTrue(portal_user.exists() and not portal_user.active, 'Should have archived the user 1')

        self.assertEqual(portal_user.name, 'Portal', 'Should have kept the user name')
        self.assertEqual(portal_user.partner_id.name, 'Portal', 'Should have kept the partner name')
        self.assertNotEqual(portal_user.login, 'portal_user', 'Should have removed the user login')

        asked_deletion_1 = self.env['res.users.deletion'].search([('user_id', '=', portal_user.id)])
        asked_deletion_2 = self.env['res.users.deletion'].search([('user_id', '=', portal_user_2.id)])

        self.assertTrue(asked_deletion_1, 'Should have added the user 1 in the deletion queue')
        self.assertTrue(asked_deletion_2, 'Should have added the user 2 in the deletion queue')

        # The deletion will fail for "portal_user_2",
        # because of the absence of "ondelete=cascade"
        self.cron = self.env['ir.cron'].create({
            'name': 'Test Cron',
            'user_id': portal_user_2.id,
            'model_id': self.env.ref('base.model_res_partner').id,
        })

        self.env['res.users.deletion']._gc_portal_users()

        self.assertFalse(portal_user.exists(), 'Should have removed the user')
        self.assertFalse(portal_partner.exists(), 'Should have removed the partner')
        self.assertEqual(asked_deletion_1.state, 'done', 'Should have marked the deletion as done')

        self.assertTrue(portal_user_2.exists(), 'Should have kept the user')
        self.assertTrue(portal_partner_2.exists(), 'Should have kept the partner')
        self.assertEqual(asked_deletion_2.state, 'fail', 'Should have marked the deletion as failed')

    def test_user_home_action_restriction(self):
        """A user's home action must not carry a restricted context key
        such as 'active_id'; assigning one raises a ValidationError."""
        test_user = new_test_user(self.env, 'hello world')

        # Find an action that contains restricted context ('active_id')
        restricted_action = self.env['ir.actions.act_window'].search([('context', 'ilike', 'active_id')], limit=1)
        with self.assertRaises(ValidationError):
            test_user.action_id = restricted_action.id

        # Find an action without restricted context
        allowed_action = self.env['ir.actions.act_window'].search(['!', ('context', 'ilike', 'active_id')], limit=1)

        test_user.action_id = allowed_action.id
        self.assertEqual(test_user.action_id.id, allowed_action.id)

    def test_context_get_lang(self):
        """context_get() lang fallback chain exercised below:
        user lang > request.best_lang > company partner lang > first
        installed lang."""
        self.env['res.lang'].with_context(active_test=False).search([
            ('code', 'in', ['fr_FR', 'es_ES', 'de_DE', 'en_US'])
        ]).write({'active': True})

        user = new_test_user(self.env, 'jackoneill')
        user = user.with_user(user)
        user.lang = 'fr_FR'

        company = user.company_id.partner_id.sudo()
        company.lang = 'de_DE'

        # fake an HTTP request that exposes a best_lang, by patching the
        # module-level `request` used by res_users
        request = SimpleNamespace()
        request.best_lang = 'es_ES'
        request_patch = patch('odoo.addons.base.models.res_users.request', request)
        self.addCleanup(request_patch.stop)
        request_patch.start()

        self.assertEqual(user.context_get()['lang'], 'fr_FR')
        # context_get() is cached in the registry: clear before each
        # fallback check so the new state is actually recomputed
        self.env.registry.clear_caches()
        user.lang = False

        self.assertEqual(user.context_get()['lang'], 'es_ES')
        self.env.registry.clear_caches()
        request_patch.stop()

        self.assertEqual(user.context_get()['lang'], 'de_DE')
        self.env.registry.clear_caches()
        company.lang = False

        self.assertEqual(user.context_get()['lang'], 'en_US')
@tagged('post_install', '-at_install')
class TestUsers2(TransactionCase):
    """Tests for the reified group pseudo-fields (in_group_* /
    sel_groups_*) exposed on the user form view."""

    def test_reified_groups(self):
        """ The groups handler doesn't use the "real" view with pseudo-fields
        during installation, so it always works (because it uses the normal
        groups_id field).
        """
        # use the specific views which has the pseudo-fields
        f = Form(self.env['res.users'], view='base.view_users_form')
        f.name = "bob"
        f.login = "bob"
        user = f.save()

        self.assertIn(self.env.ref('base.group_user'), user.groups_id)

        # all template user groups are copied
        default_user = self.env.ref('base.default_user')
        self.assertEqual(default_user.groups_id, user.groups_id)

    def test_selection_groups(self):
        """Check a sel_groups_* pseudo-field: reading, writing, and the
        normalization back into groups_id commands."""
        # create 3 groups that should be in a selection
        app = self.env['ir.module.category'].create({'name': 'Foo'})
        group1, group2, group0 = self.env['res.groups'].create([
            {'name': name, 'category_id': app.id}
            for name in ('User', 'Manager', 'Visitor')
        ])
        # THIS PART IS NECESSARY TO REPRODUCE AN ISSUE: group1.id < group2.id < group0.id
        self.assertLess(group1.id, group2.id)
        self.assertLess(group2.id, group0.id)
        # implication order is group0 < group1 < group2
        group2.implied_ids = group1
        group1.implied_ids = group0
        groups = group0 + group1 + group2

        # determine the name of the field corresponding to groups
        fname = next(
            name
            for name in self.env['res.users'].fields_get()
            if is_selection_groups(name) and group0.id in get_selection_groups(name)
        )
        self.assertCountEqual(get_selection_groups(fname), groups.ids)

        # create a user
        user = self.env['res.users'].create({'name': 'foo', 'login': 'foo'})

        # put user in group0, and check field value
        user.write({fname: group0.id})
        self.assertEqual(user.groups_id & groups, group0)
        self.assertEqual(user.read([fname])[0][fname], group0.id)

        # put user in group1, and check field value (group0 implied)
        user.write({fname: group1.id})
        self.assertEqual(user.groups_id & groups, group0 + group1)
        self.assertEqual(user.read([fname])[0][fname], group1.id)

        # put user in group2, and check field value (group0, group1 implied)
        user.write({fname: group2.id})
        self.assertEqual(user.groups_id & groups, groups)
        self.assertEqual(user.read([fname])[0][fname], group2.id)

        # a reified value is normalized into unlink (3) / link (4) commands
        normalized_values = user._remove_reified_groups({fname: group0.id})
        self.assertEqual(sorted(normalized_values['groups_id']), [(3, group1.id), (3, group2.id), (4, group0.id)])

        normalized_values = user._remove_reified_groups({fname: group1.id})
        self.assertEqual(sorted(normalized_values['groups_id']), [(3, group2.id), (4, group1.id)])

        normalized_values = user._remove_reified_groups({fname: group2.id})
        self.assertEqual(normalized_values['groups_id'], [(4, group2.id)])

    def test_read_group_with_reified_field(self):
        """ Check that read_group gets rid of reified fields"""
        User = self.env['res.users']
        fnames = ['name', 'email', 'login']

        # find some reified field name
        reified_fname = next(
            fname
            for fname in User.fields_get()
            if fname.startswith(('in_group_', 'sel_groups_'))
        )

        # check that the reified field name has no effect in fields
        res_with_reified = User.read_group([], fnames + [reified_fname], ['company_id'])
        res_without_reified = User.read_group([], fnames, ['company_id'])
        self.assertEqual(res_with_reified, res_without_reified, "Reified fields should be ignored")

        # Verify that the read_group is raising an error if reified field is used as groupby
        with self.assertRaises(ValueError):
            User.read_group([], fnames + [reified_fname], [reified_fname])

    def test_reified_groups_on_change(self):
        """Test that a change on a reified fields trigger the onchange of groups_id."""
        group_public = self.env.ref('base.group_public')
        group_portal = self.env.ref('base.group_portal')
        group_user = self.env.ref('base.group_user')

        # Build the reified group field name
        user_groups = group_public | group_portal | group_user
        user_groups_ids = [str(group_id) for group_id in sorted(user_groups.ids)]
        group_field_name = f"sel_groups_{'_'.join(user_groups_ids)}"

        # the field sits in a group restricted to base.group_no_one:
        # <group col="4" attrs="{'invisible': [('sel_groups_1_9_10', '!=', 1)]}" groups="base.group_no_one" class="o_label_nowrap">
        with self.debug_mode():
            user_form = Form(self.env['res.users'], view='base.view_users_form')
            user_form.name = "Test"
            user_form.login = "Test"
            self.assertFalse(user_form.share)

            # portal and public users are "share" users, internal ones are not
            setattr(user_form, group_field_name, group_portal.id)
            self.assertTrue(user_form.share, 'The groups_id onchange should have been triggered')

            setattr(user_form, group_field_name, group_user.id)
            self.assertFalse(user_form.share, 'The groups_id onchange should have been triggered')

            setattr(user_form, group_field_name, group_public.id)
            self.assertTrue(user_form.share, 'The groups_id onchange should have been triggered')
@tagged('post_install', '-at_install', 'res_groups')
|
||||
class TestUsersGroupWarning(TransactionCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
"""
|
||||
These are the Groups and their Hierarchy we have Used to test Group warnings.
|
||||
|
||||
Category groups hierarchy:
|
||||
Sales
|
||||
├── User: All Documents
|
||||
└── Administrator
|
||||
Timesheets
|
||||
├── User: own timesheets only
|
||||
├── User: all timesheets
|
||||
└── Administrator
|
||||
Project
|
||||
├── User
|
||||
└── Administrator
|
||||
Field Service
|
||||
├── User
|
||||
└── Administrator
|
||||
|
||||
Implied groups hierarchy:
|
||||
Sales / Administrator
|
||||
└── Sales / User: All Documents
|
||||
|
||||
Timesheets / Administrator
|
||||
└── Timesheets / User: all timesheets
|
||||
└── Timehseets / User: own timesheets only
|
||||
|
||||
Project / Administrator
|
||||
├── Project / User
|
||||
└── Timesheets / User: all timesheets
|
||||
|
||||
Field Service / Administrator
|
||||
├── Sales / Administrator
|
||||
├── Project / Administrator
|
||||
└── Field Service / User
|
||||
"""
|
||||
super().setUpClass()
|
||||
ResGroups = cls.env['res.groups']
|
||||
IrModuleCategory = cls.env['ir.module.category']
|
||||
categ_sales = IrModuleCategory.create({'name': 'Sales'})
|
||||
categ_project = IrModuleCategory.create({'name': 'Project'})
|
||||
categ_field_service = IrModuleCategory.create({'name': 'Field Service'})
|
||||
categ_timesheets = IrModuleCategory.create({'name': 'Timesheets'})
|
||||
|
||||
# Sales
|
||||
cls.group_sales_user, cls.group_sales_administrator = ResGroups.create([
|
||||
{'name': 'User: All Documents', 'category_id': categ_sales.id},
|
||||
{'name': 'Administrator', 'category_id': categ_sales.id},
|
||||
])
|
||||
cls.sales_categ_field = name_selection_groups((cls.group_sales_user | cls.group_sales_administrator).ids)
|
||||
cls.group_sales_administrator.implied_ids = cls.group_sales_user
|
||||
|
||||
# Timesheets
|
||||
cls.group_timesheets_user_own_timesheet = ResGroups.create([
|
||||
{'name': 'User: own timesheets only', 'category_id': categ_timesheets.id}
|
||||
])
|
||||
cls.group_timesheets_user_all_timesheet = ResGroups.create([
|
||||
{'name': 'User: all timesheets', 'category_id': categ_timesheets.id}
|
||||
])
|
||||
cls.group_timesheets_administrator = ResGroups.create([
|
||||
{'name': 'Administrator', 'category_id': categ_timesheets.id}
|
||||
])
|
||||
cls.timesheets_categ_field = name_selection_groups((cls.group_timesheets_user_own_timesheet |
|
||||
cls.group_timesheets_user_all_timesheet |
|
||||
cls.group_timesheets_administrator).ids
|
||||
)
|
||||
cls.group_timesheets_administrator.implied_ids += cls.group_timesheets_user_all_timesheet
|
||||
cls.group_timesheets_user_all_timesheet.implied_ids += cls.group_timesheets_user_own_timesheet
|
||||
|
||||
# Project
|
||||
cls.group_project_user, cls.group_project_admnistrator = ResGroups.create([
|
||||
{'name': 'User', 'category_id': categ_project.id},
|
||||
{'name': 'Administrator', 'category_id': categ_project.id},
|
||||
])
|
||||
cls.project_categ_field = name_selection_groups((cls.group_project_user | cls.group_project_admnistrator).ids)
|
||||
cls.group_project_admnistrator.implied_ids = (cls.group_project_user | cls.group_timesheets_user_all_timesheet)
|
||||
|
||||
# Field Service
|
||||
cls.group_field_service_user, cls.group_field_service_administrator = ResGroups.create([
|
||||
{'name': 'User', 'category_id': categ_field_service.id},
|
||||
{'name': 'Administrator', 'category_id': categ_field_service.id},
|
||||
])
|
||||
cls.field_service_categ_field = name_selection_groups((cls.group_field_service_user | cls.group_field_service_administrator).ids)
|
||||
cls.group_field_service_administrator.implied_ids = (cls.group_sales_administrator |
|
||||
cls.group_project_admnistrator |
|
||||
cls.group_field_service_user).ids
|
||||
|
||||
# User
|
||||
cls.test_group_user = cls.env['res.users'].create({
|
||||
'name': 'Test Group User',
|
||||
'login': 'TestGroupUser',
|
||||
'groups_id': (
|
||||
cls.env.ref('base.group_user') |
|
||||
cls.group_timesheets_administrator |
|
||||
cls.group_field_service_administrator).ids,
|
||||
})
|
||||
|
||||
|
||||
def test_user_group_empty_group_warning(self):
|
||||
""" User changes Empty Sales access from 'Sales: Administrator'. The
|
||||
warning should be there since 'Sales: Administrator' is required when
|
||||
user is having 'Field Service: Administrator'. When user reverts the
|
||||
changes, warning should disappear. """
|
||||
# 97 requests if only base is installed
|
||||
# 412 runbot community
|
||||
# 549 runbot enterprise
|
||||
with self.assertQueryCount(__system__=549), \
|
||||
Form(self.test_group_user.with_context(show_user_group_warning=True), view='base.view_users_form') as UserForm:
|
||||
UserForm._values[self.sales_categ_field] = False
|
||||
UserForm._perform_onchange([self.sales_categ_field])
|
||||
|
||||
self.assertEqual(
|
||||
UserForm.user_group_warning,
|
||||
'Since Test Group User is a/an "Field Service: Administrator", they will at least obtain the right "Sales: Administrator"'
|
||||
)
|
||||
|
||||
UserForm._values[self.sales_categ_field] = self.group_sales_administrator.id
|
||||
UserForm._perform_onchange([self.sales_categ_field])
|
||||
self.assertFalse(UserForm.user_group_warning)
|
||||
|
||||
def test_user_group_inheritance_warning(self):
|
||||
""" User changes 'Sales: User' from 'Sales: Administrator'. The warning
|
||||
should be there since 'Sales: Administrator' is required when user is
|
||||
having 'Field Service: Administrator'. When user reverts the changes,
|
||||
warning should disappear. """
|
||||
# 97 requests if only base is installed
|
||||
# 412 runbot community
|
||||
# 549 runbot enterprise
|
||||
with self.assertQueryCount(__system__=549), \
|
||||
Form(self.test_group_user.with_context(show_user_group_warning=True), view='base.view_users_form') as UserForm:
|
||||
UserForm._values[self.sales_categ_field] = self.group_sales_user.id
|
||||
UserForm._perform_onchange([self.sales_categ_field])
|
||||
|
||||
self.assertEqual(
|
||||
UserForm.user_group_warning,
|
||||
'Since Test Group User is a/an "Field Service: Administrator", they will at least obtain the right "Sales: Administrator"'
|
||||
)
|
||||
|
||||
UserForm._values[self.sales_categ_field] = self.group_sales_administrator.id
|
||||
UserForm._perform_onchange([self.sales_categ_field])
|
||||
self.assertFalse(UserForm.user_group_warning)
|
||||
|
||||
def test_user_group_inheritance_warning_multi(self):
|
||||
""" User changes 'Sales: User' from 'Sales: Administrator' and
|
||||
'Project: User' from 'Project: Administrator'. The warning should
|
||||
be there since 'Sales: Administrator' and 'Project: Administrator'
|
||||
are required when user is havning 'Field Service: Administrator'.
|
||||
When user reverts the changes For 'Sales: Administrator', warning
|
||||
should disappear for Sales Access."""
|
||||
# 101 requests if only base is installed
|
||||
# 416 runbot community
|
||||
# 553 runbot enterprise
|
||||
with self.assertQueryCount(__system__=553), \
|
||||
Form(self.test_group_user.with_context(show_user_group_warning=True), view='base.view_users_form') as UserForm:
|
||||
UserForm._values[self.sales_categ_field] = self.group_sales_user.id
|
||||
UserForm._values[self.project_categ_field] = self.group_project_user.id
|
||||
UserForm._perform_onchange([self.sales_categ_field])
|
||||
|
||||
self.assertTrue(
|
||||
UserForm.user_group_warning,
|
||||
'Since Test Group User is a/an "Field Service: Administrator", they will at least obtain the right "Sales: Administrator", Project: Administrator"',
|
||||
)
|
||||
|
||||
UserForm._values[self.sales_categ_field] = self.group_sales_administrator.id
|
||||
UserForm._perform_onchange([self.sales_categ_field])
|
||||
|
||||
self.assertEqual(
|
||||
UserForm.user_group_warning,
|
||||
'Since Test Group User is a/an "Field Service: Administrator", they will at least obtain the right "Project: Administrator"'
|
||||
)
|
||||
|
||||
def test_user_group_least_possible_inheritance_warning(self):
|
||||
""" User changes 'Timesheets: User: own timesheets only ' from
|
||||
'Timesheets: Administrator'. The warning should be there since
|
||||
'Timesheets: User: all timesheets' is at least required when user is
|
||||
having 'Project: Administrator'. When user reverts the changes For
|
||||
'Timesheets: User: all timesheets', warning should disappear."""
|
||||
# 98 requests if only base is installed
|
||||
# 413 runbot community
|
||||
# 550 runbot enterprise
|
||||
with self.assertQueryCount(__system__=550), \
|
||||
Form(self.test_group_user.with_context(show_user_group_warning=True), view='base.view_users_form') as UserForm:
|
||||
UserForm._values[self.timesheets_categ_field] = self.group_timesheets_user_own_timesheet.id
|
||||
UserForm._perform_onchange([self.timesheets_categ_field])
|
||||
|
||||
self.assertEqual(
|
||||
UserForm.user_group_warning,
|
||||
'Since Test Group User is a/an "Project: Administrator", they will at least obtain the right "Timesheets: User: all timesheets"'
|
||||
)
|
||||
|
||||
UserForm._values[self.timesheets_categ_field] = self.group_timesheets_user_all_timesheet.id
|
||||
UserForm._perform_onchange([self.timesheets_categ_field])
|
||||
self.assertFalse(UserForm.user_group_warning)
|
||||
|
||||
    def test_user_group_parent_inheritance_no_warning(self):
        """ User changes 'Field Service: User' from 'Field Service: Administrator'.
        The warning should not be there since 'Field Service: User' is not affected
        by any other groups."""
        # Query-count budget per environment (kept in sync manually):
        # 83 requests if only base is installed
        # 397 runbot community
        # 534 runbot enterprise
        with self.assertQueryCount(__system__=534), \
                Form(self.test_group_user.with_context(show_user_group_warning=True), view='base.view_users_form') as UserForm:
            # downgrading a group that no other selected group implies must stay silent
            UserForm._values[self.field_service_categ_field] = self.group_field_service_user.id
            UserForm._perform_onchange([self.field_service_categ_field])

            self.assertFalse(UserForm.user_group_warning)
209
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_search.py
Normal file
209
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_search.py
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo import Command
|
||||
|
||||
|
||||
class test_search(TransactionCase):
    """Tests for ``search()``: result ordering (including ordering through
    ``_inherits`` and many2one fields), the ``active``/``x_active``
    conventions, and ``search_count``."""

    def patch_order(self, model, order):
        # temporarily override the model's default _order for this test only
        self.patch(self.registry[model], '_order', order)

    def test_00_search_order(self):
        # search() must hide inactive records by default and honour the
        # `order` argument, including multi-clause orders.
        Partner = self.env['res.partner']

        # Create six partners in a controlled sequence so their ids are
        # known; two are archived to exercise the active_test behaviour.
        c = Partner.create({'name': 'test_search_order_C'})
        d = Partner.create({'name': 'test_search_order_D', 'active': False})
        a = Partner.create({'name': 'test_search_order_A'})
        b = Partner.create({'name': 'test_search_order_B'})
        ab = Partner.create({'name': 'test_search_order_AB'})
        e = Partner.create({'name': 'test_search_order_E', 'active': False})

        name_domain = [('name', 'like', 'test_search_order%')]

        # Plain searches: archived records are excluded and `order`
        # drives the sequence of the result.
        for order, expected, label in [
            ("name asc", [a, ab, b, c], 'NAME ASC'),
            ("name desc", [c, b, ab, a], 'NAME DESC'),
            ("id asc", [c, a, b, ab], 'ID ASC'),
            ("id desc", [ab, b, a, c], 'ID DESC'),
        ]:
            found = Partner.search(name_domain, order=order)
            self.assertEqual(expected, list(found), "Search with '%s' order failed." % label)

        # As soon as the domain mentions 'active', archived records are
        # included again; `order` accepts any legal comma-separated clauses.
        active_domain = name_domain + ['|', ('active', '=', True), ('active', '=', False)]
        for order, expected, label in [
            ("active asc, id asc", [d, e, c, a, b, ab], 'ACTIVE ASC, ID ASC'),
            ("active desc, id asc", [c, a, b, ab, d, e], 'ACTIVE DESC, ID ASC'),
            ("active asc, id desc", [e, d, ab, b, a, c], 'ACTIVE ASC, ID DESC'),
            ("active desc, id desc", [ab, b, a, c, e, d], 'ACTIVE DESC, ID DESC'),
            ("id asc, active asc", [c, d, a, b, ab, e], 'ID ASC, ACTIVE ASC'),
            ("id asc, active desc", [c, d, a, b, ab, e], 'ID ASC, ACTIVE DESC'),
            ("id desc, active asc", [e, ab, b, a, d, c], 'ID DESC, ACTIVE ASC'),
            ("id desc, active desc", [e, ab, b, a, d, c], 'ID DESC, ACTIVE DESC'),
        ]:
            found = Partner.search(active_domain, order=order)
            self.assertEqual(expected, list(found), "Search with '%s' order failed." % label)

    def test_10_inherits_m2order(self):
        # ordering on _inherits'd (res.partner) fields and many2one columns
        Users = self.env['res.users']

        employee_group = self.env.ref('base.group_user')

        country_be = self.env.ref('base.be')
        country_us = self.env.ref('base.us')
        us_states = country_us.state_ids[:2]

        # create the users whose relative ordering will be checked
        searcher = Users.create({'name': '__search', 'login': '__search', 'groups_id': [Command.set([employee_group.id])]})
        # NOTE(review): state_id is assigned a *country* id below; this looks
        # like intentional dummy data, but confirm before relying on it.
        user_a = Users.create({'name': '__test_A', 'login': '__test_A', 'country_id': country_be.id, 'state_id': country_be.id})
        user_b = Users.create({'name': '__test_B', 'login': '__a_test_B', 'country_id': country_us.id, 'state_id': us_states[1].id})
        user_c = Users.create({'name': '__test_B', 'login': '__z_test_B', 'country_id': country_us.id, 'state_id': us_states[0].id})

        # run the searches as the plain employee user
        Users = Users.with_user(searcher)

        # order on an inherited partner field first, then a users field
        expected_ids = [searcher.id, user_a.id, user_c.id, user_b.id]
        user_ids = Users.search([('id', 'in', expected_ids)], order='name asc, login desc').ids
        self.assertEqual(user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')

        # mix many2one columns with inherited fields
        expected_ids = [user_c.id, user_b.id, user_a.id, searcher.id]
        user_ids = Users.search([('id', 'in', expected_ids)], order='state_id asc, country_id desc, name asc, login desc').ids
        self.assertEqual(user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')

        expected_ids = [searcher.id, user_b.id, user_c.id, user_a.id]
        user_ids = Users.search([('id', 'in', expected_ids)], order='country_id desc, state_id desc, name asc, login desc').ids
        self.assertEqual(user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')

        # same ordering, but coming from the model's default _order instead
        # of the order= parameter
        self.patch_order('res.users', 'country_id desc, name asc, login desc')
        expected_ids = [searcher.id, user_c.id, user_b.id, user_a.id]
        user_ids = Users.search([('id', 'in', expected_ids)]).ids
        self.assertEqual(user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')

    def test_11_indirect_inherits_m2o_order(self):
        # ordering by a many2one (user_id) whose comodel sorts through _inherits
        Cron = self.env['ir.cron']
        Users = self.env['res.users']

        partner_model_id = self.env.ref('base.model_res_partner').id
        user_ids = {}
        cron_ids = {}
        for name in 'BAC':
            user_ids[name] = Users.create({'name': name, 'login': name}).id
            cron_ids[name] = Cron.create({'name': name, 'model_id': partner_model_id, 'user_id': user_ids[name]}).id

        found_ids = Cron.search([('id', 'in', list(cron_ids.values()))], order='user_id').ids
        self.assertEqual(found_ids, [cron_ids[name] for name in 'ABC'])

    def test_12_m2o_order_loop_self(self):
        # a model ordered by its own parent_id must not recurse forever
        Categories = self.env['ir.module.category']
        cat_ids = {}

        def create(name, **values):
            cat_ids[name] = Categories.create(dict(values, name=name)).id

        self.patch_order('ir.module.category', 'parent_id desc, name')

        create('A')
        create('B', parent_id=cat_ids['A'])
        create('C', parent_id=cat_ids['A'])
        create('D')
        create('E', parent_id=cat_ids['D'])
        create('F', parent_id=cat_ids['D'])

        found_ids = Categories.search([('id', 'in', list(cat_ids.values()))]).ids
        self.assertEqual(found_ids, [cat_ids[name] for name in 'ADEFBC'])

    def test_13_m2o_order_loop_multi(self):
        # mutually recursive m2o orders (users -> partner -> create_uid -> users)
        # must terminate; effective sort: creator's login desc, then name
        Users = self.env['res.users']

        self.patch_order('res.partner', 'create_uid, name')
        self.patch_order('res.users', 'partner_id, login desc')

        groups = dict(groups_id=[Command.set([self.ref('base.group_system'),
                                              self.ref('base.group_partner_manager')])])

        u1 = Users.create(dict(name='Q', login='m', **groups)).id
        u2 = Users.with_user(u1).create(dict(name='B', login='f', **groups)).id
        u3 = Users.create(dict(name='C', login='c', **groups)).id
        u4 = Users.with_user(u2).create(dict(name='D', login='z', **groups)).id

        expected_ids = [u2, u4, u3, u1]
        self.assertEqual(Users.search([('id', 'in', expected_ids)]).ids, expected_ids)

    def test_20_x_active(self):
        """Check the behaviour of the x_active field."""
        # A custom x_active field must act as the active flag on a model
        # that has none; res.country ships without an `active` field.
        Country = self.env['res.country']
        self.assertNotIn('active', Country._fields)  # just in case someone adds the active field in the model
        self.env['ir.model.fields'].create({
            'name': 'x_active',
            'model_id': self.env.ref('base.model_res_country').id,
            'ttype': 'boolean',
        })
        self.assertEqual('x_active', Country._active_name)
        ussr = Country.create({'name': 'USSR', 'x_active': False})
        found = Country.search([('name', '=', 'USSR')])
        self.assertFalse(found)
        found = Country.with_context(active_test=False).search([('name', '=', 'USSR')])
        self.assertIn(ussr, found, "Search with active_test on a custom x_active field failed")
        found = Country.search([('name', '=', 'USSR'), ('x_active', '=', False)])
        self.assertIn(ussr, found, "Search with active_test on a custom x_active field failed")

        # On a model that already has `active` (res.bank), a custom x_active
        # field must not interfere with the standard behaviour.
        Bank = self.env['res.bank']
        self.env['ir.model.fields'].create({
            'name': 'x_active',
            'model_id': self.env.ref('base.model_res_bank').id,
            'ttype': 'boolean',
        })
        self.assertEqual('active', Bank._active_name)
        bank = Bank.create({'name': 'Crédit Communal', 'x_active': False, 'active': True})
        found = Bank.search([('name', '=', 'Crédit Communal')])
        self.assertIn(bank, found, "Search for active record with x_active set to False has failed")
        bank.write({
            'active': False,
            'x_active': True,
        })
        found = Bank.search([('name', '=', 'Crédit Communal')])
        self.assertNotIn(bank, found, "Search for inactive record with x_active set to True has failed")

    def test_21_search_count(self):
        # search_count() and search(count=True) must agree, and honour limit
        Partner = self.env['res.partner']
        count_before = Partner.search_count([])
        created = Partner.create([
            {'name': 'abc'},
            {'name': 'zer'},
            {'name': 'christope'},
            {'name': 'runbot'},
        ])
        total = len(created) + count_before
        self.assertEqual(total, Partner.search_count([]))
        self.assertEqual(total, Partner.search([], count=True))

        self.assertEqual(3, Partner.search_count([], limit=3))
        self.assertEqual(3, Partner.search([], count=True, limit=3))
|
|
@ -0,0 +1,82 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from odoo.tests import BaseCase, tagged
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestRetryCommon(BaseCase):
    """Shared helpers for the retry meta-tests."""

    def get_tests_run_count(self):
        # one initial run plus the configured number of retries
        retries = os.environ.get('ODOO_TEST_FAILURE_RETRIES', 0)
        return 1 + int(retries)

    def update_count(self):
        # per-instance run counter, lazily initialised on first call
        if not hasattr(self, 'count'):
            self.count = 0
        self.count += 1
|
||||
@tagged('-standard', 'test_retry', 'test_retry_success')
class TestRetry(TestRetryCommon):
    """ Check some tests behaviour when ODOO_TEST_FAILURE_RETRIES is set"""

    def test_log_levels(self):
        # emit one record per level of interest; nothing is asserted here
        _logger.debug('test debug')
        _logger.info('test info')
        _logger.runbot('test 25')

    def test_retry_success(self):
        # fail (logging an error) on every run except the final retry
        expected_runs = self.get_tests_run_count()
        self.update_count()
        if expected_runs != self.count:
            _logger.error('Failure')
        self.assertEqual(expected_runs, self.count)
|
||||
@tagged('-standard', 'test_retry', 'test_retry_failures')
class TestRetryFailures(TestRetryCommon):
    """Tests expected to keep failing even after every retry."""

    def test_retry_failure_assert(self):
        # an always-false assertion: fails on every run
        self.assertFalse(1 == 1)

    def test_retry_failure_log(self):
        # an error-level log alone is enough to mark the test as failed
        _logger.error('Failure')
||||
@tagged('-standard', 'test_retry', 'test_retry_success')
class TestRetrySubtest(TestRetryCommon):
    """Retry scenarios where the failure happens inside subTest()."""

    def test_retry_subtest_success_one(self):
        # only the second iteration opens a subtest; it fails on every run
        # except the final retry
        expected_runs = self.get_tests_run_count()
        self.update_count()
        for iteration in range(3):
            if iteration != 1:
                continue
            with self.subTest():
                if expected_runs != self.count:
                    _logger.error('Failure')
                self.assertEqual(expected_runs, self.count)

    def test_retry_subtest_success_all(self):
        # every iteration opens a subtest; all fail until the final retry
        expected_runs = self.get_tests_run_count()
        self.update_count()
        for _ in range(3):
            with self.subTest():
                if expected_runs != self.count:
                    _logger.error('Failure')
                self.assertEqual(expected_runs, self.count)
|
||||
@tagged('-standard', 'test_retry', 'test_retry_failures')
class TestRetrySubtestFailures(TestRetryCommon):
    """Subtest variants that must still fail after every retry."""

    def test_retry_subtest_failure_one(self):
        # only the second iteration opens a (failing) subtest
        for iteration in range(3):
            if iteration != 1:
                continue
            with self.subTest():
                _logger.error('Failure')
                self.assertFalse(1 == 1)

    def test_retry_subtest_failure_all(self):
        # every iteration opens a failing subtest
        for _ in range(3):
            with self.subTest():
                _logger.error('Failure')
                self.assertFalse(1 == 1)
|
|
@ -0,0 +1,535 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import contextlib
|
||||
import difflib
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from pathlib import PurePath
|
||||
from unittest import SkipTest, skip
|
||||
from unittest.mock import patch
|
||||
|
||||
from odoo.tests.case import TestCase
|
||||
from odoo.tests.common import BaseCase, TransactionCase, users, warmup
|
||||
from odoo.tests.result import OdooTestResult
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
from odoo.tests import MetaCase
|
||||
|
||||
|
||||
if sys.version_info >= (3, 8):
    # this is mainly to ensure that simple tests will continue to work even if BaseCase should be used
    # this only works if doClassCleanup is available on testCase because of the vendoring of suite.py.
    # this test will only work in python 3.8 +
    class TestTestSuite(TestCase, metaclass=MetaCase):

        def test_test_suite(self):
            """ Check that OdooSuite handles unittest.TestCase correctly. """

        def get_method_additional_tags(self, method):
            # hook consulted by the Odoo test loader; a plain unittest
            # TestCase carries no extra tags
            return []
|
||||
class TestRunnerLoggingCommon(TransactionCase):
    """
    The purpose of this class is to do some "metatesting": it actually checks
    that on error, the runner logged the error with the right file reference.
    This is mainly to avoid having errors in test/common.py or test/runner.py`.
    This kind of metatesting is tricky; in this case the logs are made outside
    of the test method, after the teardown actually.
    """

    def setUp(self):
        # expected_logs: ordered list of (level, message) the next error is
        # expected to produce; None disables the message check.
        self.expected_logs = None
        # expected_first_frame_methods: queue of method names expected as the
        # first traceback frame (used by setUp/tearDown subclasses);
        # None means "the test method itself".
        self.expected_first_frame_methods = None
        return super().setUp()

    def _addError(self, result, test, exc_info):
        # We use this hook to catch the logged error. It is initially called
        # post tearDown, and logs the actual errors. Because of our hack
        # tests.common._ErrorCatcher, the errors are logged directly. This is
        # still useful to test errors raised from tests. We cannot assert what
        # was logged after the test inside the test, though. This method can be
        # temporary renamed to test the real failure.
        try:
            self.test_result = result
            # while we are here, let's check that the first frame of the stack
            # is always inside the test method

            if exc_info:
                tb = exc_info[2]
                self._check_first_frame(tb)

            # intercept all ir_logging. We cannot use log catchers or other
            # fancy stuff because makeRecord is too low level.
            log_records = []

            def makeRecord(logger, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None):
                # capture every record instead of emitting it
                log_records.append({
                    'logger': logger, 'name': name, 'level': level, 'fn': fn, 'lno': lno,
                    'msg': msg % args, 'exc_info': exc_info, 'func': func, 'extra': extra, 'sinfo': sinfo,
                })

            def handle(logger, record):
                # disable error logging
                return

            fake_result = OdooTestResult()
            with patch('logging.Logger.makeRecord', makeRecord), patch('logging.Logger.handle', handle):
                super()._addError(fake_result, test, exc_info)

            self._check_log_records(log_records)

        except Exception as e:
            # we don't expect _feedErrorsToResult() to raise any exception, this
            # will make it more robust to future changes and eventual mistakes
            _logger.exception(e)

    def _check_first_frame(self, tb):
        """ Check that the first frame of the given traceback is the expected method name. """
        # the list expected_first_frame_methods allow to define a list of first
        # expected frame (useful for setup/teardown tests)
        if self.expected_first_frame_methods is None:
            expected_first_frame_method = self._testMethodName
        else:
            expected_first_frame_method = self.expected_first_frame_methods.pop(0)
        first_frame_method = tb.tb_frame.f_code.co_name
        if first_frame_method != expected_first_frame_method:
            self._log_error(f"Checking first tb frame: {first_frame_method} is not equal to {expected_first_frame_method}")

    def _check_log_records(self, log_records):
        """ Check that what was logged is what was expected. """
        # every record must originate from this module's logger, file and
        # the currently running test method
        for log_record in log_records:
            self._assert_log_equal(log_record, 'logger', _logger)
            self._assert_log_equal(log_record, 'name', 'odoo.addons.base.tests.test_test_suite')
            self._assert_log_equal(log_record, 'fn', __file__)
            self._assert_log_equal(log_record, 'func', self._testMethodName)

        if self.expected_logs is not None:
            # consume expectations in order; one (level, message) per record
            for log_record in log_records:
                level, msg = self.expected_logs.pop(0)
                self._assert_log_equal(log_record, 'level', level)
                self._assert_log_equal(log_record, 'msg', msg)

    def _assert_log_equal(self, log_record, key, expected):
        """ Check the content of a log record. """
        value = log_record[key]
        if key == 'msg':
            # normalise line numbers and local paths before comparing
            value = self._clean_message(value)
        if value != expected:
            if key != 'msg':
                self._log_error(f"Key `{key}` => `{value}` is not equal to `{expected}` \n {log_record['msg']}")
            else:
                # show a line diff for message mismatches, much easier to read
                diff = '\n'.join(difflib.ndiff(expected.splitlines(), value.splitlines()))
                self._log_error(f"Key `{key}` did not matched expected:\n{diff}")

    def _log_error(self, message):
        """ Log an actual error (about a log in a test that doesn't match expectations) """
        # we would just log, but using the test_result will help keeping the tests counters correct
        self.test_result.addError(self, (AssertionError, AssertionError(message), None))

    def _clean_message(self, message):
        # Normalise a logged message so it compares equal across machines:
        # line numbers, decorator-generated ids and local paths are replaced
        # with stable placeholders.
        root_path = PurePath(__file__).parents[4]  # removes /odoo/addons/base/tests/test_test_suite.py
        python_path = PurePath(contextlib.__file__).parent  # /usr/lib/pythonx.x, C:\\python\\Lib, ...
        message = re.sub(r'line \d+', 'line $line', message)
        message = re.sub(r'py:\d+', 'py:$line', message)
        message = re.sub(r'decorator-gen-\d+', 'decorator-gen-xxx', message)
        message = message.replace(f'"{root_path}', '"/root_path/odoo')
        message = message.replace(f'"{python_path}', '"/usr/lib/python')
        message = message.replace('\\', '/')
        return message
||||
|
||||
class TestRunnerLogging(TestRunnerLoggingCommon):
    # Each test below raises on purpose and (where checked) declares the
    # exact log output the runner must produce for it. The expected
    # traceback strings depend on this file's code structure, so the code
    # here must not be reshaped. No docstrings on test methods: they would
    # change the logged test description.

    def test_has_add_error(self):
        # the whole metatesting machinery relies on this hook existing
        self.assertTrue(hasattr(self, '_addError'))

    def test_raise(self):
        raise Exception('This is an error')

    def test_raise_subtest(self):
        """
        with subtest, we expect to have multiple errors, one per subtest
        """
        def make_message(message):
            return (
                f'''ERROR: Subtest TestRunnerLogging.test_raise_subtest (<subtest>)
Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_raise_subtest
    raise Exception('{message}')
Exception: {message}
''')
        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, make_message('This is an error')),
        ]
        with self.subTest():
            raise Exception('This is an error')

        # each subtest failure must be reported as soon as the subtest exits
        self.assertFalse(self.expected_logs, "Error should have been logged immediatly")

        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, make_message('This is an error2')),
        ]

        with self.subTest():
            raise Exception('This is an error2')

        self.assertFalse(self.expected_logs, "Error should have been logged immediatly")

    @users('__system__')
    @warmup
    def test_with_decorators(self):
        # note, this test may be broken with a decorator in decorator=5.0.5 since the behaviour changed
        # but decoratorx was not introduced yet.
        message = (
            '''ERROR: Subtest TestRunnerLogging.test_with_decorators (login='__system__')
Traceback (most recent call last):
  File "<decorator-gen-xxx>", line $line, in test_with_decorators
  File "/root_path/odoo/odoo/tests/common.py", line $line, in _users
    func(*args, **kwargs)
  File "<decorator-gen-xxx>", line $line, in test_with_decorators
  File "/root_path/odoo/odoo/tests/common.py", line $line, in warmup
    func(*args, **kwargs)
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_with_decorators
    raise Exception('This is an error')
Exception: This is an error
''')
        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, message),
        ]
        raise Exception('This is an error')

    def test_traverse_contextmanager(self):
        # the error fires while leaving the context manager; the traceback
        # must still be attributed to this test
        @contextmanager
        def assertSomething():
            yield
            raise Exception('This is an error')

        with assertSomething():
            pass

    def test_subtest_sub_call(self):
        # subTest entered from a nested call rather than the test body
        def func():
            with self.subTest():
                raise Exception('This is an error')

        func()

    def test_call_stack(self):
        message = (
            '''ERROR: TestRunnerLogging.test_call_stack
Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_call_stack
    alpha()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in alpha
    beta()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in beta
    gamma()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in gamma
    raise Exception('This is an error')
Exception: This is an error
''')
        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, message),
        ]

        def alpha():
            beta()

        def beta():
            gamma()

        def gamma():
            raise Exception('This is an error')

        alpha()

    def test_call_stack_context_manager(self):
        # a context manager in the middle of the call chain must not add
        # spurious frames to the reported traceback
        message = (
            '''ERROR: TestRunnerLogging.test_call_stack_context_manager
Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_call_stack_context_manager
    alpha()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in alpha
    beta()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in beta
    gamma()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in gamma
    raise Exception('This is an error')
Exception: This is an error
''')
        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, message),
        ]

        def alpha():
            beta()

        def beta():
            with self.with_user('admin'):
                gamma()
            return 0

        def gamma():
            raise Exception('This is an error')

        alpha()

    def test_call_stack_subtest(self):
        message = (
            '''ERROR: Subtest TestRunnerLogging.test_call_stack_subtest (<subtest>)
Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_call_stack_subtest
    alpha()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in alpha
    beta()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in beta
    gamma()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in gamma
    raise Exception('This is an error')
Exception: This is an error
''')
        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, message),
        ]

        def alpha():
            beta()

        def beta():
            with self.subTest():
                gamma()

        def gamma():
            raise Exception('This is an error')

        alpha()

    def test_assertQueryCount(self):
        message = (
            '''FAIL: Subtest TestRunnerLogging.test_assertQueryCount (<subtest>)
Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_assertQueryCount
    with self.assertQueryCount(system=0):
  File "/usr/lib/python/contextlib.py", line $line, in __exit__
    next(self.gen)
  File "/root_path/odoo/odoo/tests/common.py", line $line, in assertQueryCount
    self.fail(msg % (login, count, expected, funcname, filename, linenum))
AssertionError: Query count more than expected for user __system__: 1 > 0 in test_assertQueryCount at base/tests/test_test_suite.py:$line
''')
        # before 3.10 the traceback points at the statement inside the
        # `with` block rather than at the `with` line itself
        if self._python_version < (3, 10, 0):
            message = message.replace("with self.assertQueryCount(system=0):", "self.env.cr.execute('SELECT 1')")

        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, message),
        ]
        with self.assertQueryCount(system=0):
            self.env.cr.execute('SELECT 1')

    @users('__system__')
    @warmup
    def test_assertQueryCount_with_decorators(self):
        with self.assertQueryCount(system=0):
            self.env.cr.execute('SELECT 1')

    def test_reraise(self):
        # a bare re-raise must not add an extra frame or a chained section
        message = (
            '''ERROR: TestRunnerLogging.test_reraise
Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_reraise
    alpha()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in alpha
    beta()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in beta
    raise Exception('This is an error')
Exception: This is an error
''')
        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, message),
        ]

        def alpha():
            # pylint: disable=try-except-raise
            try:
                beta()
            except Exception:
                raise

        def beta():
            raise Exception('This is an error')

        alpha()

    def test_handle_error(self):
        # an exception raised while handling another must keep both
        # tracebacks, joined by the "During handling" section
        message = (
            '''ERROR: TestRunnerLogging.test_handle_error
Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in alpha
    beta()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in beta
    raise Exception('This is an error')
Exception: This is an error

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in test_handle_error
    alpha()
  File "/root_path/odoo/odoo/addons/base/tests/test_test_suite.py", line $line, in alpha
    raise Exception('This is an error2')
Exception: This is an error2
''')
        self.expected_logs = [
            (logging.INFO, '=' * 70),
            (logging.ERROR, message),
        ]

        def alpha():
            try:
                beta()
            except Exception:
                raise Exception('This is an error2')

        def beta():
            raise Exception('This is an error')

        alpha()
||||
|
||||
class TestRunnerLoggingSetup(TestRunnerLoggingCommon):
    # Frame reporting when setUp itself raises: the test method and
    # tearDown never run, cleanups still do (in reverse registration order).

    def setUp(self):
        super().setUp()
        # errors are reported in this order: the setUp failure first,
        # then the cleanups in LIFO order
        self.expected_first_frame_methods = [
            'setUp',
            'cleanupError2',
            'cleanupError',
        ]

        def cleanupError():
            raise Exception("This is a cleanup error")
        self.addCleanup(cleanupError)

        def cleanupError2():
            raise Exception("This is a second cleanup error")
        self.addCleanup(cleanupError2)

        raise Exception('This is a setup error')

    def test_raises_setup(self):
        # must never run because setUp raised
        _logger.error("This shouldn't be executed")

    def tearDown(self):
        # must never run because setUp raised
        _logger.error("This shouldn't be executed since setup failed")
||||
|
||||
class TestRunnerLoggingTeardown(TestRunnerLoggingCommon):
    """Error reporting when the test body (including subTests),
    ``tearDown`` and all cleanups raise.
    """

    def setUp(self):
        super().setUp()
        # Expected failure origins in reporting order: the two subTest
        # errors and the test error (all raised inside the test method),
        # then tearDown, then cleanups in reverse registration order.
        self.expected_first_frame_methods = [
            'test_raises_teardown',
            'test_raises_teardown',
            'test_raises_teardown',
            'tearDown',
            'cleanupError2',
            'cleanupError',
        ]

        def cleanupError():
            raise Exception("This is a cleanup error")
        self.addCleanup(cleanupError)

        def cleanupError2():
            raise Exception("This is a second cleanup error")
        self.addCleanup(cleanupError2)

    def tearDown(self):
        raise Exception('This is a tearDown error')

    def test_raises_teardown(self):
        # Each subTest failure is recorded separately; execution continues
        # past a failing subTest, so all three raises are reported.
        with self.subTest():
            raise Exception('This is a subTest error')
        with self.subTest():
            raise Exception('This is a second subTest error')
        raise Exception('This is a test error')
|
||||
|
||||
|
||||
class TestSubtests(BaseCase):
    """Checks how the parameters of nested ``subTest`` calls are merged
    into the subtest description."""

    def test_nested_subtests(self):
        # Inner parameters come first; outer-only keys are appended, and a
        # key present at both levels (here ``x``) keeps the inner value.
        with self.subTest(a=1, x=2):
            with self.subTest(b=3, x=4):
                description = self._subtest._subDescription()
                self.assertEqual(description, '(b=3, x=4, a=1)')
            with self.subTest(b=5, x=6):
                description = self._subtest._subDescription()
                self.assertEqual(description, '(b=5, x=6, a=1)')
|
||||
|
||||
|
||||
class TestClassSetup(BaseCase):
    """A SkipTest raised from setUpClass must mark every test of the class
    as skipped rather than errored."""

    @classmethod
    def setUpClass(cls):
        raise SkipTest('Skip this class')

    def test_method(self):
        # Never reached: setUpClass skips the whole class.
        pass
|
||||
|
||||
|
||||
class TestClassTeardown(BaseCase):
    """A SkipTest raised from tearDownClass: the tests themselves already
    ran, so the runner must handle the late skip gracefully."""

    @classmethod
    def tearDownClass(cls):
        raise SkipTest('Skip this class')

    def test_method(self):
        # Runs normally; the skip only happens during class teardown.
        pass
|
||||
|
||||
|
||||
class Test01ClassCleanups(BaseCase):
    """First half of a two-test pair: registers a class cleanup and
    records whether it ran; Test02ClassCleanupsCheck (which must execute
    afterwards) verifies the flags. Class cleanups were introduced in
    python3.8 but tests should remain compatible with python 3.7.
    """
    executed = False  # set by setUpClass
    cleanup = False   # set by the registered class cleanup

    @classmethod
    def setUpClass(cls):
        cls.executed = True

        def flag_cleanup_ran():
            cls.cleanup = True

        cls.addClassCleanup(flag_cleanup_ran)

    def test_dummy(self):
        # A test must exist so the class (and thus its cleanups) runs.
        pass
|
||||
|
||||
|
||||
class Test02ClassCleanupsCheck(BaseCase):
    """Second half of the class-cleanup pair: must run after
    Test01ClassCleanups and verify its class cleanup executed."""

    def test_classcleanups(self):
        # Fixed typos in the failure messages ("sence", "shoudl",
        # "cleanuped") and the wrong class name "TestClassCleanup".
        self.assertTrue(Test01ClassCleanups.executed, "This test only makes sense when executed after Test01ClassCleanups")
        self.assertTrue(Test01ClassCleanups.cleanup, "Test01ClassCleanups should have been cleaned up")
|
||||
|
||||
|
||||
# Class-level @skip: unittest must never execute the failing body below.
@skip
class TestSkipClass(BaseCase):
    def test_classcleanups(self):
        raise Exception('This should be skipped')
|
||||
|
||||
|
||||
# NOTE(review): class name has a typo ("Methof" -> "Method"); kept as-is
# since renaming could affect test selection by name/tag.
class TestSkipMethof(BaseCase):
    # Method-level @skip: only this test is skipped, not the class.
    @skip
    def test_skip_method(self):
        raise Exception('This should be skipped')
|
||||
|
|
@ -0,0 +1,403 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase, tagged, BaseCase
|
||||
from odoo.tests.tag_selector import TagsSelector
|
||||
|
||||
|
||||
@tagged('nodatabase')
class TestSetTags(TransactionCase):
    """Tests for the ``tagged`` class decorator: default tags, tag
    addition/removal (``-tag``), inheritance, and explicit overrides.
    Per the assertions below, every TransactionCase defaults to
    ``{'at_install', 'standard'}`` plus its module name.
    """

    def test_set_tags_empty(self):
        """Test the set_tags decorator with an empty set of tags"""

        @tagged()
        class FakeClass(TransactionCase):
            pass

        fc = FakeClass()

        # An empty @tagged() still leaves the defaults in place.
        self.assertTrue(hasattr(fc, 'test_tags'))
        self.assertEqual(fc.test_tags, {'at_install', 'standard'})
        self.assertEqual(fc.test_module, 'base')

    def test_set_tags_not_decorated(self):
        """Test that a TransactionCase has some test_tags by default"""

        class FakeClass(TransactionCase):
            pass

        fc = FakeClass()

        self.assertTrue(hasattr(fc, 'test_tags'))
        self.assertEqual(fc.test_tags, {'at_install', 'standard'})
        self.assertEqual(fc.test_module, 'base')

    def test_set_tags_single_tag(self):
        """Test the set_tags decorator with a single tag"""

        @tagged('slow')
        class FakeClass(TransactionCase):
            pass

        fc = FakeClass()

        self.assertEqual(fc.test_tags, {'at_install', 'standard', 'slow'})
        self.assertEqual(fc.test_module, 'base')

    def test_set_tags_multiple_tags(self):
        """Test the set_tags decorator with multiple tags"""

        @tagged('slow', 'nightly')
        class FakeClass(TransactionCase):
            pass

        fc = FakeClass()

        self.assertEqual(fc.test_tags, {'at_install', 'standard', 'slow', 'nightly'})
        self.assertEqual(fc.test_module, 'base')

    def test_inheritance(self):
        """Test inheritance when using the 'tagged' decorator"""

        @tagged('slow')
        class FakeClassA(TransactionCase):
            pass

        # Undecorated subclass inherits the parent's tags unchanged.
        class FakeClassC(FakeClassA):
            pass

        fc = FakeClassC()
        self.assertEqual(fc.test_tags, {'at_install', 'standard', 'slow'})

        # A decorated subclass can remove an inherited tag.
        @tagged('-standard')
        class FakeClassD(FakeClassA):
            pass

        fc = FakeClassD()
        self.assertEqual(fc.test_tags, {'at_install', 'slow'})

    def test_untagging(self):
        """Test that one can remove the 'standard' tag"""

        @tagged('-standard')
        class FakeClassA(TransactionCase):
            pass

        fc = FakeClassA()
        self.assertEqual(fc.test_tags, {'at_install'})
        self.assertEqual(fc.test_module, 'base')

        @tagged('-standard', '-base', '-at_install', 'post_install')
        class FakeClassB(TransactionCase):
            pass

        fc = FakeClassB()
        self.assertEqual(fc.test_tags, {'post_install'})

        @tagged('-standard', '-base', 'fast')
        class FakeClassC(TransactionCase):
            pass

        fc = FakeClassC()
        self.assertEqual(fc.test_tags, {'fast', 'at_install'})

    def test_parental_advisory(self):
        """Explicit test tags on the class should override anything
        """
        @tagged('flow')
        class FakeClassA(TransactionCase):
            pass

        # Setting test_tags directly on the class wins over the decorator
        # tags inherited from the parent.
        class FakeClassB(FakeClassA):
            test_tags = {'foo', 'bar'}

        self.assertEqual(FakeClassA().test_tags, {'standard', 'at_install', 'flow'})
        self.assertEqual(FakeClassB().test_tags, {'foo', 'bar'})
|
||||
|
||||
@tagged('nodatabase')
class TestSelector(TransactionCase):
    """Parser tests for TagsSelector. Per the assertions below, each
    parsed spec becomes a 5-tuple ``(tag, module, class, method, file)``
    placed in either ``include`` or ``exclude`` (``-`` prefix).
    """

    def test_selector_parser(self):
        """Test the parser part of the TagsSelector class"""

        tags = TagsSelector('+slow')
        self.assertEqual({('slow', None, None, None, None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('+slow,nightly')
        self.assertEqual({('slow', None, None, None, None), ('nightly', None, None, None, None)}, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('+slow,-standard')
        self.assertEqual({('slow', None, None, None, None), }, tags.include)
        self.assertEqual({('standard', None, None, None, None), }, tags.exclude)

        # same with space after the comma
        tags = TagsSelector('+slow, -standard')
        self.assertEqual({('slow', None, None, None, None), }, tags.include)
        self.assertEqual({('standard', None, None, None, None), }, tags.exclude)

        # same with space before and after the comma
        tags = TagsSelector('+slow , -standard')
        self.assertEqual({('slow', None, None, None, None), }, tags.include)
        self.assertEqual({('standard', None, None, None, None), }, tags.exclude)

        tags = TagsSelector('+slow ,-standard,+js')
        self.assertEqual({('slow', None, None, None, None), ('js', None, None, None, None)}, tags.include)
        self.assertEqual({('standard', None, None, None, None), }, tags.exclude)

        # without +
        tags = TagsSelector('slow, ')
        self.assertEqual({('slow', None, None, None, None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        # duplicates
        tags = TagsSelector('+slow,-standard, slow,-standard ')
        self.assertEqual({('slow', None, None, None, None), }, tags.include)
        self.assertEqual({('standard', None, None, None, None), }, tags.exclude)

        tags = TagsSelector('')
        self.assertEqual(set(), tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('/module')  # all standard test of a module
        self.assertEqual({('standard', 'module', None, None, None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('/module/tests/test_file.py')  # all standard test of a module
        self.assertEqual({('standard', None, None, None, 'module.tests.test_file'), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('*/module')  # all tests of a module
        self.assertEqual({(None, 'module', None, None, None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector(':class')  # all standard test of a class
        self.assertEqual({('standard', None, 'class', None, None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('.method')
        self.assertEqual({('standard', None, None, 'method', None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector(':class.method')
        self.assertEqual({('standard', None, 'class', 'method', None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('/module:class.method')  # only a specific test func in a module (standard)
        self.assertEqual({('standard', 'module', 'class', 'method', None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('*/module:class.method')  # only a specific test func in a module
        self.assertEqual({(None, 'module', 'class', 'method', None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('-/module:class.method')  # disable a specific test func in a module
        self.assertEqual({('standard', None, None, None, None), }, tags.include)  # all standard
        self.assertEqual({(None, 'module', 'class', 'method', None), }, tags.exclude)  # except the test func

        tags = TagsSelector('-*/module:class.method')
        self.assertEqual({('standard', None, None, None, None), }, tags.include)
        self.assertEqual({(None, 'module', 'class', 'method', None), }, tags.exclude)

        tags = TagsSelector('tag/module')
        self.assertEqual({('tag', 'module', None, None, None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('tag.method')
        self.assertEqual({('tag', None, None, 'method', None), }, tags.include)
        self.assertEqual(set(), tags.exclude)

        tags = TagsSelector('*/module,-standard')  # all non standard test of a module
        self.assertEqual({(None, 'module', None, None, None), }, tags.include)  # all in module
        self.assertEqual({('standard', None, None, None, None), }, tags.exclude)  # except standard ones
|
||||
|
||||
|
||||
@tagged('nodatabase')
class TestSelectorSelection(TransactionCase):
    """Behavioral tests for ``TagsSelector.check`` against test classes
    carrying various combinations of tags."""

    def test_selector_selection(self):
        """Test check_tags use cases"""
        class Test_A(TransactionCase):
            pass

        @tagged('stock')
        class Test_B(BaseCase):
            pass

        @tagged('stock', 'slow')
        class Test_C(BaseCase):
            pass

        @tagged('standard', 'slow')
        class Test_D(BaseCase):
            pass

        @tagged('-at_install', 'post_install')
        class Test_E(TransactionCase):
            pass

        no_tags_obj = Test_A()
        stock_tag_obj = Test_B()
        multiple_tags_obj = Test_C()
        multiple_tags_standard_obj = Test_D()
        post_install_obj = Test_E()

        # if 'standard' is not explicitly removed, tests without tags are
        # considered tagged standard and they are run by default if
        # not explicitly deselected with '-standard' or if 'standard' is not
        # selected along with another test tag
        # same as "--test-tags=''" parameters:
        tags = TagsSelector('')
        self.assertFalse(tags.check(no_tags_obj))

        # same as "--test-tags '+slow'":
        tags = TagsSelector('+slow')
        self.assertFalse(tags.check(no_tags_obj))

        # same as "--test-tags '+slow,+fake'":
        tags = TagsSelector('+slow,fake')
        self.assertFalse(tags.check(no_tags_obj))

        # same as "--test-tags '+slow,+standard'":
        tags = TagsSelector('slow,standard')
        # BUGFIX: was `self.assertTrue(no_tags_obj)`, which only asserted
        # that the object is truthy (always true) instead of exercising
        # the selector.
        self.assertTrue(tags.check(no_tags_obj))

        # same as "--test-tags '+slow,-standard'":
        tags = TagsSelector('slow,-standard')
        self.assertFalse(tags.check(no_tags_obj))

        # same as "--test-tags '-slow,-standard'":
        tags = TagsSelector('-slow,-standard')
        self.assertFalse(tags.check(no_tags_obj))

        # same as "--test-tags '-slow,+standard'":
        tags = TagsSelector('-slow,+standard')
        self.assertTrue(tags.check(no_tags_obj))

        tags = TagsSelector('')
        self.assertFalse(tags.check(stock_tag_obj))

        tags = TagsSelector('slow')
        self.assertFalse(tags.check(stock_tag_obj))

        tags = TagsSelector('standard')
        self.assertTrue(tags.check(stock_tag_obj))

        tags = TagsSelector('slow,standard')
        self.assertTrue(tags.check(stock_tag_obj))

        tags = TagsSelector('slow,-standard')
        self.assertFalse(tags.check(stock_tag_obj))

        tags = TagsSelector('+stock')
        self.assertTrue(tags.check(stock_tag_obj))

        tags = TagsSelector('stock,fake')
        self.assertTrue(tags.check(stock_tag_obj))

        tags = TagsSelector('stock,standard')
        self.assertTrue(tags.check(stock_tag_obj))

        tags = TagsSelector('-stock')
        self.assertFalse(tags.check(stock_tag_obj))

        tags = TagsSelector('')
        self.assertFalse(tags.check(multiple_tags_obj))

        tags = TagsSelector('-stock')
        self.assertFalse(tags.check(multiple_tags_obj))

        tags = TagsSelector('-slow')
        self.assertFalse(tags.check(multiple_tags_obj))

        tags = TagsSelector('slow')
        self.assertTrue(tags.check(multiple_tags_obj))

        tags = TagsSelector('slow,stock')
        self.assertTrue(tags.check(multiple_tags_obj))

        # An exclusion beats any inclusion.
        tags = TagsSelector('-slow,stock')
        self.assertFalse(tags.check(multiple_tags_obj))

        tags = TagsSelector('slow,stock,-slow')
        self.assertFalse(tags.check(multiple_tags_obj))

        tags = TagsSelector('')
        self.assertFalse(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('standard')
        self.assertTrue(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('slow')
        self.assertTrue(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('slow,fake')
        self.assertTrue(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('-slow')
        self.assertFalse(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('-standard')
        self.assertFalse(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('-slow,-standard')
        self.assertFalse(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('standard,-slow')
        self.assertFalse(tags.check(multiple_tags_standard_obj))

        tags = TagsSelector('slow,-standard')
        self.assertFalse(tags.check(multiple_tags_standard_obj))

        # Mimic the real post_install use case
        # That uses a second tags selector
        tags = TagsSelector('standard')
        position = TagsSelector('post_install')
        self.assertTrue(tags.check(post_install_obj) and position.check(post_install_obj))

    def test_selector_parser_parameters(self):
        """Bracketed parameters attached to matching specs must be
        collected on the checked object as ``_test_params``."""
        tags = ','.join([
            '/base:FakeClassA[failfast=0,filter=-livechat]',
            #'/base:FakeClassA[filter=[-barecode,-stock_x]]',
            '/other[notForThisClass]',
            '-/base:FakeClassA[arg1,arg2]',
        ])
        tags = TagsSelector(tags)
        class FakeClassA(TransactionCase):
            pass

        fc = FakeClassA()
        tags.check(fc)
        # Only the specs that actually match the class contribute params,
        # keyed by their +/- sign.
        self.assertEqual(fc._test_params, [('+', 'failfast=0,filter=-livechat'), ('-', 'arg1,arg2')])

    def test_negative_parameters_translate(self):
        tags = TagsSelector('.test_negative_parameters_translate')
        self.assertTrue(tags.check(self), "Sanity check")
        self.assertEqual(self._test_params, [])

        tags = TagsSelector('/other_module,-.test_negative_parameters_translate[someparam]')
        self.assertFalse(tags.check(self), "we don't expect a negative parameter to enable the test if not enabled in other tags")
        self.assertEqual(self._test_params, [])

        tags = TagsSelector('/base,-.test_negative_parameters_translate[someparam]')
        self.assertTrue(tags.check(self), "A negative parametric tag should not disable the test")
        self.assertEqual(self._test_params, [('-', 'someparam')])

        tags = TagsSelector('-.test_negative_parameters_translate[someparam]')
        # Fixed message typo: "edfault" -> "default".
        self.assertTrue(tags.check(self), "we don't expect a single negative parameter to disable the test that should run by default")
        self.assertEqual(self._test_params, [('-', 'someparam')])

        tags = TagsSelector('/base,-.test_negative_parameters_translate')
        self.assertFalse(tags.check(self), "Sanity check, a negative parametric tag without params still disable the test")
        self.assertEqual(self._test_params, [])

        tags = TagsSelector('.test_negative_parameters_translate[-someparam]')
        self.assertTrue(tags.check(self), "A parametric tag should enable test")
        self.assertEqual(self._test_params, [('+', '-someparam')])
|
||||
|
||||
class TestTestClass(BaseCase):
    def test_canonical_tag(self):
        # canonical_tag identifies a test as <file>:<class>.<method>.
        self.assertEqual(self.canonical_tag, '/base/tests/test_tests_tags.py:TestTestClass.test_canonical_tag')

    # NOTE(review): not named test_*, so unittest will not collect it as a
    # test — presumably this overrides a logging hook of BaseCase called
    # while test_canonical_tag runs (the expected tag points there).
    # Verify the hook name against BaseCase.
    def get_log_metadata(self):
        self.assertEqual(self.log_metadata['canonical_tag'], '/base/tests/test_tests_tags.py:TestTestClass.test_canonical_tag')
|
||||
1329
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_translate.py
Normal file
1329
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_translate.py
Normal file
File diff suppressed because it is too large
Load diff
67
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_tz.py
Normal file
67
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_tz.py
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
import datetime
|
||||
import logging
|
||||
import pytz
|
||||
from unittest.mock import patch
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.tools._monkeypatches_pytz import _tz_mapping
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestTZ(TransactionCase):
    """Tests for the pytz monkeypatch mapping deprecated/legacy timezone
    names to their modern equivalents (tools/_monkeypatches_pytz.py)."""

    def test_tz_legacy(self):
        d = datetime.datetime(1969, 7, 16)
        # See https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
        def assertTZEqual(tz1, tz2):
            # Compare UTC offsets at a fixed historical date rather than
            # internal pytz structures (see comment below).
            self.assertEqual(tz1.localize(d).strftime('%z'), tz2.localize(d).strftime('%z'))

        # in some versions of tzdata the timezones are not symlinks, as an example in 2023c-0ubuntu0.20.04.1
        # this has a side effect of slight differences in timezone seconds, breaking the following assertions
        # in some cases:
        #
        # self.assertEqual(tz1._utcoffset, tz2._utcoffset)
        # if hasattr(tz2, '_transition_info'):
        #     self.assertEqual(tz1._transition_info, tz2._transition_info)
        #
        # the first one is more robust

        for source, target in _tz_mapping.items():
            with self.subTest(source=source, target=target):
                if source == 'Pacific/Enderbury':  # this one was wrong in some version of tzdata
                    continue
                try:
                    target_tz = pytz.timezone(target)
                except pytz.UnknownTimeZoneError:
                    _logger.info("Skipping test for %s -> %s, target does not exist", source, target)
                    continue
                assertTZEqual(pytz.timezone(source), target_tz)

    def test_dont_adapt_available_tz(self):
        # The mapping must only kick in for names pytz does NOT know:
        # America/New_York exists, so mapping it to UTC must be ignored.
        with patch.dict(_tz_mapping, {
            'DeprecatedUtc': 'UTC',
            'America/New_York': 'UTC',
        }):
            self.assertNotIn('DeprecatedUtc', pytz.all_timezones_set, 'DeprecatedUtc is not available')
            self.assertEqual(pytz.timezone('DeprecatedUtc'), pytz.timezone('UTC'), 'DeprecatedUtc does not exist and should have been replaced with UTC')
            self.assertIn('America/New_York', pytz.all_timezones_set, 'America/New_York is available')
            self.assertNotEqual(pytz.timezone('America/New_York'), pytz.timezone('UTC'), 'America/New_York exists and should not have been replaced with UTC')

    def test_cannot_set_deprecated_timezone(self):
        # this should be ok
        self.env.user.tz = "America/New_York"
        # Only meaningful when tzdata-legacy is absent (US/Eastern missing).
        if "US/Eastern" not in pytz.all_timezones:
            with self.assertRaises(ValueError):
                self.env.user.tz = "US/Eastern"

    def test_partner_with_old_tz(self):
        # this test makes sense after ubuntu noble without tzdata-legacy installed
        partner = self.env['res.partner'].create({'name': 'test', 'tz': 'UTC'})
        # Bypass the ORM to simulate pre-existing data with a legacy tz name.
        self.env.cr.execute("""UPDATE res_partner set tz='US/Eastern' WHERE id=%s""", (partner.id,))
        partner.invalidate_recordset()
        self.assertEqual(partner.tz, 'US/Eastern')  # tz was updated despite selection not existing, but data was not migrated
        # comparing with 'America/New_York' see tools/_monkeypatches_pytz.py for mapping
        expected_offset = datetime.datetime.now(pytz.timezone('America/New_York')).strftime('%z')
        # offset will be -0400 in summer, -0500 in winter
        self.assertEqual(partner.tz_offset, expected_offset, "We don't expect pytz.timezone to fail if the timezone diseapeared when chaging os version")
|
||||
|
|
@ -0,0 +1,86 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
# This assumes an existing but uninitialized database.
|
||||
|
||||
from contextlib import contextmanager
|
||||
import unittest
|
||||
|
||||
from odoo import api, registry, SUPERUSER_ID
|
||||
from odoo.tests import common
|
||||
from odoo.tests.common import BaseCase
|
||||
|
||||
from odoo.modules.registry import Registry
|
||||
|
||||
|
||||
@contextmanager
def environment():
    """Yield a superuser Environment backed by a fresh cursor on the
    current test database; the cursor is committed and closed when the
    context block exits.
    """
    db_registry = registry(common.get_db_name())
    with db_registry.cursor() as cr:
        yield api.Environment(cr, SUPERUSER_ID, {})
|
||||
|
||||
|
||||
# Module under test and the model it defines; the module ships with the
# test addons available through `odoo.tests`.
MODULE = 'test_uninstall'
MODEL = 'test_uninstall.model'
|
||||
|
||||
|
||||
class TestUninstall(BaseCase):
    """
    Test the install/uninstall of a test module. The module is available in
    `odoo.tests` which should be present in the addons-path.

    test_01_install must run before test_02_uninstall (ordering by name).
    """

    def test_01_install(self):
        """ Check a few things showing the module is installed. """
        with environment() as env:
            module = env['ir.module.module'].search([('name', '=', MODULE)])
            # Was a bare `assert`, which is silently stripped under
            # `python -O` and gives no useful failure message.
            self.assertEqual(len(module), 1, "expected exactly one '%s' module record" % MODULE)
            module.button_install()
        # Reload the registry so the freshly installed module is applied.
        Registry.new(common.get_db_name(), update_module=True)

        with environment() as env:
            self.assertIn('test_uninstall.model', env.registry)
            self.assertTrue(env['ir.model.data'].search([('module', '=', MODULE)]))
            self.assertTrue(env['ir.model.fields'].search([('model', '=', MODEL)]))

            # The module's SQL constraints on res_users must exist.
            env.cr.execute(
                r"""
                SELECT conname
                  FROM pg_constraint
                 WHERE conrelid = 'res_users'::regclass
                   AND conname LIKE 'res\_users\_test\_uninstall\_res\_user\_%'
                """
            )
            existing_constraints = [r[0] for r in env.cr.fetchall()]
            self.assertEqual(len(existing_constraints), 4, existing_constraints)

    def test_02_uninstall(self):
        """ Check a few things showing the module is uninstalled. """
        with environment() as env:
            module = env['ir.module.module'].search([('name', '=', MODULE)])
            self.assertEqual(len(module), 1, "expected exactly one '%s' module record" % MODULE)
            module.button_uninstall()
        Registry.new(common.get_db_name(), update_module=True)

        with environment() as env:
            self.assertNotIn('test_uninstall.model', env.registry)
            self.assertFalse(env['ir.model.data'].search([('module', '=', MODULE)]))
            self.assertFalse(env['ir.model.fields'].search([('model', '=', MODEL)]))

            # All module-owned SQL constraints must have been dropped.
            env.cr.execute(
                r"""
                SELECT conname
                  FROM pg_constraint
                 WHERE conrelid = 'res_users'::regclass
                   AND conname LIKE 'res\_users\_test\_uninstall\_res\_user\_%'
                """
            )
            remaining_constraints = [r[0] for r in env.cr.fetchall()]
            self.assertFalse(remaining_constraints)
|
||||
|
||||
|
||||
# Allow running this file directly with the stock unittest runner.
if __name__ == '__main__':
    unittest.main()
|
||||
|
|
@ -0,0 +1,308 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo.tests.common import TransactionCase
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo import Command
|
||||
|
||||
|
||||
class TestHasGroup(TransactionCase):
|
||||
def setUp(self):
|
||||
super(TestHasGroup, self).setUp()
|
||||
|
||||
self.group0 = 'test_user_has_group.group0'
|
||||
self.group1 = 'test_user_has_group.group1'
|
||||
group0, group1 = self.env['res.groups']._load_records([
|
||||
dict(xml_id=self.group0, values={'name': 'group0'}),
|
||||
dict(xml_id=self.group1, values={'name': 'group1'}),
|
||||
])
|
||||
|
||||
self.test_user = self.env['res.users'].create({
|
||||
'login': 'testuser',
|
||||
'partner_id': self.env['res.partner'].create({
|
||||
'name': "Strawman Test User"
|
||||
}).id,
|
||||
'groups_id': [Command.set([group0.id])]
|
||||
})
|
||||
|
||||
self.grp_internal_xml_id = 'base.group_user'
|
||||
self.grp_internal = self.env.ref(self.grp_internal_xml_id)
|
||||
self.grp_portal_xml_id = 'base.group_portal'
|
||||
self.grp_portal = self.env.ref(self.grp_portal_xml_id)
|
||||
self.grp_public_xml_id = 'base.group_public'
|
||||
self.grp_public = self.env.ref(self.grp_public_xml_id)
|
||||
|
||||
def test_env_uid(self):
|
||||
Users = self.env['res.users'].with_user(self.test_user)
|
||||
self.assertTrue(
|
||||
Users.has_group(self.group0),
|
||||
"the test user should belong to group0"
|
||||
)
|
||||
self.assertFalse(
|
||||
Users.has_group(self.group1),
|
||||
"the test user should *not* belong to group1"
|
||||
)
|
||||
|
||||
def test_record(self):
|
||||
self.assertTrue(
|
||||
self.test_user.has_group(self.group0),
|
||||
"the test user should belong to group0",
|
||||
)
|
||||
self.assertFalse(
|
||||
self.test_user.has_group(self.group1),
|
||||
"the test user shoudl not belong to group1"
|
||||
)
|
||||
|
||||
def test_portal_creation(self):
|
||||
"""Here we check that portal user creation fails if it tries to create a user
|
||||
who would also have group_user by implied_group.
|
||||
Otherwise, it succeeds with the groups we asked for.
|
||||
"""
|
||||
grp_public = self.env.ref('base.group_public')
|
||||
grp_test_portal_xml_id = 'test_user_has_group.portal_implied_group'
|
||||
grp_test_portal = self.env['res.groups']._load_records([
|
||||
dict(xml_id=grp_test_portal_xml_id, values={'name': 'Test Group Portal'})
|
||||
])
|
||||
grp_test_internal1 = self.env['res.groups']._load_records([
|
||||
dict(xml_id='test_user_has_group.internal_implied_group1', values={'name': 'Test Group Itnernal 1'})
|
||||
])
|
||||
grp_test_internal2_xml_id = 'test_user_has_group.internal_implied_group2'
|
||||
grp_test_internal2 = self.env['res.groups']._load_records([
|
||||
dict(xml_id=grp_test_internal2_xml_id, values={'name': 'Test Group Internal 2'})
|
||||
])
|
||||
self.grp_portal.implied_ids = grp_test_portal
|
||||
|
||||
grp_test_internal1.implied_ids = False
|
||||
grp_test_internal2.implied_ids = False
|
||||
|
||||
portal_user = self.env['res.users'].create({
|
||||
'login': 'portalTest',
|
||||
'name': 'Portal test',
|
||||
'sel_groups_%s_%s_%s' % (self.grp_internal.id, self.grp_portal.id, grp_public.id): self.grp_portal.id,
|
||||
'sel_groups_%s_%s' % (grp_test_internal1.id, grp_test_internal2.id): grp_test_internal2.id,
|
||||
})
|
||||
|
||||
self.assertTrue(
|
||||
portal_user.has_group(self.grp_portal_xml_id),
|
||||
"The portal user should belong to '%s'" % self.grp_portal_xml_id,
|
||||
)
|
||||
self.assertTrue(
|
||||
portal_user.has_group(grp_test_portal_xml_id),
|
||||
"The portal user should belong to '%s'" % grp_test_portal_xml_id,
|
||||
)
|
||||
self.assertTrue(
|
||||
portal_user.has_group(grp_test_internal2_xml_id),
|
||||
"The portal user should belong to '%s'" % grp_test_internal2_xml_id
|
||||
)
|
||||
self.assertFalse(
|
||||
portal_user.has_group(self.grp_internal_xml_id),
|
||||
"The portal user should not belong to '%s'" % self.grp_internal_xml_id
|
||||
)
|
||||
|
||||
portal_user.unlink() # otherwise, badly modifying the implication would raise
|
||||
|
||||
grp_test_internal1.implied_ids = self.grp_internal
|
||||
grp_test_internal2.implied_ids = self.grp_internal
|
||||
|
||||
with self.assertRaises(ValidationError): # current group implications forbid to create a portal user
|
||||
portal_user = self.env['res.users'].create({
|
||||
'login': 'portalFail',
|
||||
'name': 'Portal fail',
|
||||
'sel_groups_%s_%s_%s' % (self.grp_internal.id, self.grp_portal.id, grp_public.id): self.grp_portal.id,
|
||||
'sel_groups_%s_%s' % (grp_test_internal1.id, grp_test_internal2.id): grp_test_internal2.id,
|
||||
})
|
||||
|
||||
def test_portal_write(self):
|
||||
"""Check that adding a new group to a portal user works as expected,
|
||||
except if it implies group_user/public, in chich case it should raise.
|
||||
"""
|
||||
grp_test_portal = self.env["res.groups"].create({"name": "implied by portal"})
|
||||
self.grp_portal.implied_ids = grp_test_portal
|
||||
|
||||
portal_user = self.env['res.users'].create({
|
||||
'login': 'portalTest2',
|
||||
'name': 'Portal test 2',
|
||||
'groups_id': [Command.set([self.grp_portal.id])],
|
||||
})
|
||||
|
||||
self.assertEqual(
|
||||
portal_user.groups_id, (self.grp_portal + grp_test_portal),
|
||||
"The portal user should have the implied group.",
|
||||
)
|
||||
|
||||
grp_fail = self.env["res.groups"].create(
|
||||
{"name": "fail", "implied_ids": [Command.set([self.grp_internal.id])]})
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
portal_user.write({'groups_id': [Command.link(grp_fail.id)]})
|
||||
|
||||
def test_two_user_types(self):
|
||||
#Create a user with two groups of user types kind (Internal and Portal)
|
||||
grp_test = self.env['res.groups']._load_records([
|
||||
dict(xml_id='test_two_user_types.implied_groups', values={'name': 'Test Group'})
|
||||
])
|
||||
grp_test.implied_ids += self.grp_internal
|
||||
grp_test.implied_ids += self.grp_portal
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
self.env['res.users'].create({
|
||||
'login': 'test_two_user_types',
|
||||
'name': "Test User with two user types",
|
||||
'groups_id': [Command.set([grp_test.id])]
|
||||
})
|
||||
|
||||
#Add a user with portal to the group Internal
|
||||
test_user = self.env['res.users'].create({
|
||||
'login': 'test_user_portal',
|
||||
'name': "Test User with two user types",
|
||||
'groups_id': [Command.set([self.grp_portal.id])]
|
||||
})
|
||||
with self.assertRaises(ValidationError):
|
||||
self.grp_internal.users = [Command.link(test_user.id)]
|
||||
|
||||
    def test_two_user_types_implied_groups(self):
        """Contrarily to test_two_user_types, we simply add an implied_id to a group.
        This will trigger the addition of the relevant users to the relevant groups;
        if, say, this was done in SQL and thus bypassing the ORM, it would bypass the constraints
        and field recomputations, and thus give us a case uncovered by the aforementioned test.
        """
        # A group implying "internal user": its members become internal users.
        grp_test = self.env["res.groups"].create(
            {"name": "test", "implied_ids": [Command.set([self.grp_internal.id])]})

        test_user = self.env['res.users'].create({
            'login': 'test_user_portal',
            'name': "Test User with one user types",
            'groups_id': [Command.set([grp_test.id])]
        })

        # Making grp_test also imply "portal" would give test_user two user
        # types at once; the constraint must reject it even though the user
        # record itself is untouched. The savepoint keeps the transaction
        # usable after the expected failure.
        with self.assertRaisesRegex(ValidationError, "The user cannot have more than one user types"), self.env.cr.savepoint():
            grp_test.write({'implied_ids': [Command.link(self.grp_portal.id)]})

        # Stored computed fields depending on groups_id (on users) and on
        # users (on groups): if implied membership were propagated via raw
        # SQL, these would not be recomputed.
        self.env["ir.model.fields"].create(
            {
                "name": "x_group_names",
                "model_id": self.env.ref("base.model_res_users").id,
                "state": "manual",
                "field_description": "A computed field that depends on groups_id",
                "compute": "for r in self: r['x_group_names'] = ', '.join(r.groups_id.mapped('name'))",
                "depends": "groups_id",
                "store": True,
                "ttype": "char",
            }
        )
        self.env["ir.model.fields"].create(
            {
                "name": "x_user_names",
                "model_id": self.env.ref("base.model_res_groups").id,
                "state": "manual",
                "field_description": "A computed field that depends on users",
                "compute": "for r in self: r['x_user_names'] = ', '.join(r.users.mapped('name'))",
                "depends": "users",
                "store": True,
                "ttype": "char",
            }
        )

        # Adding a harmless implied group must flow through the ORM and
        # trigger recomputation on both sides of the m2m.
        grp_additional = self.env["res.groups"].create({"name": "additional"})
        grp_test.write({'implied_ids': [Command.link(grp_additional.id)]})

        self.assertIn(grp_additional.name, test_user.x_group_names)
        self.assertIn(test_user.name, grp_additional.x_user_names)
    def test_demote_user(self):
        """When a user is demoted to the status of portal/public,
        we should strip him of all his (previous) rights
        """
        group_0 = self.env.ref(self.group0)  # the group to which test_user already belongs
        group_U = self.env["res.groups"].create({"name": "U", "implied_ids": [Command.set([self.grp_internal.id])]})
        self.grp_internal.implied_ids = False  # only there to simplify the test by not having to care about its trans_implied_ids

        # Sanity check before demotion: the user holds both explicit groups
        # plus the internal-user group implied by group_U.
        self.test_user.write({'groups_id': [Command.link(group_U.id)]})
        self.assertEqual(
            self.test_user.groups_id, (group_0 + group_U + self.grp_internal),
            "We should have our 2 groups and the implied user group",
        )

        # Now we demote him. The JS framework sends 3 and 4 commands,
        # which is what we write here, but it should work even with a 5 command or whatever.
        self.test_user.write({'groups_id': [
            Command.unlink(self.grp_internal.id),
            Command.unlink(self.grp_public.id),
            Command.link(self.grp_portal.id),
        ]})

        # if we screw up the removing groups/adding the implied ids, we could end up in two situations:
        # 1. we have a portal user with way too much rights (e.g. 'Contact Creation', which does not imply any other group)
        # 2. because a group may be (transitively) implying group_user, then it would raise an exception
        # so as a compromise we remove all groups when demoting a user
        # (even technical display groups, e.g. TaxB2B, which could be re-added later)
        self.assertEqual(
            self.test_user.groups_id, (self.grp_portal),
            "Here the portal group does not imply any other group, so we should only have this group.",
        )
    def test_implied_groups(self):
        """ We check that the adding of implied ids works correctly for normal users and portal users.
        In the second case, working normally means raising if a group implies to give 'group_user'
        rights to a portal user.
        """
        U = self.env["res.users"]
        G = self.env["res.groups"]
        group_user = self.env.ref('base.group_user')
        group_portal = self.env.ref('base.group_portal')
        group_no_one = self.env.ref('base.group_no_one')

        # two independent one-step implication chains: AA -> A and BB -> B
        group_A = G.create({"name": "A"})
        group_AA = G.create({"name": "AA", "implied_ids": [Command.set([group_A.id])]})
        group_B = G.create({"name": "B"})
        group_BB = G.create({"name": "BB", "implied_ids": [Command.set([group_B.id])]})

        # user_a is a normal user, so we expect groups to be added when we add them,
        # as well as 'implied_groups'; otherwise nothing else should happen.
        # By contrast, for a portal user we want implied groups not to be added
        # if and only if it would not give group_user (or group_public) privileges
        user_a = U.create({"name": "a", "login": "a", "groups_id": [Command.set([group_AA.id, group_user.id])]})
        self.assertEqual(user_a.groups_id, (group_AA + group_A + group_user + group_no_one))

        user_b = U.create({"name": "b", "login": "b", "groups_id": [Command.set([group_portal.id, group_AA.id])]})
        self.assertEqual(user_b.groups_id, (group_AA + group_A + group_portal))

        # user_b is not an internal user, but giving it a new group just added a new group
        (user_a + user_b).write({"groups_id": [Command.link(group_BB.id)]})
        self.assertEqual(user_a.groups_id, (group_AA + group_A + group_BB + group_B + group_user + group_no_one))
        self.assertEqual(user_b.groups_id, (group_AA + group_A + group_BB + group_B + group_portal))

        # now we create a group that implies the group_user
        # adding it to a user should work normally, whereas adding it to a portal user should raise
        group_C = G.create({"name": "C", "implied_ids": [Command.set([group_user.id])]})

        user_a.write({"groups_id": [Command.link(group_C.id)]})
        self.assertEqual(user_a.groups_id, (group_AA + group_A + group_BB + group_B + group_C + group_user + group_no_one))

        with self.assertRaises(ValidationError):
            user_b.write({"groups_id": [Command.link(group_C.id)]})
    def test_has_group_cleared_cache_on_write(self):
        """Writing on a group, or calling call_cache_clearing_methods(),
        must invalidate the ormcache backing user.has_group().
        """
        self.registry._clear_cache()
        self.assertFalse(self.registry._Registry__cache, "Ensure ormcache is empty")

        def populate_cache():
            # has_group() is cached at the registry level; calling it once
            # must leave an entry in the ormcache
            self.test_user.has_group('test_user_has_group.group0')
            self.assertTrue(self.registry._Registry__cache, "user.has_group cache must be populated")

        populate_cache()

        self.env.ref(self.group0).write({"share": True})
        self.assertFalse(self.registry._Registry__cache, "Writing on group must invalidate user.has_group cache")

        populate_cache()
        # call_cache_clearing_methods is called in res.groups.write to invalidate
        # cache before calling its parent class method (`odoo.models.Model.write`)
        # as explained in the `res.group.write` comment.
        # This verifies that calling `call_cache_clearing_methods()` invalidates
        # the ormcache of method `user.has_group()`
        self.env['ir.model.access'].call_cache_clearing_methods()
        self.assertFalse(
            self.registry._Registry__cache,
            "call_cache_clearing_methods() must invalidate user.has_group cache"
        )
4048
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_views.py
Normal file
4048
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_views.py
Normal file
File diff suppressed because it is too large
Load diff
238
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_xmlrpc.py
Normal file
238
odoo-bringout-oca-ocb-base/odoo/addons/base/tests/test_xmlrpc.py
Normal file
|
|
@ -0,0 +1,238 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import collections
|
||||
import time
|
||||
from xmlrpc.client import Binary
|
||||
|
||||
from odoo.exceptions import AccessDenied, AccessError
|
||||
from odoo.http import _request_stack
|
||||
|
||||
import odoo
|
||||
import odoo.tools
|
||||
from odoo.tests import common
|
||||
from odoo.service import common as auth, model
|
||||
from odoo.tools import DotDict
|
||||
|
||||
|
||||
@common.tagged('post_install', '-at_install')
class TestXMLRPC(common.HttpCase):
    """Exercise the external XML-RPC (and JSON-RPC) entry points: login,
    generic model calls, and the marshalling of the values returned over
    the wire (html fields, frozendict/defaultdict, control characters,
    binary data).
    """

    def setUp(self):
        super(TestXMLRPC, self).setUp()
        # uid used by every RPC call in these tests
        self.admin_uid = self.env.ref('base.user_admin').id

    def xmlrpc(self, model, method, *args, **kwargs):
        """Helper: run ``model.method(*args, **kwargs)`` over XML-RPC as admin."""
        return self.xmlrpc_object.execute_kw(
            common.get_db_name(), self.admin_uid, 'admin',
            model, method, args, kwargs
        )

    def test_01_xmlrpc_login(self):
        """ Try to login on the common service. """
        db_name = common.get_db_name()
        uid = self.xmlrpc_common.login(db_name, 'admin', 'admin')
        self.assertEqual(uid, self.admin_uid)

    def test_xmlrpc_ir_model_search(self):
        """ Try a search on the object service. """
        o = self.xmlrpc_object
        db_name = common.get_db_name()
        # `execute` accepts the positional form both with and without a
        # trailing keyword-arguments dict
        ids = o.execute(db_name, self.admin_uid, 'admin', 'ir.model', 'search', [])
        self.assertIsInstance(ids, list)
        ids = o.execute(db_name, self.admin_uid, 'admin', 'ir.model', 'search', [], {})
        self.assertIsInstance(ids, list)

    def test_xmlrpc_read_group(self):
        # smoke test: only checks that the call marshals and returns
        groups = self.xmlrpc_object.execute(
            common.get_db_name(), self.admin_uid, 'admin',
            'res.partner', 'read_group', [], ['is_company', 'color'], ['parent_id']
        )

    def test_xmlrpc_name_search(self):
        # smoke test: only checks that the call succeeds
        self.xmlrpc_object.execute(
            common.get_db_name(), self.admin_uid, 'admin',
            'res.partner', 'name_search', "admin"
        )

    def test_xmlrpc_html_field(self):
        # an html field's content must round-trip unaltered through RPC
        sig = '<p>bork bork bork <span style="font-weight: bork">bork</span><br></p>'
        r = self.env['res.users'].create({
            'name': 'bob',
            'login': 'bob',
            'signature': sig
        })
        self.assertEqual(str(r.signature), sig)
        [x] = self.xmlrpc('res.users', 'read', r.id, ['signature'])
        self.assertEqual(x['signature'], sig)

    def test_xmlrpc_frozendict_marshalling(self):
        """ Test that the marshalling of a frozendict object works properly over XMLRPC """
        self.env.ref('base.user_admin').tz = "Europe/Brussels"
        # context_get returns a frozendict; it must serialize like a plain dict
        ctx = self.xmlrpc_object.execute(
            common.get_db_name(), self.admin_uid, 'admin',
            'res.users', 'context_get',
        )
        self.assertEqual(ctx['lang'], 'en_US')
        self.assertEqual(ctx['tz'], 'Europe/Brussels')

    def test_xmlrpc_defaultdict_marshalling(self):
        """
        Test that the marshalling of a collections.defaultdict object
        works properly over XMLRPC
        """
        # patch context_get to return an empty defaultdict; the RPC layer
        # must serialize it as a plain (empty) dict
        self.patch(self.registry['res.users'], 'context_get',
                   odoo.api.model(lambda *_: collections.defaultdict(int)))
        self.assertEqual(self.xmlrpc('res.users', 'context_get'), {})

    def test_xmlrpc_remove_control_characters(self):
        # the control character is stored verbatim in the database, but is
        # stripped when the value is read over XML-RPC (XML 1.0 cannot
        # represent \x03)
        record = self.env['res.users'].create({
            'name': 'bob with a control character: \x03',
            'login': 'bob',
        })
        self.assertEqual(record.name, 'bob with a control character: \x03')
        [record_data] = self.xmlrpc('res.users', 'read', record.id, ['name'])
        self.assertEqual(record_data['name'], 'bob with a control character: ')

    def test_jsonrpc_read_group(self):
        # smoke test of the same read_group call over JSON-RPC
        self._json_call(
            common.get_db_name(), self.admin_uid, 'admin',
            'res.partner', 'read_group', [], ['is_company', 'color'], ['parent_id']
        )

    def test_jsonrpc_name_search(self):
        # well that's some sexy sexy call right there
        self._json_call(
            common.get_db_name(),
            self.admin_uid, 'admin',
            'res.partner', 'name_search', 'admin'
        )

    def _json_call(self, *args):
        """Helper: POST a JSON-RPC 2.0 ``object.execute`` request carrying *args*."""
        self.opener.post("http://%s:%s/jsonrpc" % (common.HOST, odoo.tools.config['http_port']), json={
            'jsonrpc': '2.0',
            'id': None,
            'method': 'call',
            'params': {
                'service': 'object',
                'method': 'execute',
                'args': args
            }
        })

    def test_xmlrpc_attachment_raw(self):
        # \x01 is a control character (stripped on read), \x09 (tab) survives
        ids = self.env['ir.attachment'].create({'name': 'n', 'raw': b'\x01\x09'}).ids
        [att] = self.xmlrpc_object.execute(
            common.get_db_name(), self.admin_uid, 'admin',
            'ir.attachment', 'read', ids, ['raw'])
        self.assertEqual(att['raw'], '\t',
            "on read, binary data should be decoded as a string and stripped from control character")
# really just for the test cursor
@common.tagged('post_install', '-at_install')
class TestAPIKeys(common.HttpCase):
    """RPC authentication via password and via API keys: key creation,
    use, revocation (by owner / admin / third party), and behaviour for
    deactivated users.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # dedicated user with a known password; the distinctive timezone is
        # later asserted to prove calls really ran as this user
        cls._user = cls.env['res.users'].create({
            'name': "Bylan",
            'login': 'byl',
            'password': 'ananananan',
            'tz': 'Australia/Eucla',
        })

    def setUp(self):
        super().setUp()
        # needs a fake request in order to call methods protected with check_identity
        fake_req = DotDict({
            # various things go and access request items
            'httprequest': DotDict({
                'environ': {'REMOTE_ADDR': 'localhost'},
                'cookies': {},
            }),
            # bypass check_identity flow
            'session': {'identity-check-last': time.time()},
            'geoip': {},
        })
        _request_stack.push(fake_req)
        self.addCleanup(_request_stack.pop)

    def test_trivial(self):
        # password authentication works and RPC calls run as that user
        uid = auth.dispatch('authenticate', [self.env.cr.dbname, 'byl', 'ananananan', {}])
        self.assertEqual(uid, self._user.id)

        ctx = model.dispatch('execute_kw', [
            self.env.cr.dbname, uid, 'ananananan',
            'res.users', 'context_get', []
        ])
        self.assertEqual(ctx['tz'], 'Australia/Eucla')

    def test_wrongpw(self):
        # User.authenticate raises but RPC.authenticate returns False
        uid = auth.dispatch('authenticate', [self.env.cr.dbname, 'byl', 'aws', {}])
        self.assertFalse(uid)
        with self.assertRaises(AccessDenied):
            model.dispatch('execute_kw', [
                self.env.cr.dbname, self._user.id, 'aws',
                'res.users', 'context_get', []
            ])

    def test_key(self):
        env = self.env(user=self._user)
        # the generated key is only exposed through the wizard's returned context
        r = env['res.users.apikeys.description'].create({
            'name': 'a',
        }).make_key()
        k = r['context']['default_key']

        # both the password and the API key authenticate the user
        uid = auth.dispatch('authenticate', [self.env.cr.dbname, 'byl', 'ananananan', {}])
        self.assertEqual(uid, self._user.id)

        uid = auth.dispatch('authenticate', [self.env.cr.dbname, 'byl', k, {}])
        self.assertEqual(uid, self._user.id)

        # the key is also valid as the password argument of execute_kw
        ctx = model.dispatch('execute_kw', [
            self.env.cr.dbname, uid, k,
            'res.users', 'context_get', []
        ])
        self.assertEqual(ctx['tz'], 'Australia/Eucla')

    def test_delete(self):
        env = self.env(user=self._user)
        env['res.users.apikeys.description'].create({'name': 'b',}).make_key()
        env['res.users.apikeys.description'].create({'name': 'b',}).make_key()
        env['res.users.apikeys.description'].create({'name': 'b',}).make_key()
        # NOTE(review): assumes search([]) yields exactly the three keys just
        # created, in a stable order — confirm record rules / default _order
        k0, k1, k2 = env['res.users.apikeys'].search([])

        # user can remove their own keys
        k0.remove()
        self.assertFalse(k0.exists())

        # admin can remove user keys
        k1.with_user(self.env.ref('base.user_admin')).remove()
        self.assertFalse(k1.exists())

        # other user can't remove user keys
        u = self.env['res.users'].create({
            'name': 'a',
            'login': 'a',
            'groups_id': self.env.ref('base.group_user').ids,
        })
        with self.assertRaises(AccessError):
            k2.with_user(u).remove()

    def test_disabled(self):
        env = self.env(user=self._user)
        k = env['res.users.apikeys.description'].create({'name': 'b',}).make_key()['context']['default_key']

        # archiving the user must disable both credentials
        self._user.active = False

        with self.assertRaises(AccessDenied):
            model.dispatch('execute_kw', [
                self.env.cr.dbname, self._user.id, 'ananananan',
                'res.users', 'context_get', []
            ])

        with self.assertRaises(AccessDenied):
            model.dispatch('execute_kw', [
                self.env.cr.dbname, self._user.id, k,
                'res.users', 'context_get', []
            ])
Binary file not shown.
|
After Width: | Height: | Size: 2 KiB |
Loading…
Add table
Add a link
Reference in a new issue