mirror of
https://github.com/bringout/oca-technical.git
synced 2026-04-19 06:52:09 +02:00
Initial commit: OCA Technical packages (595 packages)
This commit is contained in:
commit
2cc02aac6e
24950 changed files with 2318079 additions and 0 deletions
|
|
@ -0,0 +1,9 @@
|
|||
from . import test_autovacuum
|
||||
from . import test_delayable
|
||||
from . import test_dependencies
|
||||
from . import test_job
|
||||
from . import test_job_auto_delay
|
||||
from . import test_job_channels
|
||||
from . import test_job_function
|
||||
from . import test_related_actions
|
||||
from . import test_delay_mocks
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
# Copyright 2016-2019 Camptocamp SA
|
||||
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
from odoo.tests import common
|
||||
|
||||
from odoo.addons.queue_job.job import Job
|
||||
|
||||
|
||||
class JobCommonCase(common.TransactionCase):
    """Base fixture shared by the queue_job test cases.

    Exposes the ``queue.job`` model, the ``res.users`` model and the
    ``testing_method`` job method as class attributes, plus a helper to
    create a stored job record.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.queue_job = cls.env["queue.job"]
        cls.user = cls.env["res.users"]
        cls.method = cls.env["test.queue.job"].testing_method

    def _create_job(self):
        """Create, store and return the DB record of a job on ``self.method``.

        Asserts that exactly one ``queue.job`` row backs the stored job.
        """
        job = Job(self.method)
        job.store()
        record = Job.db_record_from_uuid(self.env, job.uuid)
        # The stored job must be materialized as exactly one DB record.
        self.assertEqual(len(record), 1)
        return record
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
# Copyright 2019 Versada UAB
|
||||
# License LGPL-3 or later (https://www.gnu.org/licenses/lgpl).
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from .common import JobCommonCase
|
||||
|
||||
|
||||
class TestQueueJobAutovacuumCronJob(JobCommonCase):
    """Tests for the scheduled autovacuum of finished queue jobs."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.cron_job = cls.env.ref("queue_job.ir_cron_autovacuum_queue_jobs")

    def test_old_jobs_are_deleted_by_cron_job(self):
        """Triggering the cron job deletes jobs past the removal interval."""
        age = self.queue_job._removal_interval + 1
        done_on = datetime.now() - timedelta(days=age)
        record = self._create_job()
        record.write({"date_done": done_on})
        self.cron_job.method_direct_trigger()
        self.assertFalse(record.exists())

    def test_autovacuum(self):
        """autovacuum() removes jobs only once older than the default interval."""
        record = self._create_job()
        # Just under the default interval (29 days): the job is kept.
        record.write({"date_done": datetime.now() - timedelta(days=29)})
        self.env["queue.job"].autovacuum()
        self.assertEqual(len(self.env["queue.job"].search([])), 1)

        # Just over the default interval (31 days): the job is removed.
        record.write({"date_done": datetime.now() - timedelta(days=31)})
        self.env["queue.job"].autovacuum()
        self.assertEqual(len(self.env["queue.job"].search([])), 0)

    def test_autovacuum_multi_channel(self):
        """Each channel applies its own ``removal_interval``."""
        root = self.env.ref("queue_job.channel_root")
        slow_channel = self.env["queue.job.channel"].create(
            {"name": "60days", "removal_interval": 60, "parent_id": root.id}
        )
        month_old = datetime.now() - timedelta(days=31)
        default_job = self._create_job()
        default_job.write({"date_done": month_old})
        slow_job = self._create_job()
        slow_job.write(
            {"channel": slow_channel.complete_name, "date_done": month_old}
        )

        self.assertEqual(len(self.env["queue.job"].search([])), 2)
        # Only the job on the default channel is old enough to be removed.
        self.env["queue.job"].autovacuum()
        self.assertEqual(len(self.env["queue.job"].search([])), 1)

        # Age the 60-days-channel job past its own interval: removed too.
        slow_job.write({"date_done": datetime.now() - timedelta(days=61)})
        self.env["queue.job"].autovacuum()
        self.assertEqual(len(self.env["queue.job"].search([])), 0)
|
||||
|
|
@ -0,0 +1,373 @@
|
|||
# Copyright 2021 Guewen Baconnier
|
||||
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
import os
|
||||
from unittest import mock
|
||||
|
||||
import odoo.tests.common as common
|
||||
from odoo.tools import mute_logger
|
||||
|
||||
from odoo.addons.queue_job.delay import Delayable
|
||||
from odoo.addons.queue_job.job import identity_exact
|
||||
from odoo.addons.queue_job.tests.common import mock_with_delay, trap_jobs
|
||||
|
||||
|
||||
class TestDelayMocks(common.TransactionCase):
    """Tests for the job-testing helpers ``trap_jobs`` and ``mock_with_delay``,
    and for the direct-execution (no-delay) modes of the ``Delayable`` API.
    """

    def test_trap_jobs_on_with_delay_model(self):
        """trap_jobs() captures a job enqueued with_delay() on a model."""
        with trap_jobs() as trap:
            self.env["test.queue.job"].button_that_uses_with_delay()
            trap.assert_jobs_count(1)
            trap.assert_jobs_count(1, only=self.env["test.queue.job"].testing_method)

            # Properties below mirror the with_delay() arguments used by
            # button_that_uses_with_delay (defined in the test addon).
            trap.assert_enqueued_job(
                self.env["test.queue.job"].testing_method,
                args=(1,),
                kwargs={"foo": 2},
                properties=dict(
                    channel="root.test",
                    description="Test",
                    eta=15,
                    identity_key=identity_exact,
                    max_retries=1,
                    priority=15,
                ),
            )

    def test_trap_jobs_on_with_delay_recordset(self):
        """trap_jobs() captures a job enqueued with_delay() on a recordset."""
        recordset = self.env["test.queue.job"].create({"name": "test"})
        with trap_jobs() as trap:
            recordset.button_that_uses_with_delay()
            trap.assert_jobs_count(1)
            trap.assert_jobs_count(1, only=recordset.testing_method)

            trap.assert_enqueued_job(
                recordset.testing_method,
                args=(1,),
                kwargs={"foo": 2},
                properties=dict(
                    channel="root.test",
                    description="Test",
                    eta=15,
                    identity_key=identity_exact,
                    max_retries=1,
                    priority=15,
                ),
            )

    def test_trap_jobs_on_with_delay_recordset_no_properties(self):
        """Verify that trap_jobs() can omit properties"""
        recordset = self.env["test.queue.job"].create({"name": "test"})
        with trap_jobs() as trap:
            recordset.button_that_uses_with_delay()

            trap.assert_enqueued_job(
                recordset.testing_method,
                args=(1,),
                kwargs={"foo": 2},
            )

    def test_trap_jobs_on_with_delay_recordset_partial_properties(self):
        """Verify that trap_jobs() can check partially properties"""
        recordset = self.env["test.queue.job"].create({"name": "test"})
        with trap_jobs() as trap:
            recordset.button_that_uses_with_delay()

            # Only a subset of the job properties is asserted here.
            trap.assert_enqueued_job(
                recordset.testing_method,
                args=(1,),
                kwargs={"foo": 2},
                properties=dict(
                    description="Test",
                    eta=15,
                ),
            )

    def test_trap_with_identity_key(self):
        """Identity keys deduplicate trapped jobs until they are performed."""
        with trap_jobs() as trap:
            self.env["test.queue.job"].button_that_uses_with_delay()
            trap.assert_jobs_count(1)
            trap.assert_jobs_count(1, only=self.env["test.queue.job"].testing_method)

            trap.assert_enqueued_job(
                self.env["test.queue.job"].testing_method,
                args=(1,),
                kwargs={"foo": 2},
                properties=dict(
                    channel="root.test",
                    description="Test",
                    eta=15,
                    identity_key=identity_exact,
                    max_retries=1,
                    priority=15,
                ),
            )

            # Should not enqueue again
            self.env["test.queue.job"].button_that_uses_with_delay()
            trap.assert_jobs_count(1)

            trap.perform_enqueued_jobs()
            # Should no longer be enqueued
            trap.assert_jobs_count(0)

            # Can now requeue
            self.env["test.queue.job"].button_that_uses_with_delay()
            trap.assert_jobs_count(1)

    def test_trap_jobs_on_with_delay_assert_model_count_mismatch(self):
        """assert_jobs_count(only=...) fails when filtered on the wrong method."""
        recordset = self.env["test.queue.job"].create({"name": "test"})
        with trap_jobs() as trap:
            # Job enqueued on the model, counted against the recordset method.
            self.env["test.queue.job"].button_that_uses_with_delay()
            trap.assert_jobs_count(1)
            with self.assertRaises(AssertionError, msg="0 != 1"):
                trap.assert_jobs_count(1, only=recordset.testing_method)

    def test_trap_jobs_on_with_delay_assert_recordset_count_mismatch(self):
        """The converse: job on the recordset, counted against the model method."""
        recordset = self.env["test.queue.job"].create({"name": "test"})
        with trap_jobs() as trap:
            recordset.button_that_uses_with_delay()
            trap.assert_jobs_count(1)
            with self.assertRaises(AssertionError, msg="0 != 1"):
                trap.assert_jobs_count(
                    1, only=self.env["test.queue.job"].testing_method
                )

    def test_trap_jobs_on_with_delay_assert_model_enqueued_mismatch(self):
        """assert_enqueued_job error message shows expected vs. actual jobs."""
        recordset = self.env["test.queue.job"].create({"name": "test"})
        with trap_jobs() as trap:
            recordset.button_that_uses_with_delay()
            trap.assert_jobs_count(1)
            # Regex of the full failure message, including the actually
            # enqueued job (on the recordset, hence the %s record id).
            message = (
                r"Job <test\.queue.job\(\)>\.testing_method\(1, foo=2\) with "
                r"properties \(channel=root\.test, description=Test, eta=15, "
                "identity_key=<function identity_exact at 0x[0-9a-fA-F]+>, "
                "max_retries=1, priority=15\\) was not enqueued\\.\n"
                "Actual enqueued jobs:\n"
                r" \* <test.queue.job\(%s,\)>.testing_method\(1, foo=2\) with properties "
                r"\(priority=15, max_retries=1, eta=15, description=Test, channel=root.test, "
                r"identity_key=<function identity_exact at 0x[0-9a-fA-F]+>\)"
            ) % (recordset.id,)
            with self.assertRaisesRegex(AssertionError, message):
                trap.assert_enqueued_job(
                    self.env["test.queue.job"].testing_method,
                    args=(1,),
                    kwargs={"foo": 2},
                    properties=dict(
                        channel="root.test",
                        description="Test",
                        eta=15,
                        identity_key=identity_exact,
                        max_retries=1,
                        priority=15,
                    ),
                )

    def test_trap_jobs_on_with_delay_assert_recordset_enqueued_mismatch(self):
        """Mirror case: expected on the recordset, actually enqueued on the model."""
        recordset = self.env["test.queue.job"].create({"name": "test"})
        with trap_jobs() as trap:
            self.env["test.queue.job"].button_that_uses_with_delay()
            trap.assert_jobs_count(1)
            message = (
                r"Job <test\.queue.job\(%s,\)>\.testing_method\(1, foo=2\) with "
                r"properties \(channel=root\.test, description=Test, eta=15, "
                "identity_key=<function identity_exact at 0x[0-9a-fA-F]+>, "
                "max_retries=1, priority=15\\) was not enqueued\\.\n"
                "Actual enqueued jobs:\n"
                r" \* <test.queue.job\(\)>.testing_method\(1, foo=2\) with properties "
                r"\(priority=15, max_retries=1, eta=15, description=Test, channel=root.test, "
                r"identity_key=<function identity_exact at 0x[0-9a-fA-F]+>\)"
            ) % (recordset.id,)
            with self.assertRaisesRegex(AssertionError, message):
                trap.assert_enqueued_job(
                    recordset.testing_method,
                    args=(1,),
                    kwargs={"foo": 2},
                    properties=dict(
                        channel="root.test",
                        description="Test",
                        eta=15,
                        identity_key=identity_exact,
                        max_retries=1,
                        priority=15,
                    ),
                )

    def test_trap_jobs_on_graph(self):
        """trap_jobs() captures every job of a delayable chain (graph)."""
        with trap_jobs() as trap:
            self.env["test.queue.job"].button_that_uses_delayable_chain()
            trap.assert_jobs_count(3)
            trap.assert_jobs_count(2, only=self.env["test.queue.job"].testing_method)
            trap.assert_jobs_count(1, only=self.env["test.queue.job"].no_description)

            trap.assert_enqueued_job(
                self.env["test.queue.job"].testing_method,
                args=(1,),
                kwargs={"foo": 2},
                properties=dict(
                    channel="root.test",
                    description="Test",
                    eta=15,
                    identity_key=identity_exact,
                    max_retries=1,
                    priority=15,
                ),
            )
            trap.assert_enqueued_job(
                self.env["test.queue.job"].testing_method,
                args=("x",),
                kwargs={"foo": "y"},
            )
            trap.assert_enqueued_job(
                self.env["test.queue.job"].no_description,
            )

            trap.perform_enqueued_jobs()

    def test_trap_jobs_perform(self):
        """perform_enqueued_jobs() runs trapped jobs in dependency order."""
        with trap_jobs() as trap:
            model = self.env["test.queue.job"]
            model.with_delay(priority=1).create_ir_logging(
                "test_trap_jobs_perform single"
            )
            # Graph: node2 -> node3 -> node (each on_done adds a dependency).
            node = Delayable(model).create_ir_logging("test_trap_jobs_perform graph 1")
            node2 = Delayable(model).create_ir_logging("test_trap_jobs_perform graph 2")
            node3 = Delayable(model).create_ir_logging("test_trap_jobs_perform graph 3")
            node2.on_done(node3)
            node3.on_done(node)
            node2.delay()

            # jobs are not executed
            logs = self.env["ir.logging"].search(
                [
                    ("name", "=", "test_queue_job"),
                    ("func", "=", "create_ir_logging"),
                ],
                order="id asc",
            )
            self.assertEqual(len(logs), 0)

            trap.assert_jobs_count(4)

            # perform the jobs
            trap.perform_enqueued_jobs()

            trap.assert_jobs_count(0)

            logs = self.env["ir.logging"].search(
                [
                    ("name", "=", "test_queue_job"),
                    ("func", "=", "create_ir_logging"),
                ],
                order="id asc",
            )
            self.assertEqual(len(logs), 4)

            # check if they are executed in order
            self.assertEqual(logs[0].message, "test_trap_jobs_perform single")
            self.assertEqual(logs[1].message, "test_trap_jobs_perform graph 2")
            self.assertEqual(logs[2].message, "test_trap_jobs_perform graph 3")
            self.assertEqual(logs[3].message, "test_trap_jobs_perform graph 1")

    def test_mock_with_delay(self):
        """mock_with_delay() exposes the with_delay() and job-method mocks."""
        with mock_with_delay() as (delayable_cls, delayable):
            self.env["test.queue.job"].button_that_uses_with_delay()

            self.assertEqual(delayable_cls.call_count, 1)
            # arguments passed in 'with_delay()'
            delay_args, delay_kwargs = delayable_cls.call_args
            self.assertEqual(delay_args, (self.env["test.queue.job"],))
            self.assertDictEqual(
                delay_kwargs,
                {
                    "channel": "root.test",
                    "description": "Test",
                    "eta": 15,
                    "identity_key": identity_exact,
                    "max_retries": 1,
                    "priority": 15,
                },
            )

            # check what's passed to the job method 'testing_method'
            self.assertEqual(delayable.testing_method.call_count, 1)
            delay_args, delay_kwargs = delayable.testing_method.call_args
            self.assertEqual(delay_args, (1,))
            self.assertDictEqual(delay_kwargs, {"foo": 2})

    @mute_logger("odoo.addons.queue_job.utils")
    @mock.patch.dict(os.environ, {"QUEUE_JOB__NO_DELAY": "1"})
    def test_delay_graph_direct_exec_env_var(self):
        """With QUEUE_JOB__NO_DELAY set, a graph runs synchronously, in order."""
        node = Delayable(self.env["test.queue.job"]).create_ir_logging(
            "test_delay_graph_direct_exec 1"
        )
        node2 = Delayable(self.env["test.queue.job"]).create_ir_logging(
            "test_delay_graph_direct_exec 2"
        )
        node2.on_done(node)
        node2.delay()
        # jobs are executed directly
        logs = self.env["ir.logging"].search(
            [
                ("name", "=", "test_queue_job"),
                ("func", "=", "create_ir_logging"),
            ],
            order="id asc",
        )
        self.assertEqual(len(logs), 2)
        # check if they are executed in order
        self.assertEqual(logs[0].message, "test_delay_graph_direct_exec 2")
        self.assertEqual(logs[1].message, "test_delay_graph_direct_exec 1")

    @mute_logger("odoo.addons.queue_job.utils")
    def test_delay_graph_direct_exec_context_key(self):
        """The queue_job__no_delay context key also forces synchronous run."""
        node = Delayable(
            self.env["test.queue.job"].with_context(queue_job__no_delay=True)
        ).create_ir_logging("test_delay_graph_direct_exec 1")
        node2 = Delayable(self.env["test.queue.job"]).create_ir_logging(
            "test_delay_graph_direct_exec 2"
        )
        node2.on_done(node)
        node2.delay()
        # jobs are executed directly
        logs = self.env["ir.logging"].search(
            [
                ("name", "=", "test_queue_job"),
                ("func", "=", "create_ir_logging"),
            ],
            order="id asc",
        )
        self.assertEqual(len(logs), 2)
        # check if they are executed in order
        self.assertEqual(logs[0].message, "test_delay_graph_direct_exec 2")
        self.assertEqual(logs[1].message, "test_delay_graph_direct_exec 1")

    @mute_logger("odoo.addons.queue_job.utils")
    @mock.patch.dict(os.environ, {"QUEUE_JOB__NO_DELAY": "1"})
    def test_delay_with_delay_direct_exec_env_var(self):
        """With QUEUE_JOB__NO_DELAY set, with_delay() runs synchronously."""
        model = self.env["test.queue.job"]
        model.with_delay().create_ir_logging("test_delay_graph_direct_exec 1")
        # jobs are executed directly
        logs = self.env["ir.logging"].search(
            [
                ("name", "=", "test_queue_job"),
                ("func", "=", "create_ir_logging"),
            ],
            order="id asc",
        )
        self.assertEqual(len(logs), 1)
        self.assertEqual(logs[0].message, "test_delay_graph_direct_exec 1")

    @mute_logger("odoo.addons.queue_job.utils")
    def test_delay_with_delay_direct_exec_context_key(self):
        """The queue_job__no_delay context key makes with_delay() synchronous."""
        model = self.env["test.queue.job"].with_context(queue_job__no_delay=True)
        model.with_delay().create_ir_logging("test_delay_graph_direct_exec 1")
        # jobs are executed directly
        logs = self.env["ir.logging"].search(
            [
                ("name", "=", "test_queue_job"),
                ("func", "=", "create_ir_logging"),
            ],
            order="id asc",
        )
        self.assertEqual(len(logs), 1)
        self.assertEqual(logs[0].message, "test_delay_graph_direct_exec 1")
|
||||
|
|
@ -0,0 +1,304 @@
|
|||
# Copyright 2019 Camptocamp
|
||||
# Copyright 2019 Guewen Baconnier
|
||||
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
import odoo.tests.common as common
|
||||
|
||||
from odoo.addons.queue_job.delay import (
|
||||
Delayable,
|
||||
DelayableChain,
|
||||
DelayableGroup,
|
||||
chain,
|
||||
group,
|
||||
)
|
||||
|
||||
|
||||
class TestDelayable(common.TransactionCase):
    """Tests for building Delayable graphs (chains, groups, diamonds) and
    checking the dependency links of the jobs they generate."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.queue_job = cls.env["queue.job"]
        cls.test_model = cls.env["test.queue.job"]
        cls.method = cls.env["test.queue.job"].testing_method

    def job_node(self, id_):
        """Return a Delayable node wrapping ``testing_method(id_)``."""
        return Delayable(self.test_model).testing_method(id_)

    def assert_generated_job(self, *nodes):
        """Each node generated a job that is stored in the database."""
        for delayable in nodes:
            self.assertTrue(delayable._generated_job)
            generated = delayable._generated_job
            self.assertTrue(generated.db_record().id)

    def assert_depends_on(self, delayable, parent_delayables):
        """The node's job depends exactly on the parents' generated jobs."""
        expected = {parent._generated_job for parent in parent_delayables}
        self.assertEqual(delayable._generated_job._depends_on, expected)

    def assert_reverse_depends_on(self, delayable, child_delayables):
        """The node's job is depended on exactly by the children's jobs."""
        expected = {child._generated_job for child in child_delayables}
        self.assertEqual(
            set(delayable._generated_job._reverse_depends_on), expected
        )

    def assert_dependencies(self, nodes):
        """Verify the full graph. ``nodes`` maps child -> iterable of parents;
        reverse dependencies are derived and checked as well."""
        children_of = {}
        for child, parents in nodes.items():
            self.assert_depends_on(child, parents)
            for parent in parents:
                children_of.setdefault(parent, set()).add(child)
        for parent, children in children_of.items():
            self.assert_reverse_depends_on(parent, children)

    def test_delayable_delay_single(self):
        n1 = self.job_node(1)
        n1.delay()
        self.assert_generated_job(n1)

    def test_delayable_delay_on_done(self):
        n1, n2 = self.job_node(1), self.job_node(2)
        n1.on_done(n2).delay()
        self.assert_generated_job(n1, n2)
        self.assert_dependencies({n1: {}, n2: {n1}})

    def test_delayable_delay_done_multi(self):
        n1, n2, n3 = (self.job_node(i) for i in (1, 2, 3))
        n1.on_done(n2, n3).delay()
        self.assert_generated_job(n1, n2, n3)
        self.assert_dependencies({n1: {}, n2: {n1}, n3: {n1}})

    def test_delayable_delay_group(self):
        n1, n2, n3 = (self.job_node(i) for i in (1, 2, 3))
        DelayableGroup(n1, n2, n3).delay()
        self.assert_generated_job(n1, n2, n3)
        # Members of a group have no dependencies among themselves.
        self.assert_dependencies({n1: {}, n2: {}, n3: {}})

    def test_group_function(self):
        n1, n2, n3 = (self.job_node(i) for i in (1, 2, 3))
        group(n1, n2, n3).delay()
        self.assert_generated_job(n1, n2, n3)
        self.assert_dependencies({n1: {}, n2: {}, n3: {}})

    def test_delayable_delay_job_after_group(self):
        n1, n2, n3 = (self.job_node(i) for i in (1, 2, 3))
        DelayableGroup(n1, n2).on_done(n3).delay()
        self.assert_generated_job(n1, n2, n3)
        # n3 waits for every member of the group.
        self.assert_dependencies({n1: {}, n2: {}, n3: {n1, n2}})

    def test_delayable_delay_group_after_group(self):
        n1, n2, n3, n4 = (self.job_node(i) for i in (1, 2, 3, 4))
        first = DelayableGroup(n1, n2)
        second = DelayableGroup(n3, n4)
        first.on_done(second).delay()
        self.assert_generated_job(n1, n2, n3, n4)
        self.assert_dependencies(
            {
                n1: {},
                n2: {},
                n3: {n1, n2},
                n4: {n1, n2},
            }
        )

    def test_delayable_delay_implicit_group_after_group(self):
        n1, n2, n3, n4 = (self.job_node(i) for i in (1, 2, 3, 4))
        grp = DelayableGroup(n1, n2).on_done(n3, n4)
        grp.delay()
        self.assert_generated_job(n1, n2, n3, n4)
        self.assert_dependencies(
            {
                n1: {},
                n2: {},
                n3: {n1, n2},
                n4: {n1, n2},
            }
        )

    def test_delayable_delay_group_after_group_after_group(self):
        n1, n2, n3, n4 = (self.job_node(i) for i in (1, 2, 3, 4))
        grp1, grp2 = DelayableGroup(n1), DelayableGroup(n2)
        grp3, grp4 = DelayableGroup(n3), DelayableGroup(n4)
        grp1.on_done(grp2.on_done(grp3.on_done(grp4))).delay()
        self.assert_generated_job(n1, n2, n3, n4)
        self.assert_dependencies(
            {
                n1: {},
                n2: {n1},
                n3: {n2},
                n4: {n3},
            }
        )

    def test_delayable_diamond(self):
        n1, n2, n3, n4 = (self.job_node(i) for i in (1, 2, 3, 4))
        middle = DelayableGroup(n2, n3)
        middle.on_done(n4)
        n1.on_done(middle)
        n1.delay()
        self.assert_generated_job(n1, n2, n3, n4)
        self.assert_dependencies(
            {
                n1: {},
                n2: {n1},
                n3: {n1},
                n4: {n2, n3},
            }
        )

    def test_delayable_chain(self):
        n1, n2, n3 = (self.job_node(i) for i in (1, 2, 3))
        sequence = DelayableChain(n1, n2, n3)
        sequence.delay()
        self.assert_generated_job(n1, n2, n3)
        self.assert_dependencies(
            {
                n1: {},
                n2: {n1},
                n3: {n2},
            }
        )

    def test_chain_function(self):
        n1, n2, n3 = (self.job_node(i) for i in (1, 2, 3))
        sequence = chain(n1, n2, n3)
        sequence.delay()
        self.assert_generated_job(n1, n2, n3)
        self.assert_dependencies(
            {
                n1: {},
                n2: {n1},
                n3: {n2},
            }
        )

    def test_delayable_chain_after_job(self):
        n1, n2, n3, n4 = (self.job_node(i) for i in (1, 2, 3, 4))
        tail = DelayableChain(n2, n3, n4)
        n1.on_done(tail)
        n1.delay()
        self.assert_generated_job(n1, n2, n3, n4)
        self.assert_dependencies(
            {
                n1: {},
                n2: {n1},
                n3: {n2},
                n4: {n3},
            }
        )

    def test_delayable_chain_after_chain(self):
        n1, n2, n3, n4, n5, n6 = (self.job_node(i) for i in range(1, 7))
        head = DelayableChain(n1, n2, n3)
        tail = DelayableChain(n4, n5, n6)
        head.on_done(tail)
        head.delay()
        self.assert_generated_job(n1, n2, n3, n4, n5, n6)
        self.assert_dependencies(
            {
                n1: {},
                n2: {n1},
                n3: {n2},
                n4: {n3},
                n5: {n4},
                n6: {n5},
            }
        )

    def test_delayable_group_of_chain(self):
        n1, n2, n3, n4, n5, n6, n7, n8 = (
            self.job_node(i) for i in range(1, 9)
        )
        seq1, seq2 = DelayableChain(n1, n2), DelayableChain(n3, n4)
        seq3, seq4 = DelayableChain(n5, n6), DelayableChain(n7, n8)
        grp = DelayableGroup(seq1, seq2).on_done(seq3, seq4)
        grp.delay()
        self.assert_generated_job(n1, n2, n3, n4, n5, n6, n7, n8)
        # The tails of seq1/seq2 gate the heads of seq3/seq4.
        self.assert_dependencies(
            {
                n1: {},
                n3: {},
                n2: {n1},
                n4: {n3},
                n5: {n4, n2},
                n7: {n4, n2},
                n6: {n5},
                n8: {n7},
            }
        )

    def test_log_not_delayed(self):
        logger_name = "odoo.addons.queue_job"
        with self.assertLogs(logger_name, level="WARN") as test:
            # When a Delayable never gets a delay() call, a warning is
            # emitted when the GC collects it and calls __del__. GC timing
            # isn't predictable in a test, so __del__ is called directly.
            undelayed = self.job_node(1)
            undelayed.__del__()
            expected = (
                "WARNING:odoo.addons.queue_job.delay:Delayable "
                "Delayable(test.queue.job().testing_method((1,), {}))"
                " was prepared but never delayed"
            )
            self.assertEqual(test.output, [expected])

    def test_delay_job_already_exists(self):
        n1, n2 = self.job_node(1), self.job_node(2)
        # n2 is delayed on its own first, then again as part of n1's graph.
        n2.delay()
        n1.on_done(n2).delay()
        self.assert_generated_job(n1, n2)
        self.assert_dependencies({n1: {}, n2: {n1}})
|
||||
|
|
@ -0,0 +1,289 @@
|
|||
# Copyright 2019 Camptocamp SA
|
||||
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
|
||||
|
||||
import odoo.tests.common as common
|
||||
|
||||
from odoo.addons.queue_job.delay import DelayableGraph, chain, group
|
||||
from odoo.addons.queue_job.job import PENDING, WAIT_DEPENDENCIES, Job
|
||||
|
||||
|
||||
class TestJobDependencies(common.TransactionCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super().setUpClass()
|
||||
cls.queue_job = cls.env["queue.job"]
|
||||
cls.method = cls.env["test.queue.job"].testing_method
|
||||
|
||||
def test_depends_store(self):
|
||||
job_root = Job(self.method)
|
||||
job_lvl1_a = Job(self.method)
|
||||
job_lvl1_a.add_depends({job_root})
|
||||
job_lvl1_b = Job(self.method)
|
||||
job_lvl1_b.add_depends({job_root})
|
||||
job_lvl2_a = Job(self.method)
|
||||
job_lvl2_a.add_depends({job_lvl1_a})
|
||||
|
||||
# Jobs must be stored after the dependencies are set up.
|
||||
# (Or if not, a new store must be called on the parent)
|
||||
job_root.store()
|
||||
job_lvl1_a.store()
|
||||
job_lvl1_b.store()
|
||||
job_lvl2_a.store()
|
||||
|
||||
# test properties
|
||||
self.assertFalse(job_root.depends_on)
|
||||
|
||||
self.assertEqual(job_lvl1_a.depends_on, {job_root})
|
||||
self.assertEqual(job_lvl1_b.depends_on, {job_root})
|
||||
|
||||
self.assertEqual(job_lvl2_a.depends_on, {job_lvl1_a})
|
||||
|
||||
self.assertEqual(job_root.reverse_depends_on, {job_lvl1_a, job_lvl1_b})
|
||||
|
||||
self.assertEqual(job_lvl1_a.reverse_depends_on, {job_lvl2_a})
|
||||
self.assertFalse(job_lvl1_b.reverse_depends_on)
|
||||
|
||||
self.assertFalse(job_lvl2_a.reverse_depends_on)
|
||||
|
||||
# test DB state
|
||||
self.assertEqual(job_root.db_record().dependencies["depends_on"], [])
|
||||
self.assertEqual(
|
||||
sorted(job_root.db_record().dependencies["reverse_depends_on"]),
|
||||
sorted([job_lvl1_a.uuid, job_lvl1_b.uuid]),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
job_lvl1_a.db_record().dependencies["depends_on"], [job_root.uuid]
|
||||
)
|
||||
self.assertEqual(
|
||||
job_lvl1_a.db_record().dependencies["reverse_depends_on"], [job_lvl2_a.uuid]
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
job_lvl1_b.db_record().dependencies["depends_on"], [job_root.uuid]
|
||||
)
|
||||
self.assertEqual(job_lvl1_b.db_record().dependencies["reverse_depends_on"], [])
|
||||
|
||||
self.assertEqual(
|
||||
job_lvl2_a.db_record().dependencies["depends_on"], [job_lvl1_a.uuid]
|
||||
)
|
||||
self.assertEqual(job_lvl2_a.db_record().dependencies["reverse_depends_on"], [])
|
||||
|
||||
def test_depends_store_after(self):
|
||||
job_root = Job(self.method)
|
||||
job_root.store()
|
||||
job_a = Job(self.method)
|
||||
job_a.add_depends({job_root})
|
||||
job_a.store()
|
||||
|
||||
# as the reverse dependency has been added after the root job has been
|
||||
# stored, it is not reflected in DB
|
||||
self.assertEqual(job_root.db_record().dependencies["reverse_depends_on"], [])
|
||||
|
||||
# a new store will write it
|
||||
job_root.store()
|
||||
self.assertEqual(
|
||||
job_root.db_record().dependencies["reverse_depends_on"], [job_a.uuid]
|
||||
)
|
||||
|
||||
def test_depends_load(self):
|
||||
job_root = Job(self.method)
|
||||
job_a = Job(self.method)
|
||||
job_a.add_depends({job_root})
|
||||
|
||||
job_root.store()
|
||||
job_a.store()
|
||||
|
||||
read_job_root = Job.load(self.env, job_root.uuid)
|
||||
self.assertEqual(read_job_root.reverse_depends_on, {job_a})
|
||||
|
||||
read_job_a = Job.load(self.env, job_a.uuid)
|
||||
self.assertEqual(read_job_a.depends_on, {job_root})
|
||||
|
||||
def test_depends_enqueue_waiting_single(self):
|
||||
job_root = Job(self.method)
|
||||
job_a = Job(self.method)
|
||||
job_a.add_depends({job_root})
|
||||
|
||||
DelayableGraph._ensure_same_graph_uuid([job_root, job_a])
|
||||
|
||||
job_root.store()
|
||||
job_a.store()
|
||||
|
||||
self.assertEqual(job_a.state, WAIT_DEPENDENCIES)
|
||||
|
||||
# these are the steps run by RunJobController
|
||||
job_root.perform()
|
||||
job_root.set_done()
|
||||
job_root.store()
|
||||
self.env.flush_all()
|
||||
|
||||
job_root.enqueue_waiting()
|
||||
|
||||
# Will be picked up by the jobrunner.
|
||||
# Warning: as the state has been changed in memory but
|
||||
# not in the job_a instance, here, we re-read it.
|
||||
# In practice, it won't be an issue for the jobrunner.
|
||||
self.assertEqual(Job.load(self.env, job_a.uuid).state, PENDING)
|
||||
|
||||
def test_dependency_graph(self):
    """``dependency_graph`` exposes the same nodes/edges to every job of a
    graph, and two independent graphs stay isolated from each other."""

    def node_for(record_id):
        # Visualization payload rendered for one job in the graph widget.
        return {
            "id": record_id,
            "title": "<strong>Method used for tests</strong><br/>"
            "test.queue.job().testing_method()",
            "color": "#D2E5FF",
            "border": "#2B7CE9",
            "shadow": True,
        }

    # First graph: root -> (lvl1_a, lvl1_b) and lvl1_a -> lvl2_a.
    job_root = Job(self.method)
    job_lvl1_a = Job(self.method)
    job_lvl1_a.add_depends({job_root})
    job_lvl1_b = Job(self.method)
    job_lvl1_b.add_depends({job_root})
    job_lvl2_a = Job(self.method)
    job_lvl2_a.add_depends({job_lvl1_a})

    DelayableGraph._ensure_same_graph_uuid(
        [
            job_root,
            job_lvl1_a,
            job_lvl1_b,
            job_lvl2_a,
        ]
    )

    # Second, independent graph: root -> child.
    job_2_root = Job(self.method)
    job_2_child = Job(self.method)
    job_2_child.add_depends({job_2_root})

    DelayableGraph._ensure_same_graph_uuid([job_2_root, job_2_child])

    # Jobs must be stored after the dependencies are set up.
    # (Or if not, a new store must be called on the parent)
    for job in (
        job_root,
        job_lvl1_a,
        job_lvl1_b,
        job_lvl2_a,
        job_2_root,
        job_2_child,
    ):
        job.store()

    record_root = job_root.db_record()
    record_lvl1_a = job_lvl1_a.db_record()
    record_lvl1_b = job_lvl1_b.db_record()
    record_lvl2_a = job_lvl2_a.db_record()

    record_2_root = job_2_root.db_record()
    record_2_child = job_2_child.db_record()

    first_graph_records = [record_root, record_lvl1_a, record_lvl1_b, record_lvl2_a]
    expected_nodes = [node_for(record.id) for record in first_graph_records]
    expected_edges = sorted(
        [
            [record_root.id, record_lvl1_a.id],
            [record_lvl1_a.id, record_lvl2_a.id],
            [record_root.id, record_lvl1_b.id],
        ]
    )

    # Every member of the first graph sees the complete graph.
    for record in first_graph_records:
        self.assertEqual(
            sorted(record.dependency_graph["nodes"], key=lambda d: d["id"]),
            expected_nodes,
        )
        self.assertEqual(sorted(record.dependency_graph["edges"]), expected_edges)

    # The second graph does not leak into (or from) the first one.
    expected_nodes = [node_for(record_2_root.id), node_for(record_2_child.id)]
    expected_edges = sorted([[record_2_root.id, record_2_child.id]])

    for record in (record_2_root, record_2_child):
        self.assertEqual(
            sorted(record.dependency_graph["nodes"], key=lambda d: d["id"]),
            expected_nodes,
        )
        self.assertEqual(sorted(record.dependency_graph["edges"]), expected_edges)
|
||||
|
||||
def test_no_dependency_graph_single_job(self):
    """A single job has no graph"""
    # A job delayed on its own is not part of any graph: its record exposes
    # an empty dependency_graph and it carries no graph_uuid at all.
    job = self.env["test.queue.job"].with_delay().testing_method()
    self.assertEqual(job.db_record().dependency_graph, {})
    self.assertIsNone(job.graph_uuid)
|
||||
|
||||
def test_depends_graph_uuid(self):
    """All jobs in a graph share the same graph uuid"""
    model = self.env["test.queue.job"]
    # Two groups of two delayables, chained one after the other.
    delayables = [model.delayable().testing_method(n) for n in (1, 2, 3, 4)]
    first_group = group(delayables[0], delayables[1])
    second_group = group(delayables[2], delayables[3])
    chain(first_group, second_group).delay()

    jobs = [delayable._generated_job for delayable in delayables]

    # A non-empty uuid exists, it is unique across the whole graph, and it
    # matches what was persisted on each job's database record.
    self.assertTrue(jobs[0].graph_uuid)
    self.assertEqual(len({j.graph_uuid for j in jobs}), 1)
    for job in jobs:
        self.assertEqual(job.graph_uuid, job.db_record().graph_uuid)
|
||||
|
||||
def test_depends_graph_uuid_group(self):
    """All jobs in a group share the same graph uuid"""
    model = self.env["test.queue.job"]
    grp = group(
        model.delayable().testing_method(),
        model.delayable().testing_method(),
    )
    grp.delay()

    first, second = (d._generated_job for d in grp._delayables)

    # Both members received a uuid, and it is the same one.
    self.assertTrue(first.graph_uuid)
    self.assertTrue(second.graph_uuid)
    self.assertEqual(first.graph_uuid, second.graph_uuid)
|
||||
|
|
@ -0,0 +1,796 @@
|
|||
# Copyright 2016 Camptocamp SA
|
||||
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
import hashlib
|
||||
from datetime import datetime, timedelta
|
||||
from unittest import mock
|
||||
|
||||
import odoo.tests.common as common
|
||||
|
||||
from odoo.addons.queue_job import identity_exact
|
||||
from odoo.addons.queue_job.delay import DelayableGraph
|
||||
from odoo.addons.queue_job.exception import (
|
||||
FailedJobError,
|
||||
NoSuchJobError,
|
||||
RetryableJobError,
|
||||
)
|
||||
from odoo.addons.queue_job.job import (
|
||||
CANCELLED,
|
||||
DONE,
|
||||
ENQUEUED,
|
||||
FAILED,
|
||||
PENDING,
|
||||
RETRY_INTERVAL,
|
||||
STARTED,
|
||||
WAIT_DEPENDENCIES,
|
||||
Job,
|
||||
)
|
||||
|
||||
from .common import JobCommonCase
|
||||
|
||||
|
||||
class TestJobsOnTestingMethod(JobCommonCase):
    """Test Job"""

    def test_new_job(self):
        """
        Create a job
        """
        test_job = Job(self.method)
        self.assertEqual(test_job.func.__func__, self.method.__func__)

    def test_eta(self):
        """When an `eta` is datetime, it uses it"""
        now = datetime.now()
        method = self.env["res.users"].mapped
        job_a = Job(method, eta=now)
        self.assertEqual(job_a.eta, now)

    def test_eta_integer(self):
        """When an `eta` is an integer, it adds n seconds up to now"""
        # datetime is patched inside the queue_job.job module so that "now"
        # is deterministic for the assertion below.
        datetime_path = "odoo.addons.queue_job.job.datetime"
        with mock.patch(datetime_path, autospec=True) as mock_datetime:
            mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0)
            job_a = Job(self.method, eta=60)
            self.assertEqual(job_a.eta, datetime(2015, 3, 15, 16, 42, 0))

    def test_eta_timedelta(self):
        """When an `eta` is a timedelta, it adds it up to now"""
        datetime_path = "odoo.addons.queue_job.job.datetime"
        with mock.patch(datetime_path, autospec=True) as mock_datetime:
            mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0)
            delta = timedelta(hours=3)
            job_a = Job(self.method, eta=delta)
            self.assertEqual(job_a.eta, datetime(2015, 3, 15, 19, 41, 0))

    def test_perform_args(self):
        """perform() forwards positional and keyword arguments verbatim."""
        test_job = Job(self.method, args=("o", "k"), kwargs={"c": "!"})
        result = test_job.perform()
        self.assertEqual(result, (("o", "k"), {"c": "!"}))

    def test_retryable_error(self):
        """A RetryableJobError increments retry; past max_retries it fails."""
        test_job = Job(self.method, kwargs={"raise_retry": True}, max_retries=3)
        self.assertEqual(test_job.retry, 0)
        with self.assertRaises(RetryableJobError):
            test_job.perform()
        self.assertEqual(test_job.retry, 1)
        with self.assertRaises(RetryableJobError):
            test_job.perform()
        self.assertEqual(test_job.retry, 2)
        # third attempt reaches max_retries=3: the retryable error is
        # escalated to a FailedJobError
        with self.assertRaises(FailedJobError):
            test_job.perform()
        self.assertEqual(test_job.retry, 3)

    def test_infinite_retryable_error(self):
        """With max_retries=0, a retryable job keeps retrying forever."""
        test_job = Job(self.method, kwargs={"raise_retry": True}, max_retries=0)
        self.assertEqual(test_job.retry, 0)
        with self.assertRaises(RetryableJobError):
            test_job.perform()
        self.assertEqual(test_job.retry, 1)

    def test_on_instance_method(self):
        """A plain (non-model) instance method cannot be turned into a Job."""
        class A:
            def method(self):
                pass

        with self.assertRaises(TypeError):
            Job(A.method)

    def test_on_model_method(self):
        """A model method yields a Job carrying the model and method names."""
        job_ = Job(self.env["test.queue.job"].testing_method)
        self.assertEqual(job_.model_name, "test.queue.job")
        self.assertEqual(job_.method_name, "testing_method")

    def test_invalid_function(self):
        """A non-callable argument is rejected with a TypeError."""
        with self.assertRaises(TypeError):
            Job(1)

    def test_set_pending(self):
        """set_pending() resets dates and retry count, keeps the result."""
        job_a = Job(self.method)
        job_a.set_pending(result="test")
        self.assertEqual(job_a.state, PENDING)
        self.assertFalse(job_a.date_enqueued)
        self.assertFalse(job_a.date_started)
        self.assertEqual(job_a.retry, 0)
        self.assertEqual(job_a.result, "test")

    def test_set_enqueued(self):
        """set_enqueued() stamps date_enqueued but not date_started."""
        job_a = Job(self.method)
        datetime_path = "odoo.addons.queue_job.job.datetime"
        with mock.patch(datetime_path, autospec=True) as mock_datetime:
            mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0)
            job_a.set_enqueued()

        self.assertEqual(job_a.state, ENQUEUED)
        self.assertEqual(job_a.date_enqueued, datetime(2015, 3, 15, 16, 41, 0))
        self.assertFalse(job_a.date_started)

    def test_set_started(self):
        """set_started() stamps date_started with the current time."""
        job_a = Job(self.method)
        datetime_path = "odoo.addons.queue_job.job.datetime"
        with mock.patch(datetime_path, autospec=True) as mock_datetime:
            mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0)
            job_a.set_started()

        self.assertEqual(job_a.state, STARTED)
        self.assertEqual(job_a.date_started, datetime(2015, 3, 15, 16, 41, 0))

    def test_worker_pid(self):
        """When a job is started, it gets the PID of the worker that starts it"""
        method = self.env["res.users"].mapped
        job_a = Job(method)
        self.assertFalse(job_a.worker_pid)
        with mock.patch("os.getpid", autospec=True) as mock_getpid:
            mock_getpid.return_value = 99999
            job_a.set_started()
            self.assertEqual(job_a.worker_pid, 99999)

        # reset the pid
        job_a.set_pending()
        self.assertFalse(job_a.worker_pid)

    def test_set_done(self):
        """set_done() stamps date_done, computes exec_time, clears exc_info."""
        job_a = Job(self.method)
        job_a.date_started = datetime(2015, 3, 15, 16, 40, 0)
        datetime_path = "odoo.addons.queue_job.job.datetime"
        with mock.patch(datetime_path, autospec=True) as mock_datetime:
            mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0)
            job_a.set_done(result="test")

        self.assertEqual(job_a.state, DONE)
        self.assertEqual(job_a.result, "test")
        self.assertEqual(job_a.date_done, datetime(2015, 3, 15, 16, 41, 0))
        # exec_time = date_done - date_started (one minute here)
        self.assertEqual(job_a.exec_time, 60.0)
        self.assertFalse(job_a.exc_info)

    def test_set_failed(self):
        """set_failed() records the exception info, name and message."""
        job_a = Job(self.method)
        job_a.set_failed(
            exc_info="failed test",
            exc_name="FailedTest",
            exc_message="Sadly this job failed",
        )
        self.assertEqual(job_a.state, FAILED)
        self.assertEqual(job_a.exc_info, "failed test")
        self.assertEqual(job_a.exc_name, "FailedTest")
        self.assertEqual(job_a.exc_message, "Sadly this job failed")

    def test_postpone(self):
        """postpone() pushes the eta forward and keeps exc_info empty."""
        job_a = Job(self.method)
        datetime_path = "odoo.addons.queue_job.job.datetime"
        with mock.patch(datetime_path, autospec=True) as mock_datetime:
            mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0)
            job_a.postpone(result="test", seconds=60)

        self.assertEqual(job_a.eta, datetime(2015, 3, 15, 16, 42, 0))
        self.assertEqual(job_a.result, "test")
        self.assertFalse(job_a.exc_info)

    def test_company_simple(self):
        """The company set with with_company() survives a store/load cycle."""
        company = self.env.ref("base.main_company")
        eta = datetime.now() + timedelta(hours=5)
        test_job = Job(
            self.env["test.queue.job"].with_company(company).testing_method,
            args=("o", "k"),
            kwargs={"return_context": 1},
            priority=15,
            eta=eta,
            description="My description",
        )
        test_job.worker_pid = 99999  # normally set on "set_start"
        test_job.store()
        job_read = Job.load(self.env, test_job.uuid)
        self.assertEqual(test_job.func.__func__, job_read.func.__func__)
        # the testing method returns its context; the reloaded job must run
        # with the company that was active at delay time
        result_ctx = job_read.func(*tuple(test_job.args), **test_job.kwargs)
        self.assertEqual(result_ctx.get("allowed_company_ids"), company.ids)

    def test_company_complex(self):
        """A non-default company set at delay time is restored on load."""
        company1 = self.env.ref("base.main_company")
        company2 = company1.create({"name": "Queue job company"})
        companies = company1 | company2
        self.env.user.write({"company_ids": [(6, False, companies.ids)]})
        # Ensure the main company still the first
        self.assertEqual(self.env.user.company_id, company1)
        eta = datetime.now() + timedelta(hours=5)
        test_job = Job(
            self.env["test.queue.job"].with_company(company2).testing_method,
            args=("o", "k"),
            kwargs={"return_context": 1},
            priority=15,
            eta=eta,
            description="My description",
        )
        test_job.worker_pid = 99999  # normally set on "set_start"
        test_job.store()
        job_read = Job.load(self.env, test_job.uuid)
        self.assertEqual(test_job.func.__func__, job_read.func.__func__)
        result_ctx = job_read.func(*tuple(test_job.args), **test_job.kwargs)
        self.assertEqual(result_ctx.get("allowed_company_ids"), company2.ids)

    def test_store(self):
        """store() creates exactly one queue.job record for the job."""
        test_job = Job(self.method)
        test_job.store()
        stored = self.queue_job.search([("uuid", "=", test_job.uuid)])
        self.assertEqual(len(stored), 1)

    def test_store_extra_data(self):
        """Extra payload set by the job function is updated on each store."""
        test_job = Job(self.method)
        test_job.store()
        stored = self.queue_job.search([("uuid", "=", test_job.uuid)])
        self.assertEqual(stored.additional_info, "JUST_TESTING")
        test_job.set_failed(exc_info="failed test", exc_name="FailedTest")
        test_job.store()
        # drop the record cache so the re-read hits the database
        stored.invalidate_recordset()
        self.assertEqual(stored.additional_info, "JUST_TESTING_BUT_FAILED")

    def test_read(self):
        """Every persisted field round-trips through store() + load()."""
        eta = datetime.now() + timedelta(hours=5)
        test_job = Job(
            self.method,
            args=("o", "k"),
            kwargs={"c": "!"},
            priority=15,
            eta=eta,
            description="My description",
        )
        test_job.worker_pid = 99999  # normally set on "set_start"
        test_job.company_id = self.env.ref("base.main_company").id
        test_job.store()
        job_read = Job.load(self.env, test_job.uuid)
        self.assertEqual(test_job.uuid, job_read.uuid)
        self.assertEqual(test_job.model_name, job_read.model_name)
        self.assertEqual(test_job.func.__func__, job_read.func.__func__)
        self.assertEqual(test_job.args, job_read.args)
        self.assertEqual(test_job.kwargs, job_read.kwargs)
        self.assertEqual(test_job.method_name, job_read.method_name)
        self.assertEqual(test_job.description, job_read.description)
        self.assertEqual(test_job.state, job_read.state)
        self.assertEqual(test_job.priority, job_read.priority)
        self.assertEqual(test_job.exc_info, job_read.exc_info)
        self.assertEqual(test_job.result, job_read.result)
        self.assertEqual(test_job.user_id, job_read.user_id)
        self.assertEqual(test_job.company_id, job_read.company_id)
        self.assertEqual(test_job.worker_pid, 99999)
        delta = timedelta(seconds=1)  # DB does not keep milliseconds
        self.assertAlmostEqual(
            test_job.date_created, job_read.date_created, delta=delta
        )
        self.assertAlmostEqual(
            test_job.date_started, job_read.date_started, delta=delta
        )
        self.assertAlmostEqual(
            test_job.date_enqueued, job_read.date_enqueued, delta=delta
        )
        self.assertAlmostEqual(test_job.date_done, job_read.date_done, delta=delta)
        self.assertAlmostEqual(test_job.eta, job_read.eta, delta=delta)

        # dates written on a loaded job must also be persisted by store()
        test_date = datetime(2015, 3, 15, 21, 7, 0)
        job_read.date_enqueued = test_date
        job_read.date_started = test_date
        job_read.date_done = test_date
        job_read.store()

        job_read = Job.load(self.env, test_job.uuid)
        self.assertAlmostEqual(job_read.date_started, test_date, delta=delta)
        self.assertAlmostEqual(job_read.date_enqueued, test_date, delta=delta)
        self.assertAlmostEqual(job_read.date_done, test_date, delta=delta)
        self.assertAlmostEqual(job_read.exec_time, 0.0)

    def test_job_unlinked(self):
        """Loading a job whose record was deleted raises NoSuchJobError."""
        test_job = Job(self.method, args=("o", "k"), kwargs={"c": "!"})
        test_job.store()
        stored = self.queue_job.search([("uuid", "=", test_job.uuid)])
        stored.unlink()
        with self.assertRaises(NoSuchJobError):
            Job.load(self.env, test_job.uuid)

    def test_unicode(self):
        """Non-ASCII args, kwargs and description round-trip unchanged."""
        test_job = Job(
            self.method,
            args=("öô¿‽", "ñě"),
            kwargs={"c": "ßø"},
            priority=15,
            description="My dé^Wdescription",
        )
        test_job.store()
        job_read = Job.load(self.env, test_job.uuid)
        self.assertEqual(test_job.args, job_read.args)
        self.assertEqual(job_read.args, ("öô¿‽", "ñě"))
        self.assertEqual(test_job.kwargs, job_read.kwargs)
        self.assertEqual(job_read.kwargs, {"c": "ßø"})
        self.assertEqual(test_job.description, job_read.description)
        self.assertEqual(job_read.description, "My dé^Wdescription")

    def test_accented_bytestring(self):
        """Accented payloads survive serialization to/from the database."""
        test_job = Job(
            self.method,
            args=("öô¿‽", "ñě"),
            kwargs={"c": "ßø"},
            priority=15,
            description="My dé^Wdescription",
        )
        test_job.store()
        job_read = Job.load(self.env, test_job.uuid)
        self.assertEqual(job_read.args, ("öô¿‽", "ñě"))
        self.assertEqual(job_read.kwargs, {"c": "ßø"})
        self.assertEqual(job_read.description, "My dé^Wdescription")

    def test_job_delay(self):
        """with_delay() creates exactly one record with the returned uuid."""
        self.cr.execute("delete from queue_job")
        job_ = self.env["test.queue.job"].with_delay().testing_method()
        stored = self.queue_job.search([])
        self.assertEqual(len(stored), 1)
        self.assertEqual(stored.uuid, job_.uuid, "Incorrect returned Job UUID")

    def test_job_delay_model_method(self):
        """The job created by with_delay() performs with the given args."""
        self.cr.execute("delete from queue_job")
        delayable = self.env["test.queue.job"].with_delay()
        job_instance = delayable.testing_method("a", k=1)
        self.assertTrue(job_instance)
        result = job_instance.perform()
        self.assertEqual(result, (("a",), {"k": 1}))

    def test_job_identity_key_str(self):
        """A string identity key is stored and reloaded verbatim."""
        id_key = "e294e8444453b09d59bdb6efbfec1323"
        test_job_1 = Job(
            self.method,
            priority=15,
            description="Test I am the first one",
            identity_key=id_key,
        )
        test_job_1.store()
        job1 = Job.load(self.env, test_job_1.uuid)
        self.assertEqual(job1.identity_key, id_key)

    def test_job_identity_key_func_exact(self):
        """identity_exact hashes model, method, args and kwargs as expected."""
        # rebuild the expected digest with the same recipe identity_exact
        # uses: model name, method name, sorted recordset ids, args, kwargs
        hasher = hashlib.sha1()
        hasher.update(b"test.queue.job")
        hasher.update(b"testing_method")
        hasher.update(str(sorted([])).encode("utf-8"))
        hasher.update(str((1, "foo")).encode("utf-8"))
        hasher.update(str(sorted({"bar": "baz"}.items())).encode("utf-8"))
        expected_key = hasher.hexdigest()

        test_job_1 = Job(
            self.method,
            args=[1, "foo"],
            kwargs={"bar": "baz"},
            identity_key=identity_exact,
        )
        self.assertEqual(test_job_1.identity_key, expected_key)
        test_job_1.store()

        job1 = Job.load(self.env, test_job_1.uuid)
        self.assertEqual(job1.identity_key, expected_key)
||||
|
||||
|
||||
class TestJobs(JobCommonCase):
    """Test jobs on other methods or with different job configuration"""

    def test_description(self):
        """If no description is given to the job, it
        should be computed from the function
        """
        # if a docstring is defined for the function
        # it's used as description
        job_a = Job(self.env["test.queue.job"].testing_method)
        self.assertEqual(job_a.description, "Method used for tests")
        # if no docstring, the description is computed
        job_b = Job(self.env["test.queue.job"].no_description)
        self.assertEqual(job_b.description, "test.queue.job.no_description")
        # case when we explicitly specify the description
        description = "My description"
        job_a = Job(self.env["test.queue.job"].testing_method, description=description)
        self.assertEqual(job_a.description, description)

    def test_retry_pattern(self):
        """When we specify a retry pattern, the eta must follow it"""
        datetime_path = "odoo.addons.queue_job.job.datetime"
        method = self.env["test.queue.job"].job_with_retry_pattern
        with mock.patch(datetime_path, autospec=True) as mock_datetime:
            mock_datetime.now.return_value = datetime(2015, 6, 1, 15, 10, 0)
            test_job = Job(method, max_retries=0)
            # retry 1 and 2 fall in distinct pattern buckets
            test_job.retry += 1
            test_job.postpone(self.env)
            self.assertEqual(test_job.retry, 1)
            self.assertEqual(test_job.eta, datetime(2015, 6, 1, 15, 11, 0))
            test_job.retry += 1
            test_job.postpone(self.env)
            self.assertEqual(test_job.retry, 2)
            self.assertEqual(test_job.eta, datetime(2015, 6, 1, 15, 13, 0))
            # retries 3 and 4 share the same bucket (10 seconds)
            test_job.retry += 1
            test_job.postpone(self.env)
            self.assertEqual(test_job.retry, 3)
            self.assertEqual(test_job.eta, datetime(2015, 6, 1, 15, 10, 10))
            test_job.retry += 1
            test_job.postpone(self.env)
            self.assertEqual(test_job.retry, 4)
            self.assertEqual(test_job.eta, datetime(2015, 6, 1, 15, 10, 10))
            test_job.retry += 1
            test_job.postpone(self.env)
            self.assertEqual(test_job.retry, 5)
            self.assertEqual(test_job.eta, datetime(2015, 6, 1, 15, 15, 0))

    def test_retry_pattern_no_zero(self):
        """When we specify a retry pattern without 0, uses RETRY_INTERVAL"""
        method = self.env["test.queue.job"].job_with_retry_pattern__no_zero
        test_job = Job(method, max_retries=0)
        test_job.retry += 1
        self.assertEqual(test_job.retry, 1)
        self.assertEqual(test_job._get_retry_seconds(), RETRY_INTERVAL)
        test_job.retry += 1
        self.assertEqual(test_job.retry, 2)
        self.assertEqual(test_job._get_retry_seconds(), RETRY_INTERVAL)
        test_job.retry += 1
        self.assertEqual(test_job.retry, 3)
        self.assertEqual(test_job._get_retry_seconds(), 180)
        test_job.retry += 1
        self.assertEqual(test_job.retry, 4)
        self.assertEqual(test_job._get_retry_seconds(), 180)

    def test_job_delay_model_method_multi(self):
        """A job delayed on a multi-record recordset keeps the recordset."""
        rec1 = self.env["test.queue.job"].create({"name": "test1"})
        rec2 = self.env["test.queue.job"].create({"name": "test2"})
        recs = rec1 + rec2
        job_instance = recs.with_delay().mapped("name")
        self.assertTrue(job_instance)
        self.assertEqual(job_instance.args, ("name",))
        self.assertEqual(job_instance.recordset, recs)
        self.assertEqual(job_instance.model_name, "test.queue.job")
        self.assertEqual(job_instance.method_name, "mapped")
        self.assertEqual(["test1", "test2"], job_instance.perform())

    def test_job_identity_key_no_duplicate(self):
        """If a job with same identity key in queue do not add a new one"""
        id_key = "e294e8444453b09d59bdb6efbfec1323"
        rec1 = self.env["test.queue.job"].create({"name": "test1"})
        job_1 = rec1.with_delay(identity_key=id_key).mapped("name")

        self.assertTrue(job_1)
        # the second delay returns the already-queued job instead of a new one
        job_2 = rec1.with_delay(identity_key=id_key).mapped("name")
        self.assertEqual(job_2.uuid, job_1.uuid)

    def test_job_with_mutable_arguments(self):
        """Job with mutable arguments do not mutate on perform()"""
        delayable = self.env["test.queue.job"].with_delay()
        job_instance = delayable.job_alter_mutable([1], mutable_kwarg={"a": 1})
        self.assertTrue(job_instance)
        result = job_instance.perform()
        self.assertEqual(result, ([1, 2], {"a": 1, "b": 2}))
        job_instance.set_done()
        # at this point, the 'args' and 'kwargs' of the job instance
        # might have been modified, but they must never be modified in
        # the queue_job table after their creation, so a new 'load' will
        # get the initial values.
        job_instance.store()
        # jobs are always loaded before being performed, so we simulate
        # this behavior here to check if we have the correct initial arguments
        job_instance = Job.load(self.env, job_instance.uuid)
        self.assertEqual(([1],), job_instance.args)
        self.assertEqual({"mutable_kwarg": {"a": 1}}, job_instance.kwargs)

    def test_store_env_su_no_sudo(self):
        """A job delayed without sudo() is reloaded without superuser mode."""
        demo_user = self.env.ref("base.user_demo")
        self.env = self.env(user=demo_user)
        delayable = self.env["test.queue.job"].with_delay()
        test_job = delayable.testing_method()
        stored = test_job.db_record()
        job_instance = Job.load(self.env, stored.uuid)
        self.assertFalse(job_instance.recordset.env.su)
        self.assertTrue(job_instance.user_id, demo_user)

    def test_store_env_su_sudo(self):
        """A job delayed with sudo() is reloaded with superuser mode set."""
        demo_user = self.env.ref("base.user_demo")
        self.env = self.env(user=demo_user)
        delayable = self.env["test.queue.job"].sudo().with_delay()
        test_job = delayable.testing_method()
        stored = test_job.db_record()
        job_instance = Job.load(self.env, stored.uuid)
        self.assertTrue(job_instance.recordset.env.su)
        self.assertTrue(job_instance.user_id, demo_user)
||||
|
||||
|
||||
class TestJobModel(JobCommonCase):
    """Tests of the ``queue.job`` model: state changes, buttons, requeue,
    chatter messages/followers and related wizards."""

    def test_job_change_state(self):
        """_change_job_state accepts DONE and PENDING, rejects other states."""
        stored = self._create_job()
        stored._change_job_state(DONE, result="test")
        self.assertEqual(stored.state, DONE)
        self.assertEqual(stored.result, "test")
        stored._change_job_state(PENDING, result="test2")
        self.assertEqual(stored.state, PENDING)
        self.assertEqual(stored.result, "test2")
        with self.assertRaises(ValueError):
            # only PENDING and DONE supported
            stored._change_job_state(STARTED)

    def test_button_done(self):
        """button_done() marks the job DONE and records who did it."""
        stored = self._create_job()
        stored.button_done()
        self.assertEqual(stored.state, DONE)
        self.assertEqual(
            stored.result, "Manually set to done by %s" % self.env.user.name
        )

    def test_button_done_enqueue_waiting_dependencies(self):
        """Manually completing a parent job releases its waiting children."""
        job_root = Job(self.env["test.queue.job"].testing_method)
        job_child = Job(self.env["test.queue.job"].testing_method)
        job_child.add_depends({job_root})

        DelayableGraph._ensure_same_graph_uuid([job_root, job_child])
        job_root.store()
        job_child.store()

        self.assertEqual(job_child.state, WAIT_DEPENDENCIES)
        record_root = job_root.db_record()
        record_child = job_child.db_record()
        # Trigger button done
        record_root.button_done()
        # Check the state
        self.assertEqual(record_root.state, DONE)
        self.assertEqual(record_child.state, PENDING)

    def test_button_cancel_dependencies(self):
        """Cancelling a parent job cascades the cancellation to children."""
        job_root = Job(self.env["test.queue.job"].testing_method)
        job_child = Job(self.env["test.queue.job"].testing_method)
        job_child.add_depends({job_root})

        DelayableGraph._ensure_same_graph_uuid([job_root, job_child])
        job_root.store()
        job_child.store()

        self.assertEqual(job_child.state, WAIT_DEPENDENCIES)
        record_root = job_root.db_record()
        record_child = job_child.db_record()
        # Trigger button cancelled
        record_root.button_cancelled()
        # Check the state
        self.assertEqual(record_root.state, CANCELLED)
        self.assertEqual(record_child.state, CANCELLED)

    def test_requeue(self):
        """requeue() moves a failed job back to PENDING."""
        stored = self._create_job()
        stored.write({"state": "failed"})
        stored.requeue()
        self.assertEqual(stored.state, PENDING)

    def test_requeue_wait_dependencies_not_touched(self):
        """requeue() leaves jobs waiting on dependencies untouched."""
        job_root = Job(self.env["test.queue.job"].testing_method)
        job_child = Job(self.env["test.queue.job"].testing_method)
        job_child.add_depends({job_root})
        job_root.store()
        job_child.store()

        DelayableGraph._ensure_same_graph_uuid([job_root, job_child])

        record_root = job_root.db_record()
        record_child = job_child.db_record()
        self.assertEqual(record_root.state, PENDING)
        self.assertEqual(record_child.state, WAIT_DEPENDENCIES)
        record_root.write({"state": "failed"})

        # requeue both: only the failed root goes back to PENDING,
        # the child keeps waiting on its dependency
        (record_root + record_child).requeue()
        self.assertEqual(record_root.state, PENDING)
        self.assertEqual(record_child.state, WAIT_DEPENDENCIES)

    def test_message_when_write_fail(self):
        """Writing the failed state posts exactly one chatter message."""
        stored = self._create_job()
        stored.write({"state": "failed"})
        self.assertEqual(stored.state, FAILED)
        messages = stored.message_ids
        self.assertEqual(len(messages), 1)

    def test_follower_when_write_fail(self):
        """Inactive users are not added as followers, even when they are
        linked to an active partner."""
        group = self.env.ref("queue_job.group_queue_job_manager")
        vals = {
            "name": "xx",
            "login": "xx",
            "groups_id": [(6, 0, [group.id])],
            "active": False,
        }
        inactiveusr = self.user.create(vals)
        inactiveusr.partner_id.active = True
        self.assertFalse(inactiveusr in group.users)
        stored = self._create_job()
        stored.write({"state": "failed"})
        followers = stored.message_follower_ids.mapped("partner_id")
        self.assertFalse(inactiveusr.partner_id in followers)
        # every active manager must be a follower
        self.assertFalse({u.partner_id for u in group.users} - set(followers))

    def test_wizard_requeue(self):
        """The queue.requeue.job wizard requeues the selected failed jobs."""
        stored = self._create_job()
        stored.write({"state": "failed"})
        model = self.env["queue.requeue.job"]
        model = model.with_context(active_model="queue.job", active_ids=stored.ids)
        model.create({}).requeue()
        self.assertEqual(stored.state, PENDING)

    def test_context_uuid(self):
        """The running job exposes its own uuid in the context."""
        delayable = self.env["test.queue.job"].with_delay()
        test_job = delayable.testing_method(return_context=True)
        result = test_job.perform()
        key_present = "job_uuid" in result
        self.assertTrue(key_present)
        self.assertEqual(result["job_uuid"], test_job._uuid)

    def test_override_channel(self):
        """The channel passed to with_delay() overrides the default one."""
        delayable = self.env["test.queue.job"].with_delay(channel="root.sub.sub")
        test_job = delayable.testing_method(return_context=True)
        self.assertEqual("root.sub.sub", test_job.channel)

    def test_job_change_user_id(self):
        """Changing user_id on the record switches the records' environment."""
        demo_user = self.env.ref("base.user_demo")
        stored = self._create_job()
        stored.user_id = demo_user
        self.assertEqual(stored.records.env.uid, demo_user.id)
||||
|
||||
|
||||
class TestJobStorageMultiCompany(common.TransactionCase):
    """Test storage of jobs in a multi-company setup.

    Builds two extra companies (a and b), each with its own queue-job
    manager user, plus one "simple" user without any group, and checks
    which company a job is stored for and who follows it on failure.
    """

    def setUp(self):
        super().setUp()
        self.queue_job = self.env["queue.job"]
        grp_queue_job_manager = self.ref("queue_job.group_queue_job_manager")
        User = self.env["res.users"]
        Company = self.env["res.company"]
        Partner = self.env["res.partner"]

        main_company = self.env.ref("base.main_company")

        # A user with no groups at all, attached to the main company:
        # used to create jobs without being a queue-job manager.
        self.partner_user = Partner.create(
            {"name": "Simple User", "email": "simple.user@example.com"}
        )
        self.simple_user = User.create(
            {
                "partner_id": self.partner_user.id,
                "company_ids": [(4, main_company.id)],
                "login": "simple_user",
                "name": "simple user",
                "groups_id": [],
            }
        )

        # Company A with a user that is a queue-job manager.
        self.other_partner_a = Partner.create(
            {"name": "My Company a", "is_company": True, "email": "test@tes.ttest"}
        )
        self.other_company_a = Company.create(
            {
                "name": "My Company a",
                "partner_id": self.other_partner_a.id,
                "currency_id": self.ref("base.EUR"),
            }
        )
        self.other_user_a = User.create(
            {
                "partner_id": self.other_partner_a.id,
                "company_id": self.other_company_a.id,
                "company_ids": [(4, self.other_company_a.id)],
                "login": "my_login a",
                "name": "my user A",
                "groups_id": [(4, grp_queue_job_manager)],
            }
        )
        # Company B, mirroring company A.
        self.other_partner_b = Partner.create(
            {"name": "My Company b", "is_company": True, "email": "test@tes.ttest"}
        )
        self.other_company_b = Company.create(
            {
                "name": "My Company b",
                "partner_id": self.other_partner_b.id,
                "currency_id": self.ref("base.EUR"),
            }
        )
        self.other_user_b = User.create(
            {
                "partner_id": self.other_partner_b.id,
                "company_id": self.other_company_b.id,
                "company_ids": [(4, self.other_company_b.id)],
                "login": "my_login_b",
                "name": "my user B",
                "groups_id": [(4, grp_queue_job_manager)],
            }
        )

    def _create_job(self, env):
        """Enqueue one job in the given environment and return its record.

        Wipes the ``queue_job`` table first so that the single search
        below is guaranteed to return exactly the job just created.
        """
        self.cr.execute("delete from queue_job")
        env["test.queue.job"].with_delay().testing_method()
        stored = self.queue_job.search([])
        self.assertEqual(len(stored), 1)
        return stored

    def test_job_default_company_id(self):
        """the default company is the one from the current user_id"""
        stored = self._create_job(self.env)
        self.assertEqual(
            stored.company_id.id,
            self.ref("base.main_company"),
            "Incorrect default company_id",
        )
        # Same check after switching the environment to a company-B user.
        env = self.env(user=self.other_user_b.id)
        stored = self._create_job(env)
        self.assertEqual(
            stored.company_id.id,
            self.other_company_b.id,
            "Incorrect default company_id",
        )

    def test_job_no_company_id(self):
        """if we put an empty company_id in the context
        jobs are created without company_id
        """
        env = self.env(context={"company_id": None})
        stored = self._create_job(env)
        self.assertFalse(stored.company_id, "Company_id should be empty")

    def test_job_specific_company_id(self):
        """If a company_id specified in the context
        it's used by default for the job creation"""
        env = self.env(context={"company_id": self.other_company_a.id})
        stored = self._create_job(env)
        self.assertEqual(
            stored.company_id.id, self.other_company_a.id, "Incorrect company_id"
        )

    def test_job_subscription(self):
        """Followers added on failure depend on the job's company."""
        # if the job is created without company_id, all members of
        # queue_job.group_queue_job_manager must be followers
        User = self.env["res.users"]
        no_company_context = dict(self.env.context, company_id=None)
        no_company_env = self.env(user=self.simple_user, context=no_company_context)
        stored = self._create_job(no_company_env)
        stored._message_post_on_failure()
        # Expected followers: every queue-job manager plus the job's own user.
        users = (
            User.search(
                [("groups_id", "=", self.ref("queue_job.group_queue_job_manager"))]
            )
            + stored.user_id
        )
        self.assertEqual(len(stored.message_follower_ids), len(users))
        expected_partners = [u.partner_id for u in users]
        self.assertSetEqual(
            set(stored.message_follower_ids.mapped("partner_id")),
            set(expected_partners),
        )
        followers_id = stored.message_follower_ids.mapped("partner_id.id")
        self.assertIn(self.other_partner_a.id, followers_id)
        self.assertIn(self.other_partner_b.id, followers_id)
        # jobs created for a specific company_id are followed only by
        # company's members
        company_a_context = dict(self.env.context, company_id=self.other_company_a.id)
        company_a_env = self.env(user=self.simple_user, context=company_a_context)
        stored = self._create_job(company_a_env)
        # NOTE(review): the return value of with_user() is discarded, so this
        # line is a no-op — presumably `stored = stored.with_user(...)` was
        # intended; confirm before changing, as it would alter the env used
        # by _message_post_on_failure() below.
        stored.with_user(self.other_user_a.id)
        stored._message_post_on_failure()
        # 2 because simple_user (creator of job) + self.other_partner_a
        self.assertEqual(len(stored.message_follower_ids), 2)
        users = self.simple_user + self.other_user_a
        expected_partners = [u.partner_id for u in users]
        self.assertSetEqual(
            set(stored.message_follower_ids.mapped("partner_id")),
            set(expected_partners),
        )
        followers_id = stored.message_follower_ids.mapped("partner_id.id")
        self.assertIn(self.other_partner_a.id, followers_id)
        self.assertNotIn(self.other_partner_b.id, followers_id)
|
|
@ -0,0 +1,59 @@
|
|||
# Copyright 2020 Camptocamp SA
|
||||
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
from odoo.tests.common import tagged
|
||||
|
||||
from odoo.addons.queue_job.job import Job
|
||||
|
||||
from .common import JobCommonCase
|
||||
|
||||
|
||||
@tagged("post_install", "-at_install")
class TestJobAutoDelay(JobCommonCase):
    """Test auto delay of jobs.

    Methods decorated with ``@job_auto_delay`` on the ``test.queue.job``
    model must enqueue a job when called directly, but execute normally
    when already running inside a job or when forced synchronous.
    """

    def test_auto_delay(self):
        """method decorated by @job_auto_delay is automatically delayed"""
        result = self.env["test.queue.job"].delay_me(1, kwarg=2)
        self.assertIsInstance(result, Job)
        self.assertEqual(result.args, (1,))
        self.assertEqual(result.kwargs, {"kwarg": 2})

    def test_auto_delay_options(self):
        """method automatically delayed uses <method>_job_options arguments"""
        result = self.env["test.queue.job"].delay_me_options()
        self.assertIsInstance(result, Job)
        self.assertEqual(result.identity_key, "my_job_identity")

    def test_auto_delay_inside_job(self):
        """when a delayed job is processed, it must not delay itself"""
        job_ = self.env["test.queue.job"].delay_me(1, kwarg=2)
        # Fix: was `assertTrue(job_.perform(), (1, 2))`, which passed the
        # expected tuple as the assertion *message* and never compared it.
        self.assertEqual(job_.perform(), (1, 2))

    def test_auto_delay_force_sync(self):
        """method forced to run synchronously"""
        with self.assertLogs(level="WARNING") as log_catcher:
            result = (
                self.env["test.queue.job"]
                .with_context(_job_force_sync=True)
                .delay_me(1, kwarg=2)
            )
        self.assertEqual(
            len(log_catcher.output), 1, "Exactly one warning should be logged"
        )
        self.assertIn(" ctx key found. NO JOB scheduled. ", log_catcher.output[0])
        # Fix: was `assertTrue(result, (1, 2))` — same message-argument
        # misuse as above; assert the actual synchronous return value.
        self.assertEqual(result, (1, 2))

    def test_auto_delay_context_key_set(self):
        """patched with context_key delays only if context keys is set"""
        result = (
            self.env["test.queue.job"]
            .with_context(auto_delay_delay_me_context_key=True)
            .delay_me_context_key()
        )
        self.assertIsInstance(result, Job)

    def test_auto_delay_context_key_unset(self):
        """patched with context_key do not delay if context keys is not set"""
        result = self.env["test.queue.job"].delay_me_context_key()
        self.assertEqual(result, "ok")
|
|
@ -0,0 +1,93 @@
|
|||
# Copyright 2016 Camptocamp SA
|
||||
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
import odoo.tests.common as common
|
||||
from odoo import exceptions
|
||||
|
||||
from odoo.addons.queue_job.job import Job
|
||||
|
||||
|
||||
class TestJobChannels(common.TransactionCase):
    """Channel hierarchy and the channel assigned to stored jobs."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.function_model = cls.env["queue.job.function"]
        cls.channel_model = cls.env["queue.job.channel"]
        cls.test_model = cls.env["test.queue.channel"]
        cls.root_channel = cls.env.ref("queue_job.channel_root")

    def test_channel_complete_name(self):
        """complete_name concatenates the full path from the root channel."""
        parent = self.channel_model.create(
            {"name": "number", "parent_id": self.root_channel.id}
        )
        child = self.channel_model.create({"name": "five", "parent_id": parent.id})
        self.assertEqual(parent.complete_name, "root.number")
        self.assertEqual(child.complete_name, "root.number.five")

    def test_channel_tree(self):
        """A non-root channel cannot be created without a parent."""
        with self.assertRaises(exceptions.ValidationError):
            self.channel_model.create({"name": "sub"})

    def test_channel_root(self):
        """The root channel can neither be deleted nor renamed."""
        with self.assertRaises(exceptions.UserError):
            self.root_channel.unlink()
        with self.assertRaises(exceptions.UserError):
            self.root_channel.name = "leaf"

    def test_channel_on_job(self):
        """Jobs pick up the channel of their job function, unless overridden."""
        job_method = self.env["test.queue.channel"].job_a
        function_name = self.function_model.job_function_name(
            "test.queue.channel", "job_a"
        )
        function_record = self.function_model.search([("name", "=", function_name)])

        self.assertEqual(function_record.channel, "root")

        # Without any specific configuration the job lands on "root".
        job_ = Job(job_method)
        job_.store()
        db_job = job_.db_record()
        self.assertEqual(db_job.channel, "root")
        reloaded = Job.load(self.env, job_.uuid)
        self.assertEqual(reloaded.channel, "root")

        # Reconfigure the function: new jobs follow the new channel.
        function_record.channel_id = self.env.ref("test_queue_job.channel_sub")

        job_ = Job(job_method)
        job_.store()
        self.assertEqual(job_.db_record().channel, "root.sub")

        # it's also possible to override the channel
        job_ = Job(job_method, channel="root.sub")
        job_.store()
        self.assertEqual(job_.db_record().channel, job_.channel)

    def test_default_channel_no_xml(self):
        """Channel on job is root if there is no queue.job.function record"""
        job_ = Job(self.env["res.users"].browse)
        job_.store()
        self.assertEqual(job_.db_record().channel, "root")

    def test_set_channel_from_record(self):
        """The function's channel string mirrors its channel_id hierarchy."""
        function_name = self.function_model.job_function_name(
            "test.queue.channel", "job_sub_channel"
        )
        function_record = self.function_model.search([("name", "=", function_name)])
        self.assertEqual(function_record.channel, "root.sub.subsub")

        channel = function_record.channel_id
        self.assertEqual(channel.name, "subsub")
        self.assertEqual(channel.parent_id.name, "sub")
        self.assertEqual(channel.parent_id.parent_id.name, "root")
        self.assertEqual(function_record.channel, "root.sub.subsub")

    def test_default_removal_interval(self):
        """A new channel defaults to a 30-day removal interval."""
        channel = self.channel_model.create(
            {"name": "number", "parent_id": self.root_channel.id}
        )
        self.assertEqual(channel.removal_interval, 30)
|
|
@ -0,0 +1,35 @@
|
|||
import odoo.tests.common as common
|
||||
from odoo import exceptions
|
||||
|
||||
|
||||
class TestJobFunction(common.TransactionCase):
    """Validation of the editable retry pattern on ``queue.job.function``."""

    def setUp(self):
        super().setUp()
        self.test_function_model = self.env.ref(
            "queue_job.job_function_queue_job__test_job"
        )

    def test_check_retry_pattern_randomized_case(self):
        """A pattern with (min, max) ranges is accepted and stored as-is."""
        randomized_pattern = "{1: (10, 20), 2: (20, 40)}"
        self.test_function_model.edit_retry_pattern = randomized_pattern
        self.assertEqual(
            self.test_function_model.edit_retry_pattern, randomized_pattern
        )

    def test_check_retry_pattern_fixed_case(self):
        """A pattern with plain integer delays is accepted and stored as-is."""
        fixed_pattern = "{1: 10, 2: 20}"
        self.test_function_model.edit_retry_pattern = fixed_pattern
        self.assertEqual(self.test_function_model.edit_retry_pattern, fixed_pattern)

    def test_check_retry_pattern_invalid_cases(self):
        """Malformed retry patterns are rejected with a UserError."""
        invalid_patterns = (
            "{1: a, 2: 20}",  # delay value is not a number
            "{a: 10, 2: 20}",  # retry count is not a number
            "{1: (1, 2, 3), 2: 20}",  # randomized range must have two bounds
        )
        for pattern in invalid_patterns:
            with self.assertRaises(exceptions.UserError):
                self.test_function_model.edit_retry_pattern = pattern
|
|
@ -0,0 +1,32 @@
|
|||
# copyright 2022 Guewen Baconnier
|
||||
# license lgpl-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
import json
|
||||
|
||||
from odoo.tests import common
|
||||
|
||||
# pylint: disable=odoo-addons-relative-import
|
||||
# we are testing, we want to test as if we were an external consumer of the API
|
||||
from odoo.addons.queue_job.fields import JobEncoder
|
||||
|
||||
|
||||
class TestJsonField(common.TransactionCase):
    """Serialization of recordsets (and their context) by JobEncoder."""

    # TODO: when migrating to 16.0, adapt the checks in
    # queue_job/tests/test_json_field.py to verify the context keys are
    # encoded and remove these
    def test_encoder_recordset_store_context(self):
        """The user's context is serialized along with the recordset."""
        user = self.env.ref("base.user_demo")
        wanted_context = {"lang": "en_US", "tz": "Europe/Brussels"}
        records = self.env(user=user, context=wanted_context)["test.queue.job"]
        decoded = json.loads(json.dumps(records, cls=JobEncoder))
        self.assertEqual(decoded["context"], wanted_context)

    def test_encoder_recordset_context_filter_keys(self):
        """Non-serializable context keys are dropped from the encoded output."""
        user = self.env.ref("base.user_demo")
        wanted_context = {"lang": "en_US", "tz": "Europe/Brussels"}
        # An object() value cannot be JSON-encoded; the encoder must skip it.
        tampered_context = dict(wanted_context, foo=object())
        records = self.env(user=user, context=tampered_context)["test.queue.job"]
        decoded = json.loads(json.dumps(records, cls=JobEncoder))
        self.assertEqual(decoded["context"], wanted_context)
|
|
@ -0,0 +1,114 @@
|
|||
# Copyright 2014-2016 Camptocamp SA
|
||||
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
import odoo.tests.common as common
|
||||
from odoo import exceptions
|
||||
|
||||
|
||||
class TestRelatedAction(common.TransactionCase):
    """Test Related Actions"""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.model = cls.env["test.related.action"]
        cls.record = cls.model.create({})
        cls.records = cls.record + cls.model.create({})

    def test_attributes(self):
        """Job with related action check if action returns correctly"""
        job_ = self.record.with_delay().testing_related_action__kwargs()
        action_record, action_kwargs = job_.related_action()
        self.assertEqual(action_record, job_.db_record())
        self.assertEqual(action_kwargs, {"b": 4})

    def test_decorator_empty(self):
        """Job with decorator without value disable the default action

        The ``related_action`` configuration is: ``{"enable": False}``
        """
        # default action returns None
        job_ = self.record.with_delay().testing_related_action__return_none()
        self.assertIsNone(job_.related_action())

    def test_model_no_action(self):
        """Model shows an error when no action exist"""
        job_ = self.record.with_delay().testing_related_action__return_none()
        # db_record is the 'job.queue' record on which we click on the
        # button to open the related action
        with self.assertRaises(exceptions.UserError):
            job_.db_record().open_related_action()

    def test_default_no_record(self):
        """Default related action called when no decorator is set

        When called on no record.

        The ``related_action`` configuration is: ``{}``
        """
        job_ = self.model.with_delay().testing_related_action__no()
        self.assertEqual(job_.related_action(), None)

    def test_model_default_no_record(self):
        """Model shows an error when using the default action and we have no
        record linked to the job"""
        job_ = self.model.with_delay().testing_related_action__no()
        # db_record is the 'job.queue' record on which we click on the
        # button to open the related action
        with self.assertRaises(exceptions.UserError):
            job_.db_record().open_related_action()

    def test_default_one_record(self):
        """Default related action called when no decorator is set

        When called on one record.

        The ``related_action`` configuration is: ``{}``
        """
        job_ = self.record.with_delay().testing_related_action__no()
        # A single record opens a form view pointing at it.
        wanted_action = {
            "name": "Related Record",
            "res_id": self.record.id,
            "res_model": self.record._name,
            "type": "ir.actions.act_window",
            "view_mode": "form",
        }
        self.assertEqual(job_.related_action(), wanted_action)

    def test_default_several_record(self):
        """Default related action called when no decorator is set

        When called on several record.

        The ``related_action`` configuration is: ``{}``
        """
        job_ = self.records.with_delay().testing_related_action__no()
        # Several records open a filtered list view instead of a form.
        wanted_action = {
            "name": "Related Records",
            "domain": [("id", "in", self.records.ids)],
            "res_model": self.record._name,
            "type": "ir.actions.act_window",
            "view_mode": "tree,form",
        }
        self.assertEqual(job_.related_action(), wanted_action)

    def test_decorator(self):
        """Call the related action on the model

        The function is::

        The ``related_action`` configuration is::

            {
                "func_name": "testing_related__url",
                "kwargs": {"url": "https://en.wikipedia.org/wiki/{subject}"}
            }
        """
        job_ = self.record.with_delay().testing_related_action__store("Discworld")
        wanted_action = {
            "type": "ir.actions.act_url",
            "target": "new",
            "url": "https://en.wikipedia.org/wiki/Discworld",
        }
        self.assertEqual(job_.related_action(), wanted_action)
Loading…
Add table
Add a link
Reference in a new issue