mirror of
https://github.com/bringout/oca-ocb-core.git
synced 2026-04-21 02:12:08 +02:00
Initial commit: Core packages
This commit is contained in:
commit
12c29a983b
9512 changed files with 8379910 additions and 0 deletions
|
|
@ -0,0 +1,57 @@
|
|||
/** @odoo-module */
|
||||
|
||||
import { busService } from "@bus/services/bus_service";
|
||||
import { presenceService } from "@bus/services/presence_service";
|
||||
import { multiTabService } from "@bus/multi_tab_service";
|
||||
import { getPyEnv } from '@bus/../tests/helpers/mock_python_environment';
|
||||
|
||||
import { createWebClient } from "@web/../tests/webclient/helpers";
|
||||
import { assetsWatchdogService } from "@bus/services/assets_watchdog_service";
|
||||
import { click, getFixture, patchWithCleanup } from "@web/../tests/helpers/utils";
|
||||
import { browser } from "@web/core/browser/browser";
|
||||
import { registry } from "@web/core/registry";
|
||||
|
||||
const serviceRegistry = registry.category("services");
|
||||
|
||||
QUnit.module("Bus Assets WatchDog", (hooks) => {
|
||||
let target;
|
||||
hooks.beforeEach((assert) => {
|
||||
serviceRegistry.add("assetsWatchdog", assetsWatchdogService);
|
||||
serviceRegistry.add("bus_service", busService);
|
||||
serviceRegistry.add("presence", presenceService);
|
||||
serviceRegistry.add("multi_tab", multiTabService);
|
||||
patchWithCleanup(browser, {
|
||||
setTimeout(fn) {
|
||||
return this._super(fn, 0);
|
||||
},
|
||||
location: {
|
||||
reload: () => assert.step("reloadPage"),
|
||||
},
|
||||
});
|
||||
|
||||
target = getFixture();
|
||||
});
|
||||
|
||||
QUnit.test("can listen on bus and displays notifications in DOM", async (assert) => {
|
||||
assert.expect(4);
|
||||
|
||||
await createWebClient({});
|
||||
const pyEnv = await getPyEnv();
|
||||
const { afterNextRender } = owl.App;
|
||||
await afterNextRender(() => {
|
||||
pyEnv['bus.bus']._sendone("broadcast", "bundle_changed", {
|
||||
server_version: "NEW_MAJOR_VERSION"
|
||||
});
|
||||
});
|
||||
|
||||
assert.containsOnce(target, ".o_notification_body");
|
||||
assert.strictEqual(
|
||||
target.querySelector(".o_notification_body .o_notification_content").textContent,
|
||||
"The page appears to be out of date."
|
||||
);
|
||||
|
||||
// reload by clicking on the reload button
|
||||
await click(target, ".o_notification_buttons .btn-primary");
|
||||
assert.verifySteps(["reloadPage"]);
|
||||
});
|
||||
});
|
||||
595
odoo-bringout-oca-ocb-bus/bus/static/tests/bus_tests.js
Normal file
595
odoo-bringout-oca-ocb-bus/bus/static/tests/bus_tests.js
Normal file
|
|
@ -0,0 +1,595 @@
|
|||
odoo.define('web.bus_tests', function (require) {
|
||||
"use strict";
|
||||
|
||||
var { busService } = require('@bus/services/bus_service');
|
||||
const { presenceService } = require('@bus/services/presence_service');
|
||||
const { multiTabService } = require('@bus/multi_tab_service');
|
||||
const { WEBSOCKET_CLOSE_CODES } = require("@bus/workers/websocket_worker");
|
||||
const { startServer } = require('@bus/../tests/helpers/mock_python_environment');
|
||||
const { patchWebsocketWorkerWithCleanup } = require("@bus/../tests/helpers/mock_websocket");
|
||||
const { waitForChannels } = require('@bus/../tests/helpers/websocket_event_deferred');
|
||||
|
||||
const { browser } = require("@web/core/browser/browser");
|
||||
const { registry } = require("@web/core/registry");
|
||||
const { session } = require('@web/session');
|
||||
const { makeDeferred, nextTick, patchWithCleanup } = require("@web/../tests/helpers/utils");
|
||||
const { makeTestEnv } = require('@web/../tests/helpers/mock_env');
|
||||
const legacySession = require('web.session');
|
||||
|
||||
QUnit.module('Bus', {
|
||||
beforeEach: function () {
|
||||
const customMultiTabService = {
|
||||
...multiTabService,
|
||||
start() {
|
||||
const originalMultiTabService = multiTabService.start(...arguments);
|
||||
originalMultiTabService.TAB_HEARTBEAT_PERIOD = 10;
|
||||
originalMultiTabService.MAIN_TAB_HEARTBEAT_PERIOD = 1;
|
||||
return originalMultiTabService;
|
||||
},
|
||||
};
|
||||
registry.category('services').add('bus_service', busService);
|
||||
registry.category('services').add('presence', presenceService);
|
||||
registry.category('services').add('multi_tab', customMultiTabService);
|
||||
},
|
||||
}, function () {
|
||||
QUnit.test('notifications received from the channel', async function (assert) {
|
||||
assert.expect(3);
|
||||
|
||||
const pyEnv = await startServer();
|
||||
const env = await makeTestEnv({ activateMockServer: true });
|
||||
await env.services['bus_service'].start();
|
||||
env.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
assert.step('notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
env.services['bus_service'].addChannel('lambda');
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'beta');
|
||||
await nextTick();
|
||||
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'epsilon');
|
||||
await nextTick();
|
||||
|
||||
assert.verifySteps([
|
||||
'notification - beta',
|
||||
'notification - epsilon',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('notifications not received after stoping the service', async function (assert) {
|
||||
assert.expect(4);
|
||||
|
||||
const pyEnv = await startServer();
|
||||
const firstTabEnv = await makeTestEnv({ activateMockServer: true });
|
||||
const secondTabEnv = await makeTestEnv({ activateMockServer: true });
|
||||
await firstTabEnv.services['bus_service'].start();
|
||||
await secondTabEnv.services['bus_service'].start();
|
||||
|
||||
firstTabEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
assert.step('1 - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
firstTabEnv.services['bus_service'].addChannel('lambda');
|
||||
secondTabEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
assert.step('2 - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
// both tabs should receive the notification
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'beta');
|
||||
await nextTick();
|
||||
secondTabEnv.services['bus_service'].stop();
|
||||
await nextTick();
|
||||
// only first tab should receive the notification since the
|
||||
// second tab has called the stop method.
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'epsilon');
|
||||
await nextTick();
|
||||
|
||||
assert.verifySteps([
|
||||
'1 - notification - beta',
|
||||
'2 - notification - beta',
|
||||
'1 - notification - epsilon',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('notifications still received after disconnect/reconnect', async function (assert) {
|
||||
assert.expect(3);
|
||||
|
||||
const oldSetTimeout = window.setTimeout;
|
||||
patchWithCleanup(
|
||||
window,
|
||||
{
|
||||
setTimeout: callback => oldSetTimeout(callback, 0)
|
||||
},
|
||||
{ pure: true },
|
||||
)
|
||||
|
||||
const pyEnv = await startServer();
|
||||
const env = await makeTestEnv({ activateMockServer: true });
|
||||
await env.services["bus_service"].start();
|
||||
await nextTick();
|
||||
env.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
assert.step('notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
env.services['bus_service'].addChannel('lambda');
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'beta');
|
||||
pyEnv.simulateConnectionLost(WEBSOCKET_CLOSE_CODES.ABNORMAL_CLOSURE);
|
||||
// Give websocket worker a tick to try to restart
|
||||
await nextTick();
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'gamma');
|
||||
// Give bus service a tick to receive the notification from
|
||||
// postMessage.
|
||||
await nextTick();
|
||||
|
||||
assert.verifySteps([
|
||||
"notification - beta",
|
||||
"notification - gamma",
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('tabs share message from a channel', async function (assert) {
|
||||
assert.expect(1);
|
||||
|
||||
const pyEnv = await startServer();
|
||||
const steps = new Set();
|
||||
// main
|
||||
const mainEnv = await makeTestEnv({ activateMockServer: true });
|
||||
mainEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
steps.add('main - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
await mainEnv.services['bus_service'].addChannel('lambda');
|
||||
|
||||
// slave
|
||||
const slaveEnv = await makeTestEnv();
|
||||
await slaveEnv.services['bus_service'].start();
|
||||
|
||||
slaveEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
steps.add('slave - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
await slaveEnv.services['bus_service'].addChannel('lambda');
|
||||
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'beta');
|
||||
// Wait one tick for the worker `postMessage` to reach the bus_service.
|
||||
await nextTick();
|
||||
// Wait another tick for the `bus.trigger` to reach the listeners.
|
||||
await nextTick();
|
||||
|
||||
assert.deepEqual(
|
||||
[...steps],
|
||||
["slave - notification - beta", "main - notification - beta"]
|
||||
);
|
||||
});
|
||||
|
||||
QUnit.test('second tab still receives notifications after main pagehide', async function (assert) {
|
||||
assert.expect(1);
|
||||
|
||||
const pyEnv = await startServer();
|
||||
const steps = new Set();
|
||||
// main
|
||||
const mainEnv = await makeTestEnv({ activateMockServer: true });
|
||||
await mainEnv.services['bus_service'].start();
|
||||
mainEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
steps.add('main - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
mainEnv.services['bus_service'].addChannel('lambda');
|
||||
|
||||
// second env
|
||||
// prevent second tab from receiving pagehide event.
|
||||
patchWithCleanup(browser, {
|
||||
addEventListener(eventName, callback) {
|
||||
if (eventName === 'pagehide') {
|
||||
return;
|
||||
}
|
||||
this._super(eventName, callback);
|
||||
},
|
||||
});
|
||||
const secondEnv = await makeTestEnv({ activateMockServer: true });
|
||||
secondEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
steps.add('slave - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
secondEnv.services['bus_service'].addChannel('lambda');
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'beta');
|
||||
await nextTick();
|
||||
|
||||
// simulate unloading main
|
||||
window.dispatchEvent(new Event('pagehide'));
|
||||
await nextTick();
|
||||
|
||||
pyEnv['bus.bus']._sendone('lambda', 'notifType', 'gamma');
|
||||
await nextTick();
|
||||
|
||||
assert.deepEqual(
|
||||
[...steps],
|
||||
[
|
||||
'slave - notification - beta',
|
||||
'main - notification - beta',
|
||||
'slave - notification - gamma',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('two tabs calling addChannel simultaneously', async function (assert) {
|
||||
assert.expect(5);
|
||||
|
||||
const channelPatch = {
|
||||
addChannel(channel) {
|
||||
assert.step('Tab ' + this.__tabId__ + ': addChannel ' + channel);
|
||||
this._super.apply(this, arguments);
|
||||
},
|
||||
deleteChannel(channel) {
|
||||
assert.step('Tab ' + this.__tabId__ + ': deleteChannel ' + channel);
|
||||
this._super.apply(this, arguments);
|
||||
},
|
||||
};
|
||||
const firstTabEnv = await makeTestEnv({ activateMockServer: true });
|
||||
const secondTabEnv = await makeTestEnv({ activateMockServer: true });
|
||||
firstTabEnv.services['bus_service'].__tabId__ = 1;
|
||||
secondTabEnv.services['bus_service'].__tabId__ = 2;
|
||||
patchWithCleanup(firstTabEnv.services['bus_service'], channelPatch);
|
||||
patchWithCleanup(secondTabEnv.services['bus_service'], channelPatch);
|
||||
firstTabEnv.services['bus_service'].addChannel('alpha');
|
||||
secondTabEnv.services['bus_service'].addChannel('alpha');
|
||||
firstTabEnv.services['bus_service'].addChannel('beta');
|
||||
secondTabEnv.services['bus_service'].addChannel('beta');
|
||||
|
||||
assert.verifySteps([
|
||||
"Tab 1: addChannel alpha",
|
||||
"Tab 2: addChannel alpha",
|
||||
"Tab 1: addChannel beta",
|
||||
"Tab 2: addChannel beta",
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('two tabs adding a different channel', async function (assert) {
|
||||
assert.expect(3);
|
||||
|
||||
const pyEnv = await startServer();
|
||||
const firstTabEnv = await makeTestEnv({ activateMockServer: true });
|
||||
const secondTabEnv = await makeTestEnv({ activateMockServer: true });
|
||||
firstTabEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
assert.step('first - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
secondTabEnv.services['bus_service'].addEventListener('notification', ({ detail: notifications }) => {
|
||||
assert.step('second - notification - ' + notifications.map(notif => notif.payload).toString());
|
||||
});
|
||||
firstTabEnv.services['bus_service'].addChannel("alpha");
|
||||
secondTabEnv.services['bus_service'].addChannel("beta");
|
||||
await nextTick();
|
||||
pyEnv['bus.bus']._sendmany([
|
||||
['alpha', 'notifType', 'alpha'],
|
||||
['beta', 'notifType', 'beta']
|
||||
]);
|
||||
await nextTick();
|
||||
|
||||
assert.verifySteps([
|
||||
'first - notification - alpha,beta',
|
||||
'second - notification - alpha,beta',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('channel management from multiple tabs', async function (assert) {
|
||||
patchWebsocketWorkerWithCleanup({
|
||||
_sendToServer({ event_name, data }) {
|
||||
assert.step(`${event_name} - [${data.channels.toString()}]`);
|
||||
},
|
||||
});
|
||||
const firstTabEnv = await makeTestEnv();
|
||||
const secTabEnv = await makeTestEnv();
|
||||
firstTabEnv.services['bus_service'].addChannel('channel1');
|
||||
await waitForChannels(["channel1"]);
|
||||
// this should not trigger a subscription since the channel1 was
|
||||
// aleady known.
|
||||
secTabEnv.services['bus_service'].addChannel('channel1');
|
||||
await waitForChannels(["channel1"]);
|
||||
// removing channel1 from first tab should not trigger
|
||||
// re-subscription since the second tab still listens to this
|
||||
// channel.
|
||||
firstTabEnv.services['bus_service'].deleteChannel('channel1');
|
||||
await waitForChannels(["channel1"], { operation: "delete" });
|
||||
// this should trigger a subscription since the channel2 was not
|
||||
// known.
|
||||
secTabEnv.services['bus_service'].addChannel('channel2');
|
||||
await waitForChannels(["channel2"]);
|
||||
|
||||
assert.verifySteps([
|
||||
'subscribe - [channel1]',
|
||||
'subscribe - [channel1,channel2]',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('channels subscription after disconnection', async function (assert) {
|
||||
// Patch setTimeout in order for the worker to reconnect immediatly.
|
||||
patchWithCleanup(window, {
|
||||
setTimeout: fn => fn(),
|
||||
});
|
||||
const firstSubscribeDeferred = makeDeferred();
|
||||
const worker = patchWebsocketWorkerWithCleanup({
|
||||
_sendToServer({ event_name, data }) {
|
||||
assert.step(`${event_name} - [${data.channels.toString()}]`);
|
||||
if (event_name === 'subscribe') {
|
||||
firstSubscribeDeferred.resolve();
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
const env = await makeTestEnv();
|
||||
env.services["bus_service"].start();
|
||||
// wait for the websocket to connect and the first subscription
|
||||
// to occur.
|
||||
await firstSubscribeDeferred;
|
||||
worker.websocket.close(WEBSOCKET_CLOSE_CODES.KEEP_ALIVE_TIMEOUT);
|
||||
// wait for the websocket to re-connect.
|
||||
await nextTick();
|
||||
|
||||
assert.verifySteps([
|
||||
'subscribe - []',
|
||||
'subscribe - []',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('Last notification id is passed to the worker on service start', async function (assert) {
|
||||
const pyEnv = await startServer();
|
||||
let updateLastNotificationDeferred = makeDeferred();
|
||||
patchWebsocketWorkerWithCleanup({
|
||||
_onClientMessage(_, { action, data }) {
|
||||
if (action === 'initialize_connection') {
|
||||
assert.step(`${action} - ${data['lastNotificationId']}`);
|
||||
updateLastNotificationDeferred.resolve();
|
||||
}
|
||||
return this._super(...arguments);
|
||||
},
|
||||
});
|
||||
const env1 = await makeTestEnv();
|
||||
await env1.services['bus_service'].start();
|
||||
await updateLastNotificationDeferred;
|
||||
// First bus service has never received notifications thus the
|
||||
// default is 0.
|
||||
assert.verifySteps(['initialize_connection - 0']);
|
||||
|
||||
pyEnv['bus.bus']._sendmany([
|
||||
['lambda', 'notifType', 'beta'],
|
||||
['lambda', 'notifType', 'beta'],
|
||||
]);
|
||||
// let the bus service store the last notification id.
|
||||
await nextTick();
|
||||
|
||||
updateLastNotificationDeferred = makeDeferred();
|
||||
const env2 = await makeTestEnv();
|
||||
await env2.services['bus_service'].start();
|
||||
await updateLastNotificationDeferred;
|
||||
// Second bus service sends the last known notification id.
|
||||
assert.verifySteps([`initialize_connection - 1`]);
|
||||
});
|
||||
|
||||
QUnit.test('Websocket disconnects upon user log out', async function (assert) {
|
||||
// first tab connects to the worker with user logged.
|
||||
patchWithCleanup(session, {
|
||||
user_id: 1,
|
||||
});
|
||||
const connectionInitializedDeferred = makeDeferred();
|
||||
let connectionOpenedDeferred = makeDeferred();
|
||||
patchWebsocketWorkerWithCleanup({
|
||||
_initializeConnection(client, data) {
|
||||
this._super(client, data);
|
||||
connectionInitializedDeferred.resolve();
|
||||
},
|
||||
});
|
||||
|
||||
const firstTabEnv = await makeTestEnv();
|
||||
await firstTabEnv.services["bus_service"].start();
|
||||
firstTabEnv.services['bus_service'].addEventListener('connect', () => {
|
||||
if (session.user_id) {
|
||||
assert.step('connect');
|
||||
}
|
||||
connectionOpenedDeferred.resolve();
|
||||
connectionOpenedDeferred = makeDeferred();
|
||||
});
|
||||
firstTabEnv.services['bus_service'].addEventListener('disconnect', () => {
|
||||
assert.step('disconnect');
|
||||
});
|
||||
await connectionInitializedDeferred;
|
||||
await connectionOpenedDeferred;
|
||||
|
||||
// second tab connects to the worker after disconnection: user_id
|
||||
// is now false.
|
||||
patchWithCleanup(session, {
|
||||
user_id: false,
|
||||
});
|
||||
const env2 = await makeTestEnv();
|
||||
await env2.services['bus_service'].start();
|
||||
|
||||
assert.verifySteps([
|
||||
'connect',
|
||||
'disconnect',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test('Websocket reconnects upon user log in', async function (assert) {
|
||||
// first tab connects to the worker with no user logged.
|
||||
patchWithCleanup(session, {
|
||||
user_id: false,
|
||||
});
|
||||
const connectionInitializedDeferred = makeDeferred();
|
||||
let websocketConnectedDeferred = makeDeferred();
|
||||
patchWebsocketWorkerWithCleanup({
|
||||
_initializeConnection(client, data) {
|
||||
this._super(client, data);
|
||||
connectionInitializedDeferred.resolve();
|
||||
},
|
||||
});
|
||||
|
||||
const firstTabEnv = await makeTestEnv();
|
||||
await firstTabEnv.services['bus_service'].start();
|
||||
firstTabEnv.services['bus_service'].addEventListener('connect', () => {
|
||||
assert.step("connect");
|
||||
websocketConnectedDeferred.resolve();
|
||||
websocketConnectedDeferred = makeDeferred();
|
||||
});
|
||||
firstTabEnv.services['bus_service'].addEventListener('disconnect', () => {
|
||||
assert.step('disconnect');
|
||||
});
|
||||
await connectionInitializedDeferred;
|
||||
await websocketConnectedDeferred;
|
||||
|
||||
// second tab connects to the worker after connection: user_id
|
||||
// is now set.
|
||||
patchWithCleanup(session, {
|
||||
user_id: 1,
|
||||
});
|
||||
const env = await makeTestEnv();
|
||||
await env.services["bus_service"].start();
|
||||
await websocketConnectedDeferred;
|
||||
assert.verifySteps([
|
||||
'connect',
|
||||
'disconnect',
|
||||
'connect',
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test("WebSocket connects with URL corresponding to session prefix", async function (assert) {
|
||||
patchWebsocketWorkerWithCleanup();
|
||||
const origin = "http://random-website.com";
|
||||
patchWithCleanup(legacySession, {
|
||||
prefix: origin,
|
||||
});
|
||||
const websocketCreatedDeferred = makeDeferred();
|
||||
patchWithCleanup(window, {
|
||||
WebSocket: function (url) {
|
||||
assert.step(url);
|
||||
websocketCreatedDeferred.resolve();
|
||||
return new EventTarget();
|
||||
},
|
||||
}, { pure: true });
|
||||
const env = await makeTestEnv();
|
||||
env.services["bus_service"].start();
|
||||
await websocketCreatedDeferred;
|
||||
assert.verifySteps([`${origin.replace("http", "ws")}/websocket`]);
|
||||
});
|
||||
|
||||
QUnit.test("Disconnect on offline, re-connect on online", async function (assert) {
|
||||
patchWebsocketWorkerWithCleanup();
|
||||
let websocketConnectedDeferred = makeDeferred();
|
||||
const env = await makeTestEnv();
|
||||
env.services["bus_service"].addEventListener("connect", () => {
|
||||
assert.step("connect");
|
||||
websocketConnectedDeferred.resolve();
|
||||
websocketConnectedDeferred = makeDeferred();
|
||||
});
|
||||
env.services["bus_service"].addEventListener("disconnect", () => assert.step("disconnect"));
|
||||
await env.services["bus_service"].start();
|
||||
await websocketConnectedDeferred;
|
||||
window.dispatchEvent(new Event("offline"));
|
||||
await nextTick();
|
||||
window.dispatchEvent(new Event("online"));
|
||||
await websocketConnectedDeferred;
|
||||
assert.verifySteps(["connect", "disconnect", "connect"]);
|
||||
});
|
||||
|
||||
QUnit.test("No disconnect on change offline/online when bus inactive", async function (assert) {
|
||||
patchWebsocketWorkerWithCleanup();
|
||||
const env = await makeTestEnv();
|
||||
env.services["bus_service"].addEventListener("connect", () => assert.step("connect"));
|
||||
env.services["bus_service"].addEventListener("disconnect", () => assert.step("disconnect"));
|
||||
window.dispatchEvent(new Event("offline"));
|
||||
await nextTick();
|
||||
window.dispatchEvent(new Event("online"));
|
||||
await nextTick();
|
||||
assert.verifySteps([]);
|
||||
});
|
||||
|
||||
QUnit.test("Can reconnect after late close event", async function (assert) {
|
||||
let subscribeSent = 0;
|
||||
const closeDeferred = makeDeferred();
|
||||
let openDeferred = makeDeferred();
|
||||
const worker = patchWebsocketWorkerWithCleanup({
|
||||
_onWebsocketOpen() {
|
||||
this._super();
|
||||
openDeferred.resolve();
|
||||
},
|
||||
_sendToServer({ event_name }) {
|
||||
if (event_name === "subscribe") {
|
||||
subscribeSent++;
|
||||
}
|
||||
},
|
||||
});
|
||||
const pyEnv = await startServer();
|
||||
const env = await makeTestEnv();
|
||||
env.services["bus_service"].start();
|
||||
await openDeferred;
|
||||
patchWithCleanup(worker.websocket, {
|
||||
close(code = WEBSOCKET_CLOSE_CODES.CLEAN, reason) {
|
||||
this.readyState = 2;
|
||||
const _super = this._super;
|
||||
if (code === WEBSOCKET_CLOSE_CODES.CLEAN) {
|
||||
closeDeferred.then(() => {
|
||||
// Simulate that the connection could not be closed cleanly.
|
||||
_super(WEBSOCKET_CLOSE_CODES.ABNORMAL_CLOSURE, reason);
|
||||
});
|
||||
} else {
|
||||
_super(code, reason);
|
||||
}
|
||||
},
|
||||
});
|
||||
env.services["bus_service"].addEventListener("connect", () => assert.step("connect"));
|
||||
env.services["bus_service"].addEventListener("disconnect", () => assert.step("disconnect"));
|
||||
env.services["bus_service"].addEventListener("reconnecting", () => assert.step("reconnecting"));
|
||||
env.services["bus_service"].addEventListener("reconnect", () => assert.step("reconnect"));
|
||||
// Connection will be closed when passing offline. But the close event
|
||||
// will be delayed to come after the next open event. The connection
|
||||
// will thus be in the closing state in the meantime.
|
||||
window.dispatchEvent(new Event("offline"));
|
||||
await nextTick();
|
||||
openDeferred = makeDeferred();
|
||||
// Worker reconnects upon the reception of the online event.
|
||||
window.dispatchEvent(new Event("online"));
|
||||
await openDeferred;
|
||||
closeDeferred.resolve();
|
||||
// Trigger the close event, it shouldn't have any effect since it is
|
||||
// related to an old connection that is no longer in use.
|
||||
await nextTick();
|
||||
openDeferred = makeDeferred();
|
||||
// Server closes the connection, the worker should reconnect.
|
||||
pyEnv.simulateConnectionLost(WEBSOCKET_CLOSE_CODES.KEEP_ALIVE_TIMEOUT);
|
||||
await openDeferred;
|
||||
await nextTick();
|
||||
// 3 connections were opened, so 3 subscriptions are expected.
|
||||
assert.strictEqual(subscribeSent, 3);
|
||||
assert.verifySteps([
|
||||
"connect",
|
||||
"disconnect",
|
||||
"connect",
|
||||
"disconnect",
|
||||
"reconnecting",
|
||||
"reconnect",
|
||||
]);
|
||||
});
|
||||
|
||||
QUnit.test(
|
||||
"Fallback on simple worker when shared worker failed to initialize",
|
||||
async function (assert) {
|
||||
const originalSharedWorker = browser.SharedWorker;
|
||||
const originalWorker = browser.Worker;
|
||||
patchWithCleanup(browser, {
|
||||
SharedWorker: function (url, options) {
|
||||
assert.step("shared-worker creation");
|
||||
const sw = new originalSharedWorker(url, options);
|
||||
// Simulate error during shared worker creation.
|
||||
setTimeout(() => sw.dispatchEvent(new Event("error")));
|
||||
return sw;
|
||||
},
|
||||
Worker: function (url, options) {
|
||||
assert.step("worker creation");
|
||||
return new originalWorker(url, options);
|
||||
},
|
||||
}, { pure: true });
|
||||
patchWithCleanup(window.console, {
|
||||
warn(message) {
|
||||
assert.step(message);
|
||||
},
|
||||
})
|
||||
const env = await makeTestEnv();
|
||||
await env.services['bus_service'].start();
|
||||
assert.verifySteps([
|
||||
"shared-worker creation",
|
||||
"Error while loading \"bus_service\" SharedWorker, fallback on Worker.",
|
||||
"worker creation",
|
||||
]);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -0,0 +1,217 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { TEST_USER_IDS } from '@bus/../tests/helpers/test_constants';
|
||||
|
||||
import { registry } from '@web/core/registry';
|
||||
import { registerCleanup } from "@web/../tests/helpers/cleanup";
|
||||
import { makeMockServer } from "@web/../tests/helpers/mock_server";
|
||||
import core from 'web.core';
|
||||
|
||||
const modelDefinitionsPromise = new Promise(resolve => {
|
||||
QUnit.begin(() => resolve(getModelDefinitions()));
|
||||
});
|
||||
|
||||
/**
|
||||
* Fetch model definitions from the server then insert fields present in the
|
||||
* `bus.model.definitions` registry. Use `addModelNamesToFetch`/`insertModelFields`
|
||||
* helpers in order to add models to be fetched, default values to the fields,
|
||||
* fields to a model definition.
|
||||
*
|
||||
* @return {Map<string, Object>} A map from model names to model fields definitions.
|
||||
* @see model_definitions_setup.js
|
||||
*/
|
||||
async function getModelDefinitions() {
|
||||
const modelDefinitionsRegistry = registry.category('bus.model.definitions');
|
||||
const modelNamesToFetch = modelDefinitionsRegistry.get('modelNamesToFetch');
|
||||
const fieldsToInsertRegistry = modelDefinitionsRegistry.category('fieldsToInsert');
|
||||
|
||||
// fetch the model definitions.
|
||||
const formData = new FormData();
|
||||
formData.append('csrf_token', core.csrf_token);
|
||||
formData.append('model_names_to_fetch', JSON.stringify(modelNamesToFetch));
|
||||
const response = await window.fetch('/bus/get_model_definitions', { body: formData, method: 'POST' });
|
||||
if (response.status !== 200) {
|
||||
throw new Error('Error while fetching required models');
|
||||
}
|
||||
const modelDefinitions = new Map(Object.entries(await response.json()));
|
||||
|
||||
for (const [modelName, fields] of modelDefinitions) {
|
||||
// insert fields present in the fieldsToInsert registry : if the field
|
||||
// exists, update its default value according to the one in the
|
||||
// registry; If it does not exist, add it to the model definition.
|
||||
const fieldNamesToFieldToInsert = fieldsToInsertRegistry.category(modelName).getEntries();
|
||||
for (const [fname, fieldToInsert] of fieldNamesToFieldToInsert) {
|
||||
if (fname in fields) {
|
||||
fields[fname].default = fieldToInsert.default;
|
||||
} else {
|
||||
fields[fname] = fieldToInsert;
|
||||
}
|
||||
}
|
||||
// apply default values for date like fields if none was passed.
|
||||
for (const fname in fields) {
|
||||
const field = fields[fname];
|
||||
if (['date', 'datetime'].includes(field.type) && !field.default) {
|
||||
const defaultFieldValue = field.type === 'date'
|
||||
? () => moment.utc().format('YYYY-MM-DD')
|
||||
: () => moment.utc().format("YYYY-MM-DD HH:mm:ss");
|
||||
field.default = defaultFieldValue;
|
||||
} else if (fname === 'active' && !('default' in field)) {
|
||||
// records are active by default.
|
||||
field.default = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
// add models present in the fake models registry to the model definitions.
|
||||
const fakeModels = modelDefinitionsRegistry.category('fakeModels').getEntries();
|
||||
for (const [modelName, fields] of fakeModels) {
|
||||
modelDefinitions.set(modelName, fields);
|
||||
}
|
||||
return modelDefinitions;
|
||||
}
|
||||
|
||||
let pyEnv;
|
||||
/**
|
||||
* Creates an environment that can be used to setup test data as well as
|
||||
* creating data after test start.
|
||||
*
|
||||
* @param {Object} serverData serverData to pass to the mockServer.
|
||||
* @param {Object} [serverData.action] actions to be passed to the mock
|
||||
* server.
|
||||
* @param {Object} [serverData.views] views to be passed to the mock
|
||||
* server.
|
||||
* @returns {Object} An environment that can be used to interact with
|
||||
* the mock server (creation, deletion, update of records...)
|
||||
*/
|
||||
export async function startServer({ actions, views = {} } = {}) {
    const models = {};
    const modelDefinitions = await modelDefinitionsPromise;
    const recordsToInsertRegistry = registry.category('bus.model.definitions').category('recordsToInsert');
    // Build the mock server data: one entry per known model, pre-filled with
    // the records registered through the 'recordsToInsert' registry.
    for (const [modelName, fields] of modelDefinitions) {
        const records = [];
        if (recordsToInsertRegistry.contains(modelName)) {
            // prevent tests from mutating the records.
            records.push(...JSON.parse(JSON.stringify(recordsToInsertRegistry.get(modelName))));
        }
        models[modelName] = { fields: { ...fields }, records };

        // generate default views for this model if none were passed.
        const viewArchsSubRegistries = registry.category('bus.view.archs').subRegistries;
        for (const [viewType, archsRegistry] of Object.entries(viewArchsSubRegistries)) {
            views[`${modelName},false,${viewType}`] =
                views[`${modelName},false,${viewType}`] ||
                archsRegistry.get(modelName, archsRegistry.get('default'));
        }
    }
    // The environment is a Proxy: known properties/helpers are served directly;
    // any other property name is treated as a model name and yields an API
    // object bound to that model (e.g. pyEnv['res.partner'].create(...)).
    pyEnv = new Proxy(
        {
            get currentPartner() {
                return this.mockServer.currentPartner;
            },
            getData() {
                return this.mockServer.models;
            },
            getViews() {
                return views;
            },
            simulateConnectionLost(closeCode) {
                this.mockServer._simulateConnectionLost(closeCode);
            },
            ...TEST_USER_IDS,
        },
        {
            get(target, name) {
                if (target[name]) {
                    return target[name];
                }
                const modelAPI = {
                    /**
                     * Simulate a 'create' operation on a model.
                     *
                     * @param {Object[]|Object} values records to be created.
                     * @returns {integer[]|integer} array of ids if more than one value was passed,
                     * id of created record otherwise.
                     */
                    create(values) {
                        if (!values) {
                            return;
                        }
                        if (!Array.isArray(values)) {
                            values = [values];
                        }
                        const recordIds = values.map(value => target.mockServer.mockCreate(name, value));
                        return recordIds.length === 1 ? recordIds[0] : recordIds;
                    },
                    /**
                     * Simulate a 'search' operation on a model.
                     *
                     * @param {Array} domain
                     * @param {Object} context
                     * @returns {integer[]} array of ids corresponding to the given domain.
                     */
                    search(domain, context = {}) {
                        return target.mockServer.mockSearch(name, [domain], context);
                    },
                    /**
                     * Simulate a `search_count` operation on a model.
                     *
                     * @param {Array} domain
                     * @return {number} count of records matching the given domain.
                     */
                    searchCount(domain) {
                        return this.search(domain).length;
                    },
                    /**
                     * Simulate a 'search_read' operation on a model.
                     *
                     * @param {Array} domain
                     * @param {Object} kwargs
                     * @returns {Object[]} array of records corresponding to the given domain.
                     */
                    searchRead(domain, kwargs = {}) {
                        return target.mockServer.mockSearchRead(name, [domain], kwargs);
                    },
                    /**
                     * Simulate an 'unlink' operation on a model.
                     *
                     * @param {integer[]} ids
                     * @returns {boolean} mockServer 'unlink' method always returns true.
                     */
                    unlink(ids) {
                        return target.mockServer.mockUnlink(name, [ids]);
                    },
                    /**
                     * Simulate a 'write' operation on a model.
                     *
                     * @param {integer[]} ids ids of records to write on.
                     * @param {Object} values values to write on the records matching given ids.
                     * @returns {boolean} mockServer 'write' method always returns true.
                     */
                    write(ids, values) {
                        return target.mockServer.mockWrite(name, [ids, values]);
                    },
                };
                if (name === 'bus.bus') {
                    // Expose the bus notification helpers, bound to the mock
                    // server, so tests can push notifications to clients.
                    modelAPI['_sendone'] = target.mockServer._mockBusBus__sendone.bind(target.mockServer);
                    modelAPI['_sendmany'] = target.mockServer._mockBusBus__sendmany.bind(target.mockServer);
                }
                return modelAPI;
            },
            set(target, name, value) {
                return target[name] = value;
            },
        },
    );
    pyEnv['mockServer'] = await makeMockServer({ actions, models, views });
    // Back-reference so code holding the mock server can reach the environment.
    pyEnv['mockServer'].pyEnv = pyEnv;
    // The environment is a module-level singleton: drop it after each test.
    registerCleanup(() => pyEnv = undefined);
    return pyEnv;
}
|
||||
|
||||
/**
 * Retrieve the current test environment, starting the mock server first when
 * none is running yet.
 *
 * @returns {Object} An environment that can be used to interact with the mock
 * server (creation, deletion, update of records...)
 */
export function getPyEnv() {
    if (pyEnv) {
        return pyEnv;
    }
    return startServer();
}
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { TEST_USER_IDS } from "@bus/../tests/helpers/test_constants";
|
||||
import { patchWebsocketWorkerWithCleanup } from '@bus/../tests/helpers/mock_websocket';
|
||||
|
||||
import { patch } from "@web/core/utils/patch";
|
||||
import { MockServer } from "@web/../tests/helpers/mock_server";
|
||||
|
||||
patch(MockServer.prototype, 'bus', {
    /**
     * Set up bus support on the mock server: copy the test user ids onto the
     * server, patch the websocket worker so that messages it sends are routed
     * back to this mock server, and initialize notification bookkeeping.
     */
    init() {
        this._super(...arguments);
        Object.assign(this, TEST_USER_IDS);
        const self = this;
        this.websocketWorker = patchWebsocketWorkerWithCleanup({
            _sendToServer(message) {
                // Let the mock server react to the message before the regular
                // worker logic runs.
                self._performWebsocketRequest(message);
                this._super(message);
            },
        });
        this.pendingLongpollingPromise = null;
        this.notificationsToBeResolved = [];
        // Monotonically increasing id assigned to outgoing bus notifications.
        this.lastBusNotificationId = 0;
    },

    //--------------------------------------------------------------------------
    // Private
    //--------------------------------------------------------------------------

    /**
     * Dispatch a message received through the websocket to the matching mock
     * handler. Only 'update_presence' is handled here.
     *
     * @param {Object} message Message sent through the websocket to the
     * server.
     * @param {string} [message.event_name]
     * @param {any} [message.data]
     */
    _performWebsocketRequest({ event_name, data }) {
        if (event_name === 'update_presence') {
            const { inactivity_period, im_status_ids_by_model } = data;
            this._mockIrWebsocket__updatePresence(inactivity_period, im_status_ids_by_model);
        }
    },
    /**
     * Simulates `_sendone` on `bus.bus`.
     *
     * @param {string} channel
     * @param {string} notificationType
     * @param {any} message
     */
    _mockBusBus__sendone(channel, notificationType, message) {
        this._mockBusBus__sendmany([[channel, notificationType, message]]);
    },
    /**
     * Simulates `_sendmany` on `bus.bus`: broadcasts each notification to the
     * clients registered on the websocket worker.
     *
     * @param {Array} notifications list of [channel, type, payload] triplets.
     */
    _mockBusBus__sendmany(notifications) {
        if (!notifications.length) {
            return;
        }
        const values = [];
        for (const notification of notifications) {
            // Drop the channel (index 0): clients only receive type/payload.
            const [type, payload] = notification.slice(1, notification.length);
            values.push({ id: this.lastBusNotificationId++, message: { payload, type }});
            if (this.debug) {
                console.log("%c[bus]", "color: #c6e; font-weight: bold;", type, payload);
            }
        }
        this.websocketWorker.broadcast('notification', values);

    },
    /**
     * Simulate the loss of the connection by triggering a close event on
     * the worker websocket.
     *
     * @param {number} closeCode the code to close the connection with.
     */
    _simulateConnectionLost(closeCode) {
        this.websocketWorker.websocket.close(closeCode);
    },
});
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { patch } from "@web/core/utils/patch";
|
||||
import { MockServer } from "@web/../tests/helpers/mock_server";
|
||||
|
||||
patch(MockServer.prototype, 'bus/models/ir_websocket', {
    /**
     * Simulates `_update_presence` on `ir.websocket`: computes the im_status
     * of the requested records and, when there is something to report,
     * notifies the current partner on the 'bus/im_status' channel.
     *
     * @param {number} inactivityPeriod
     * @param {Object} imStatusIdsByModel maps model names to the ids whose
     * im_status should be monitored.
     */
    _mockIrWebsocket__updatePresence(inactivityPeriod, imStatusIdsByModel) {
        const imStatusNotifications = this._mockIrWebsocket__getImStatus(imStatusIdsByModel);
        if (Object.keys(imStatusNotifications).length > 0) {
            this._mockBusBus__sendone(this.currentPartnerId, 'bus/im_status', imStatusNotifications);
        }
    },
    /**
     * Simulates `_get_im_status` on `ir.websocket`.
     *
     * @param {Object} imStatusIdsByModel
     * @param {Number[]} [imStatusIdsByModel."res.partner"] ids of res.partner
     * records whose im_status should be monitored.
     * @returns {Object} im_status records grouped under a 'partners' key.
     */
    _mockIrWebsocket__getImStatus(imStatusIdsByModel) {
        const imStatus = {};
        const { 'res.partner': partnerIds } = imStatusIdsByModel;
        if (partnerIds) {
            // 'active_test: false' so archived partners are included.
            imStatus['partners'] = this.mockSearchRead('res.partner', [[['id', 'in', partnerIds]]], { context: { 'active_test': false }, fields: ['im_status'] });
        }
        return imStatus;
    },
});
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { presenceService } from '@bus/services/presence_service';
|
||||
|
||||
/**
 * Build a presence service definition whose public API can be selectively
 * overridden for tests.
 *
 * @param {Object} [params={}] members that replace those returned by the real
 * presence service's `start`.
 * @returns {Object} a service definition usable in the service registry.
 */
export function makeFakePresenceService(params = {}) {
    const start = (env) => {
        const api = presenceService.start(env);
        return Object.assign({}, api, params);
    };
    return { ...presenceService, start };
}
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { WebsocketWorker } from "@bus/workers/websocket_worker";
|
||||
import { browser } from "@web/core/browser/browser";
|
||||
import { patchWithCleanup } from "@web/../tests/helpers/utils";
|
||||
import { registerCleanup } from "@web/../tests/helpers/cleanup";
|
||||
|
||||
/**
 * Minimal in-memory stand-in for the browser WebSocket: starts CONNECTING,
 * asynchronously flips to OPEN, and supports close/send with both `onX`
 * handler properties and dispatched events.
 */
class WebSocketMock extends EventTarget {
    constructor() {
        super();
        // 0 = CONNECTING; becomes 1 (OPEN) asynchronously, like a real socket.
        this.readyState = 0;
        queueMicrotask(() => {
            this.readyState = 1;
            const event = new Event('open');
            this.onopen(event);
            this.dispatchEvent(event);
        });
    }

    /**
     * Mark the socket CLOSED and fire the 'close' event. A code of 1000
     * counts as a clean closure.
     */
    close(code = 1000, reason) {
        this.readyState = 3;
        const event = new CloseEvent('close', { code, reason, wasClean: code === 1000 });
        this.onclose(event);
        this.dispatchEvent(event);
    }

    // Default no-op handlers, overridable like on a real WebSocket.
    onclose(closeEv) {}
    onerror(errorEv) {}
    onopen(openEv) {}

    /**
     * Accept data silently when OPEN; otherwise fire 'error' and throw,
     * mirroring the browser behavior.
     */
    send(data) {
        if (this.readyState === 1) {
            return;
        }
        const event = new Event('error');
        this.onerror(event);
        this.dispatchEvent(event);
        throw new DOMException("Failed to execute 'send' on 'WebSocket': State is not OPEN");
    }
}
|
||||
|
||||
/**
 * Mock for a SharedWorker hosting the websocket worker: instead of spawning a
 * real worker, the given worker instance is connected to this "tab" through a
 * MessageChannel.
 */
class SharedWorkerMock extends EventTarget {
    constructor(websocketWorker) {
        super();
        this._websocketWorker = websocketWorker;
        this._messageChannel = new MessageChannel();
        // Client-facing port, as exposed by a real SharedWorker.
        this.port = this._messageChannel.port1;
        // port 1 should be started by the service itself.
        this._messageChannel.port2.start();
        // Hand the worker-side port to the websocket worker.
        this._websocketWorker.registerClient(this._messageChannel.port2);
    }
}
|
||||
|
||||
/**
 * Mock for a dedicated (non-shared) Worker. Unlike a SharedWorker, messages
 * are posted on the worker object itself, so the client port is started here
 * and `postMessage` is forwarded to it.
 */
class WorkerMock extends SharedWorkerMock {
    constructor(websocketWorker) {
        super(websocketWorker);
        this.port.start();
        this.postMessage = this.port.postMessage.bind(this.port);
    }
}
|
||||
|
||||
// Singleton worker shared by every mocked (Shared)Worker of the current test.
let websocketWorker;
/**
 * @param {*} params Parameters used to patch the websocket worker.
 * @returns {WebsocketWorker} Instance of the worker which will run during the
 * test. Useful to interact with the worker in order to test the
 * websocket behavior.
 */
export function patchWebsocketWorkerWithCleanup(params = {}) {
    // Replace the browser constructors by in-memory mocks for the test's
    // duration.
    patchWithCleanup(window, {
        WebSocket: function () {
            return new WebSocketMock();
        },
    }, { pure: true });
    // Patch the prototype on the first call of a test (before the instance
    // exists), the shared instance on subsequent calls.
    patchWithCleanup(websocketWorker || WebsocketWorker.prototype, params);
    websocketWorker = websocketWorker || new WebsocketWorker();
    patchWithCleanup(browser, {
        SharedWorker: function () {
            const sharedWorker = new SharedWorkerMock(websocketWorker);
            registerCleanup(() => {
                sharedWorker._messageChannel.port1.close();
                sharedWorker._messageChannel.port2.close();
            });
            return sharedWorker;
        },
        Worker: function () {
            const worker = new WorkerMock(websocketWorker);
            registerCleanup(() => {
                worker._messageChannel.port1.close();
                worker._messageChannel.port2.close();
            });
            return worker;
        },
    }, { pure: true });
    // Reset the singleton (and any pending reconnect timer) after each test.
    registerCleanup(() => {
        if (websocketWorker) {
            clearTimeout(websocketWorker.connectTimeout);
            websocketWorker = null;
        }
    });
    return websocketWorker;
}
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { registry } from '@web/core/registry';
|
||||
|
||||
// Registries used to customize the model definitions used in tests:
// extra fields, pre-inserted records, and fully fake models.
const modelDefinitionsRegistry = registry.category('bus.model.definitions');
const customModelFieldsRegistry = modelDefinitionsRegistry.category('fieldsToInsert');
const recordsToInsertRegistry = modelDefinitionsRegistry.category('recordsToInsert');
const fakeModelsRegistry = modelDefinitionsRegistry.category('fakeModels');
|
||||
/**
 * Add models whose definitions need to be fetched on the server.
 *
 * @param {string[]} modelNames
 */
export function addModelNamesToFetch(modelNames) {
    // Lazily create the backing list on first use.
    if (!modelDefinitionsRegistry.contains('modelNamesToFetch')) {
        modelDefinitionsRegistry.add('modelNamesToFetch', []);
    }
    modelDefinitionsRegistry.get('modelNamesToFetch').push(...modelNames);
}
|
||||
|
||||
/**
 * Add models that will be added to the model definitions. We should
 * avoid to rely on fake models and use real models instead.
 *
 * @param {string} modelName
 * @param {Object} fields field definitions of the fake model.
 */
export function addFakeModel(modelName, fields) {
    fakeModelsRegistry.add(modelName, fields);
}
|
||||
|
||||
/**
 * Add model fields that are not present on the server side model's definitions
 * but are required to ease testing or add default values for existing fields.
 *
 * @param {string} modelName
 * @param {Object} fieldNamesToFields mapping of field name to field definition.
 */
export function insertModelFields(modelName, fieldNamesToFields) {
    const registryForModel = customModelFieldsRegistry.category(modelName);
    for (const fieldName in fieldNamesToFields) {
        registryForModel.add(fieldName, fieldNamesToFields[fieldName]);
    }
}
|
||||
|
||||
/**
 * Add records to the initial server data.
 *
 * @param {string} modelName
 * @param {Object[]} records
 */
export function insertRecords(modelName, records) {
    const alreadyRegistered = recordsToInsertRegistry.contains(modelName);
    if (!alreadyRegistered) {
        recordsToInsertRegistry.add(modelName, []);
    }
    const recordList = recordsToInsertRegistry.get(modelName);
    recordList.push(...records);
}
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { TEST_GROUP_IDS, TEST_USER_IDS } from '@bus/../tests/helpers/test_constants';
|
||||
import {
|
||||
addModelNamesToFetch,
|
||||
insertModelFields,
|
||||
insertRecords
|
||||
} from '@bus/../tests/helpers/model_definitions_helpers';
|
||||
|
||||
//--------------------------------------------------------------------------
|
||||
// Models
|
||||
//--------------------------------------------------------------------------
|
||||
|
||||
// Real model definitions fetched from the server for bus tests.
addModelNamesToFetch([
    'ir.attachment', 'ir.model', 'ir.model.fields', 'res.company', 'res.country',
    'res.groups', 'res.partner', 'res.users'
]);

//--------------------------------------------------------------------------
// Insertion of fields
//--------------------------------------------------------------------------

insertModelFields('res.partner', {
    description: { string: 'description', type: 'text' },
});

//--------------------------------------------------------------------------
// Insertion of records
//--------------------------------------------------------------------------

// Default data set: one company, the internal user group, the admin and
// public users, and their partners (ids come from test_constants).
insertRecords('res.company', [{ id: 1 }]);
insertRecords('res.groups', [
    { id: TEST_GROUP_IDS.groupUserId, name: "Internal User" },
]);
insertRecords('res.users', [
    { display_name: "Your Company, Mitchell Admin", id: TEST_USER_IDS.currentUserId, name: "Mitchell Admin", partner_id: TEST_USER_IDS.currentPartnerId, },
    { active: false, display_name: "Public user", id: TEST_USER_IDS.publicUserId, name: "Public user", partner_id: TEST_USER_IDS.publicPartnerId, },
]);
insertRecords('res.partner', [
    { active: false, display_name: "Public user", id: TEST_USER_IDS.publicPartnerId, is_public: true },
    { display_name: "Your Company, Mitchell Admin", id: TEST_USER_IDS.currentPartnerId, name: "Mitchell Admin", },
    { active: false, display_name: "OdooBot", id: TEST_USER_IDS.partnerRootId, name: "OdooBot" },
]);
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
// Group ids used in the mock server's default test data.
export const TEST_GROUP_IDS = {
    groupUserId: 11,
};

// User/partner ids used in the mock server's default test data.
// NOTE(review): numeric values overlap across models (e.g. currentUserId and
// partnerRootId are both 2); they identify records of different models, not
// the same entity.
export const TEST_USER_IDS = {
    partnerRootId: 2,
    currentPartnerId: 3,
    currentUserId: 2,
    publicPartnerId: 4,
    publicUserId: 3,
};
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { registry } from '@web/core/registry';
|
||||
|
||||
// Per-view-type registries holding a default arch plus model-specific archs,
// used by startServer when tests do not provide their own views.
const viewArchsRegistry = registry.category('bus.view.archs');
const activityArchsRegistry = viewArchsRegistry.category('activity');
const formArchsRegistry = viewArchsRegistry.category('form');
const kanbanArchsRegistry = viewArchsRegistry.category('kanban');
const listArchsRegistry = viewArchsRegistry.category('list');
const searchArchsRegistry = viewArchsRegistry.category('search');

activityArchsRegistry.add('default', '<activity><templates></templates></activity>');
formArchsRegistry.add('default', '<form/>');
// Fixed: the kanban default arch was missing its closing </kanban> tag,
// leaving the arch unbalanced (all sibling archs are well-formed).
kanbanArchsRegistry.add('default', '<kanban><templates></templates></kanban>');
listArchsRegistry.add('default', '<tree/>');
searchArchsRegistry.add('default', '<search/>');

formArchsRegistry.add(
    'res.partner',
    `<form>
        <sheet>
            <field name="name"/>
        </sheet>
        <div class="oe_chatter">
            <field name="activity_ids"/>
            <field name="message_follower_ids"/>
            <field name="message_ids"/>
        </div>
    </form>`
);
formArchsRegistry.add(
    'res.fake',
    `<form>
        <div class="oe_chatter">
            <field name="message_ids"/>
        </div>
    </form>`
);
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/* @odoo-module */
|
||||
|
||||
import { patchWebsocketWorkerWithCleanup } from "@bus/../tests/helpers/mock_websocket";
|
||||
|
||||
import { makeDeferred } from "@web/../tests/helpers/utils";
|
||||
import { registerCleanup } from "@web/../tests/helpers/cleanup";
|
||||
import { patch, unpatch } from "@web/core/utils/patch";
|
||||
|
||||
// Should be enough to decide whether or not notifications/channel
// subscriptions... are received.
const TIMEOUT = 500;

/**
 * Returns a deferred that resolves when the given channel(s) addition/deletion
 * is notified to the websocket worker.
 *
 * @param {string[]} channels
 * @param {object} [options={}]
 * @param {"add"|"delete"} [options.operation="add"]
 *
 * @returns {import("@web/core/utils/concurrency").Deferred} */
export function waitForChannels(channels, { operation = "add" } = {}) {
    // Unique patch key so concurrent waits don't clobber each other.
    const uuid = String(Date.now() + Math.random());
    const missingChannels = new Set(channels);
    const deferred = makeDeferred();
    // Settle the deferred: silently return while channels are still missing,
    // unless `crashOnFail` forces a verdict (timeout / test teardown).
    function check({ crashOnFail = false } = {}) {
        const success = missingChannels.size === 0;
        if (!success && !crashOnFail) {
            return;
        }
        unpatch(worker, uuid);
        clearTimeout(failTimeout);
        const msg = success
            ? `Channel(s) [${channels.join(", ")}] ${operation === "add" ? "added" : "deleted"}.`
            : `Waited ${TIMEOUT}ms for [${channels.join(", ")}] to be ${
                  operation === "add" ? "added" : "deleted"
              }`;
        QUnit.assert.ok(success, msg);
        if (success) {
            deferred.resolve();
        } else {
            deferred.reject(new Error(msg));
        }
    }
    const failTimeout = setTimeout(() => check({ crashOnFail: true }), TIMEOUT);
    registerCleanup(() => {
        // Fixed: `missingChannels` is a Set, so the guard must read `size`;
        // the previous `.length > 0` was always false (undefined > 0),
        // making this cleanup check dead code.
        if (missingChannels.size > 0) {
            check({ crashOnFail: true });
        }
    });
    const worker = patchWebsocketWorkerWithCleanup();
    patch(worker, uuid, {
        async [operation === "add" ? "_addChannel" : "_deleteChannel"](client, channel) {
            await this._super(client, channel);
            missingChannels.delete(channel);
            check();
        },
    });
    return deferred;
}
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
/** @odoo-module **/
|
||||
|
||||
import { multiTabService } from '../src/multi_tab_service';
|
||||
|
||||
import { browser } from '@web/core/browser/browser';
|
||||
import { registry } from '@web/core/registry';
|
||||
import { makeTestEnv } from '@web/../tests/helpers/mock_env';
|
||||
import { patchWithCleanup, nextTick } from '@web/../tests/helpers/utils';
|
||||
|
||||
QUnit.module('bus', function () {
    QUnit.module('multi_tab_service_tests.js');

    QUnit.test('multi tab service elects new master on pagehide', async function (assert) {
        assert.expect(5);

        registry.category('services').add('multi_tab', multiTabService);

        const firstTabEnv = await makeTestEnv();
        assert.ok(firstTabEnv.services['multi_tab'].isOnMainTab(), 'only tab should be the main one');

        // prevent second tab from receiving pagehide event.
        patchWithCleanup(browser, {
            addEventListener(eventName, callback) {
                if (eventName === 'pagehide') {
                    return;
                }
                this._super(eventName, callback);
            },
        });
        const secondTabEnv = await makeTestEnv();
        firstTabEnv.services["multi_tab"].bus.addEventListener("no_longer_main_tab", () =>
            assert.step("tab1 no_longer_main_tab")
        );
        secondTabEnv.services["multi_tab"].bus.addEventListener("no_longer_main_tab", () =>
            assert.step("tab2 no_longer_main_tab")
        );
        // Only the first tab observes this event (see patch above).
        window.dispatchEvent(new Event('pagehide'));

        // Let the multi tab elect a new main.
        await nextTick();
        assert.notOk(firstTabEnv.services['multi_tab'].isOnMainTab());
        assert.ok(secondTabEnv.services['multi_tab'].isOnMainTab());
        assert.verifySteps(['tab1 no_longer_main_tab']);
    });

    QUnit.test('multi tab allow to share values between tabs', async function (assert) {
        assert.expect(3);

        registry.category('services').add('multi_tab', multiTabService);

        const firstTabEnv = await makeTestEnv();
        const secondTabEnv = await makeTestEnv();

        // Values set by one tab are readable from the other.
        firstTabEnv.services['multi_tab'].setSharedValue('foo', 1);
        assert.deepEqual(secondTabEnv.services['multi_tab'].getSharedValue('foo'), 1);
        firstTabEnv.services['multi_tab'].setSharedValue('foo', 2);
        assert.deepEqual(secondTabEnv.services['multi_tab'].getSharedValue('foo'), 2);

        firstTabEnv.services['multi_tab'].removeSharedValue('foo');
        assert.notOk(secondTabEnv.services['multi_tab'].getSharedValue('foo'));
    });

    QUnit.test('multi tab triggers shared_value_updated', async function (assert) {
        assert.expect(4);

        registry.category('services').add('multi_tab', multiTabService);

        const firstTabEnv = await makeTestEnv();
        const secondTabEnv = await makeTestEnv();

        // detail.newValue is JSON-encoded; removal is reported as "null".
        secondTabEnv.services['multi_tab'].bus.addEventListener('shared_value_updated', ({ detail }) => {
            assert.step(`${detail.key} - ${JSON.parse(detail.newValue)}`);
        });
        firstTabEnv.services['multi_tab'].setSharedValue('foo', 'bar');
        firstTabEnv.services['multi_tab'].setSharedValue('foo', 'foo');
        firstTabEnv.services['multi_tab'].removeSharedValue('foo');

        await nextTick();
        assert.verifySteps([
            'foo - bar',
            'foo - foo',
            'foo - null',
        ]);
    });

    QUnit.test('multi tab triggers become_master', async function (assert) {
        registry.category('services').add('multi_tab', multiTabService);

        await makeTestEnv();
        // prevent second tab from receiving pagehide event.
        patchWithCleanup(browser, {
            addEventListener(eventName, callback) {
                if (eventName === 'pagehide') {
                    return;
                }
                this._super(eventName, callback);
            },
        });
        const secondTabEnv = await makeTestEnv();
        secondTabEnv.services['multi_tab'].bus.addEventListener('become_main_tab', () => assert.step('become_main_tab'));
        window.dispatchEvent(new Event('pagehide'));

        // Let the multi tab elect a new main.
        await nextTick();
        assert.verifySteps(['become_main_tab']);
    });
});
|
||||
|
|
@ -0,0 +1,103 @@
|
|||
/** @odoo-module */
|
||||
|
||||
import { WEBSOCKET_CLOSE_CODES } from "@bus/workers/websocket_worker";
|
||||
import { patchWebsocketWorkerWithCleanup } from '@bus/../tests/helpers/mock_websocket';
|
||||
|
||||
import { nextTick, patchWithCleanup } from "@web/../tests/helpers/utils";
|
||||
|
||||
QUnit.module('Websocket Worker');

QUnit.test('connect event is broadcasted after calling start', async function (assert) {
    assert.expect(2);

    const worker = patchWebsocketWorkerWithCleanup({
        broadcast(type) {
            assert.step(`broadcast ${type}`);
        },
    });
    worker._start();
    // Wait for the websocket to connect.
    await nextTick();
    assert.verifySteps(['broadcast connect']);
});

QUnit.test('disconnect event is broadcasted', async function (assert) {
    assert.expect(3);

    const worker = patchWebsocketWorkerWithCleanup({
        broadcast(type) {
            assert.step(`broadcast ${type}`);
        },
    });
    // Fixed: terminate the statement with ';' instead of relying on ASI,
    // consistently with the first test of this file.
    worker._start();
    // Wait for the websocket to connect.
    await nextTick();
    worker.websocket.close(WEBSOCKET_CLOSE_CODES.CLEAN);
    // Wait for the websocket to disconnect.
    await nextTick();

    assert.verifySteps([
        'broadcast connect',
        'broadcast disconnect',
    ]);
});

QUnit.test('reconnecting/reconnect event is broadcasted', async function (assert) {
    assert.expect(5);

    // Patch setTimeout in order for the worker to reconnect immediately.
    patchWithCleanup(window, {
        setTimeout: fn => fn(),
    });
    const worker = patchWebsocketWorkerWithCleanup({
        broadcast(type) {
            assert.step(`broadcast ${type}`);
        },
    });
    worker._start();
    // Wait for the websocket to connect.
    await nextTick();
    // A non-clean close code triggers the reconnection logic.
    worker.websocket.close(WEBSOCKET_CLOSE_CODES.ABNORMAL_CLOSURE);
    // Wait for the disconnect/reconnecting/reconnect events.
    await nextTick();

    assert.verifySteps([
        'broadcast connect',
        'broadcast disconnect',
        'broadcast reconnecting',
        'broadcast reconnect',
    ]);
});

QUnit.test('notification event is broadcasted', async function (assert) {
    assert.expect(3);

    const notifications = [{
        id: 70,
        message: {
            type: "bundle_changed",
            payload: {
                server_version: '15.5alpha1+e',
            },
        },
    }];
    const worker = patchWebsocketWorkerWithCleanup({
        broadcast(type, message) {
            if (type === 'notification') {
                assert.step(`broadcast ${type}`);
                assert.deepEqual(message, notifications);
            }
        },
    });
    worker._start();
    // Wait for the websocket to connect.
    await nextTick();

    // Simulate the server pushing a message through the websocket.
    worker.websocket.dispatchEvent(new MessageEvent('message', {
        data: JSON.stringify(notifications),
    }));

    assert.verifySteps([
        'broadcast notification',
    ]);
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue