19.0 vanilla

This commit is contained in:
Ernad Husremovic 2026-03-09 09:30:27 +01:00
parent d1963a3c3a
commit 2d3ee4855a
7430 changed files with 2687981 additions and 2965473 deletions

View file

@ -5,25 +5,28 @@ import json
import logging
import math
import os
import random
import selectors
import threading
import time
from psycopg2 import InterfaceError, sql
from psycopg2 import InterfaceError
from psycopg2.pool import PoolError
import odoo
from ..tools import orjson
from odoo import api, fields, models
from odoo.service.server import CommonServer
from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT
from odoo.tools import date_utils
from odoo.tools import json_default, SQL
from odoo.tools.constants import GC_UNLINK_LIMIT
from odoo.tools.misc import OrderedSet
_logger = logging.getLogger(__name__)

# Longpolling timeout: how long a poll request may wait for notifications (seconds).
TIMEOUT = 50

# Default retention of bus.bus rows before garbage collection (24 hours);
# overridable via the `bus.gc_retention_seconds` config parameter.
DEFAULT_GC_RETENTION_SECONDS = 60 * 60 * 24

# custom function to call instead of default PostgreSQL's `pg_notify`
ODOO_NOTIFY_FUNCTION = os.getenv('ODOO_NOTIFY_FUNCTION', 'pg_notify')
def get_notify_payload_max_length(default=8000):
@ -40,11 +43,11 @@ def get_notify_payload_max_length(default=8000):
# Maximum payload size accepted for a single NOTIFY, computed once at import
# time by the helper above (defaults to 8000 — TODO confirm against the
# truncated helper body).
NOTIFY_PAYLOAD_MAX_LENGTH = get_notify_payload_max_length()
# ---------------------------------------------------------
# Bus
# ---------------------------------------------------------
def json_dump(v):
    """Serialize ``v`` to compact JSON (no whitespace separators).

    Used for both the ``channel`` and ``message`` columns so values compare
    byte-for-byte in SQL. Falls back to ``json_default`` for non-JSON types.
    """
    return json.dumps(v, separators=(',', ':'), default=json_default)
def hashable(key):
if isinstance(key, list):
@ -55,6 +58,8 @@ def hashable(key):
def channel_with_db(dbname, channel):
    """Normalize ``channel`` by prepending the database name.

    Records become ``(db, model_name, id)``; a ``(record, tag)`` pair becomes
    ``(db, model_name, id, tag)``; a plain string becomes ``(db, string)``;
    anything else is returned untouched.
    """
    if isinstance(channel, models.Model):
        return (dbname, channel._name, channel.id)
    looks_like_record_pair = (
        isinstance(channel, tuple)
        and len(channel) == 2
        and isinstance(channel[0], models.Model)
    )
    if looks_like_record_pair:
        record, tag = channel
        return (dbname, record._name, record.id, tag)
    if isinstance(channel, str):
        return (dbname, channel)
    return channel
@ -80,9 +85,9 @@ def get_notify_payloads(channels):
get_notify_payloads(channels[pivot:]))
class BusBus(models.Model):
    """Communication bus: short-lived notification rows fetched by clients."""

    _name = 'bus.bus'
    _description = 'Communication Bus'

    channel = fields.Char('Channel')
@ -90,75 +95,107 @@ class ImBus(models.Model):
@api.autovacuum
def _gc_messages(self):
    """Garbage-collect bus messages older than the retention window.

    The window defaults to ``DEFAULT_GC_RETENTION_SECONDS`` (24 hours) and
    can be tuned via the ``bus.gc_retention_seconds`` config parameter.
    """
    gc_retention_seconds = int(
        self.env["ir.config_parameter"]
        .sudo()
        .get_param("bus.gc_retention_seconds", DEFAULT_GC_RETENTION_SECONDS)
    )
    timeout_ago = fields.Datetime.now() - datetime.timedelta(seconds=gc_retention_seconds)
    # Direct SQL to avoid ORM overhead; this way we can delete millions of rows quickly.
    # This is a low-level table with no expected references, and doing this avoids
    # the need to split or reschedule this GC job.
    self.env.cr.execute("DELETE FROM bus_bus WHERE create_date < %s", (timeout_ago,))
@api.model
def _sendmany(self, notifications):
    """Backward-compatible batch helper: send each ``(target, type, message)``
    triple through :meth:`_sendone`."""
    for target, notification_type, message in notifications:
        self._sendone(target, notification_type, message)

def _sendone(self, target, notification_type, message):
    """Low-level method to send ``notification_type`` and ``message`` to ``target``.

    Using ``_bus_send()`` from ``bus.listener.mixin`` is recommended for simplicity and
    security.

    When using ``_sendone`` directly, ``target`` (if str) should not be guessable by an
    attacker.
    """
    self._ensure_hooks()
    channel = channel_with_db(self.env.cr.dbname, target)
    # Buffer the row until precommit so every notification of the current
    # transaction is created in a single batch.
    self.env.cr.precommit.data["bus.bus.values"].append(
        {
            "channel": json_dump(channel),
            "message": json_dump(
                {
                    "type": notification_type,
                    "payload": message,
                }
            ),
        }
    )
    self.env.cr.postcommit.data["bus.bus.channels"].add(channel)

def _ensure_hooks(self):
    """Register, once per transaction, the precommit hook that creates the
    buffered bus.bus rows and the postcommit hook that NOTIFYs listeners."""
    if "bus.bus.values" not in self.env.cr.precommit.data:
        self.env.cr.precommit.data["bus.bus.values"] = []

        @self.env.cr.precommit.add
        def create_bus():
            self.sudo().create(self.env.cr.precommit.data.pop("bus.bus.values"))

    if "bus.bus.channels" not in self.env.cr.postcommit.data:
        self.env.cr.postcommit.data["bus.bus.channels"] = OrderedSet()

        # We have to wait until the notifications are commited in database.
        # When calling `NOTIFY imbus`, notifications will be fetched in the
        # bus table. If the transaction is not commited yet, there will be
        # nothing to fetch, and the websocket will return no notification.
        @self.env.cr.postcommit.add
        def notify():
            payloads = get_notify_payloads(
                list(self.env.cr.postcommit.data.pop("bus.bus.channels"))
            )
            if len(payloads) > 1:
                _logger.info(
                    "The imbus notification payload was too large, it's been split into %d payloads.",
                    len(payloads),
                )
            with odoo.sql_db.db_connect("postgres").cursor() as cr:
                for payload in payloads:
                    cr.execute(
                        SQL(
                            "SELECT %s('imbus', %s)",
                            SQL.identifier(ODOO_NOTIFY_FUNCTION),
                            payload,
                        )
                    )
@api.model
def _poll(self, channels, last=0, ignore_ids=None):
    """Return pending notifications for ``channels``.

    :param channels: channels to fetch (normalized with the current db name)
    :param last: id of the last notification already received; ``0`` means
        first poll, which returns the recent "buffer" (last TIMEOUT seconds)
    :param ignore_ids: optional notification ids to exclude from the result
    :return: list of ``{'id': ..., 'message': ...}`` dicts
    """
    # first poll return the notification in the 'buffer'
    if last == 0:
        timeout_ago = fields.Datetime.now() - datetime.timedelta(seconds=TIMEOUT)
        domain = [('create_date', '>', timeout_ago)]
    else:  # else returns the unread notifications
        domain = [('id', '>', last)]
    if ignore_ids:
        domain.append(("id", "not in", ignore_ids))
    channels = [json_dump(channel_with_db(self.env.cr.dbname, c)) for c in channels]
    domain.append(('channel', 'in', channels))
    # only the message column is needed; channel was just used for filtering
    notifications = self.sudo().search_read(domain, ["message"])
    result = []
    for notif in notifications:
        result.append({
            'id': notif['id'],
            'message': orjson.loads(notif['message']),
        })
    return result
def _bus_last_id(self):
    """Return the id of the most recent bus.bus record, or 0 if none exist."""
    newest = self.env['bus.bus'].search([], order='id desc', limit=1)
    if not newest:
        return 0
    return newest.id
# ---------------------------------------------------------
# Dispatcher
# ---------------------------------------------------------
class BusSubscription:
def __init__(self, channels, last):
@ -211,7 +248,7 @@ class ImDispatch(threading.Thread):
conn.poll()
channels = []
while conn.notifies:
channels.extend(json.loads(conn.notifies.pop().payload))
channels.extend(orjson.loads(conn.notifies.pop().payload))
# relay notifications to websockets that have
# subscribed to the corresponding channels.
websockets = set()
@ -225,7 +262,7 @@ class ImDispatch(threading.Thread):
try:
self.loop()
except Exception as exc:
if isinstance(exc, InterfaceError) and stop_event.is_set():
if isinstance(exc, (InterfaceError, PoolError)) and stop_event.is_set():
continue
_logger.exception("Bus.loop error, sleep and retry")
time.sleep(TIMEOUT)