mirror of
https://github.com/bringout/oca-ocb-core.git
synced 2026-04-20 12:32:02 +02:00
19.0 vanilla
This commit is contained in:
parent
d1963a3c3a
commit
2d3ee4855a
7430 changed files with 2687981 additions and 2965473 deletions
|
|
@@ -1,5 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from . import alias_error
|
||||
from . import discuss
|
||||
from . import link_preview
|
||||
from . import mail_validation
|
||||
from . import credentials
|
||||
from . import parser
|
||||
from . import web_push
|
||||
|
|
|
|||
18
odoo-bringout-oca-ocb-mail/mail/tools/alias_error.py
Normal file
18
odoo-bringout-oca-ocb-mail/mail/tools/alias_error.py
Normal file
|
|
@@ -0,0 +1,18 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class AliasError:
    """Immutable description of a mail-alias processing error.

    Two instances compare equal when their ``code`` matches; ``message`` and
    ``is_config_error`` are excluded from comparison (``compare=False``).

    Attributes:
        code: machine-readable error code (the only field used for equality)
        message: translated, user-facing message
        is_config_error: True when the error was caused by a mis-configured alias
    """

    # Only `code` participates in the generated __eq__/__hash__.
    code: str
    message: str = field(default='', compare=False)
    is_config_error: bool = field(default=False, compare=False)
|
||||
|
|
@@ -1,12 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
def get_twilio_credentials(env):
    """Read the Twilio credentials from the system parameters.

    To be overridable if we need to obtain credentials from another source.
    :return: tuple(account_sid: str, auth_token: str)
    """
    config = env["ir.config_parameter"].sudo()
    return (
        config.get_param("mail.twilio_account_sid"),
        config.get_param("mail.twilio_account_token"),
    )
|
||||
564
odoo-bringout-oca-ocb-mail/mail/tools/discuss.py
Normal file
564
odoo-bringout-oca-ocb-mail/mail/tools/discuss.py
Normal file
|
|
@@ -0,0 +1,564 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from datetime import date, datetime
|
||||
from functools import wraps
|
||||
from markupsafe import Markup
|
||||
|
||||
import odoo
|
||||
from odoo import models
|
||||
from odoo.exceptions import MissingError
|
||||
from odoo.http import request
|
||||
from odoo.tools import groupby
|
||||
from odoo.addons.bus.websocket import wsrequest
|
||||
|
||||
def add_guest_to_context(func):
    """Decorator extracting the guest from the current HTTP or websocket
    request and exposing it on the context of that request (and, when the
    decorated callable is bound to a recordset, on the recordset context).
    """

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        req = request or wsrequest
        cookie_token = (
            req.cookies.get(req.env["mail.guest"]._cookie_name, "")
        )
        guest = req.env["mail.guest"]._get_guest_from_token(cookie_token)
        # Lazily persist the guest timezone, but never on a read-only cursor.
        if guest and not guest.timezone and not req.env.cr.readonly:
            if tz := req.env["mail.guest"]._get_timezone_from_request(req):
                guest._update_timezone(tz)
        if guest:
            req.update_context(guest=guest)
            if isinstance(self, models.BaseModel):
                self = self.with_context(guest=guest)
        return func(self, *args, **kwargs)

    return wrapper
|
||||
|
||||
|
||||
def get_twilio_credentials(env) -> tuple[str | None, str | None]:
|
||||
"""
|
||||
To be overridable if we need to obtain credentials from another source.
|
||||
:return: tuple(account_sid: str, auth_token: str) or (None, None) if Twilio is disabled
|
||||
"""
|
||||
params = env["ir.config_parameter"].sudo()
|
||||
if not params.get_param("mail.use_twilio_rtc_servers"):
|
||||
return None, None
|
||||
account_sid = params.get_param("mail.twilio_account_sid")
|
||||
auth_token = params.get_param("mail.twilio_account_token")
|
||||
return account_sid, auth_token
|
||||
|
||||
|
||||
def get_sfu_url(env) -> str | None:
|
||||
params = env["ir.config_parameter"].sudo()
|
||||
sfu_url = params.get_param("mail.sfu_server_url") if params.get_param("mail.use_sfu_server") else None
|
||||
if not sfu_url:
|
||||
sfu_url = os.getenv("ODOO_SFU_URL")
|
||||
if sfu_url:
|
||||
return sfu_url.rstrip("/")
|
||||
|
||||
|
||||
def get_sfu_key(env) -> str | None:
|
||||
sfu_key = env['ir.config_parameter'].sudo().get_param('mail.sfu_server_key')
|
||||
if not sfu_key:
|
||||
return os.getenv("ODOO_SFU_KEY")
|
||||
return sfu_key
|
||||
|
||||
|
||||
# Identifying-field names per JS model name. Any model not listed below is
# keyed by its plain "id"; singletons use an empty tuple.
ids_by_model = defaultdict(lambda: ("id",))
ids_by_model["DiscussApp"] = ()
ids_by_model["mail.thread"] = ("model", "id")
ids_by_model["MessageReactions"] = ("message", "content")
ids_by_model["Rtc"] = ()
ids_by_model["Store"] = ()
|
||||
|
||||
|
||||
class Store:
    """Helper to build a dict of data for sending to web client.
    It supports merging of data from multiple sources, either through list extend or dict update.
    The keys of data are the name of models as defined in mail JS code, and the values are any
    format supported by store.insert() method (single dict or list of dict for each model name)."""

    def __init__(self, bus_channel=None, bus_subchannel=None):
        # data: {model_name: {index_tuple: values_dict}} for indexed models,
        # or {model_name: values_dict} for singletons (see ids_by_model).
        self.data = {}
        # data_id: identifier of the client data request being resolved, if any.
        self.data_id = None
        self.target = Store.Target(bus_channel, bus_subchannel)

    def add(self, records, fields=None, extra_fields=None, as_thread=False, **kwargs):
        """Add records to the store. Data is coming from _to_store() method of the model if it is
        defined, and fallbacks to _read_format() otherwise.
        Relations are defined with Store.One() or Store.Many() instead of a field name as string.

        Use case: to add records and their fields to store. This is the preferred method.
        """
        if not records:
            return self
        assert isinstance(records, models.Model)
        if fields is None:
            if as_thread:
                fields = []
            else:
                # Default field list is delegated to the model when it opts in.
                fields = (
                    records._to_store_defaults(self.target)
                    if hasattr(records, "_to_store_defaults")
                    else []
                )
        fields = self._format_fields(records, fields) + self._format_fields(records, extra_fields)
        if as_thread:
            if hasattr(records, "_thread_to_store"):
                records._thread_to_store(self, fields, **kwargs)
            else:
                # kwargs are only meaningful for models defining the hook above.
                assert not kwargs
                self.add_records_fields(records, fields, as_thread=True)
        else:
            if hasattr(records, "_to_store"):
                records._to_store(self, fields, **kwargs)
            else:
                assert not kwargs
                self.add_records_fields(records, fields)
        return self

    def add_global_values(self, **values):
        """Add global values to the store. Global values are stored in the Store singleton
        (mail.store service) in the client side.

        Use case: to add global values."""
        self.add_singleton_values("Store", values)
        return self

    def add_model_values(self, model_name, values):
        """Add values to a model in the store.

        Use case: to add values to JS records that don't have a corresponding Python record.
        Note: for python records adding model values is discouraged in favor of using Store.add().
        """
        if not values:
            return self
        index = self._get_record_index(model_name, values)
        self._ensure_record_at_index(model_name, index)
        self._add_values(values, model_name, index)
        # Re-adding values cancels a previously requested deletion of the record.
        if "_DELETE" in self.data[model_name][index]:
            del self.data[model_name][index]["_DELETE"]
        return self

    def add_records_fields(self, records, fields, as_thread=False):
        """Same as Store.add() but without calling _to_store().

        Use case: to add fields from inside _to_store() methods to avoid recursive code.
        Note: in all other cases, Store.add() should be called instead.
        """
        if not records:
            return self
        assert isinstance(records, models.Model)
        if not fields:
            return self
        fields = self._format_fields(records, fields)
        for record, record_data_list in zip(records, self._get_records_data_list(records, fields)):
            for record_data in record_data_list:
                if as_thread:
                    # Threads are keyed by (model, id), see ids_by_model.
                    self.add_model_values(
                        "mail.thread", {"id": record.id, "model": record._name, **record_data},
                    )
                else:
                    self.add_model_values(record._name, {"id": record.id, **record_data})
        return self

    def add_singleton_values(self, model_name, values):
        """Add values to the store for a singleton model."""
        if not values:
            return self
        ids = ids_by_model[model_name]
        # A singleton must have an empty identifying-field tuple.
        assert not ids
        assert isinstance(values, dict)
        if model_name not in self.data:
            self.data[model_name] = {}
        self._add_values(values, model_name)
        return self

    def delete(self, records, as_thread=False):
        """Delete records from the store."""
        if not records:
            return self
        assert isinstance(records, models.Model)
        model_name = "mail.thread" if as_thread else records._name
        for record in records:
            values = (
                {"id": record.id} if not as_thread else {"id": record.id, "model": record._name}
            )
            index = self._get_record_index(model_name, values)
            self._ensure_record_at_index(model_name, index)
            self._add_values(values, model_name, index)
            # Marker consumed by the client to remove the record; cleared again
            # if the same record is later re-added (see add_model_values).
            self.data[model_name][index]["_DELETE"] = True
        return self

    def get_result(self):
        """Gets resulting data built from adding all data together."""
        res = {}
        # Sorting keys makes the payload deterministic for the client and tests.
        for model_name, records in sorted(self.data.items()):
            if not ids_by_model[model_name]:  # singleton
                res[model_name] = dict(sorted(records.items()))
            else:
                res[model_name] = [dict(sorted(record.items())) for record in records.values()]
        return res

    def bus_send(self, notification_type="mail.record/insert", /):
        # Push the accumulated result on the bus channel given at construction.
        assert self.target.channel is not None, (
            "Missing `bus_channel`. Pass it to the `Store` constructor to use `bus_send`."
        )
        if res := self.get_result():
            self.target.channel._bus_send(notification_type, res, subchannel=self.target.subchannel)

    def resolve_data_request(self, **values):
        """Add values to the store for the current data request.

        Use case: resolve a specific data request from a client."""
        if not self.data_id:
            return self
        self.add_model_values("DataResponse", {"id": self.data_id, "_resolve": True, **values})
        return self

    def _add_values(self, values, model_name, index=None):
        """Adds values to the store for a given model name and index."""
        # NOTE(review): a falsy index (None, but also the empty tuple used by
        # singleton-like models) writes on the model dict itself rather than on
        # data[model][index] — presumably intentional for singletons; verify.
        target = self.data[model_name][index] if index else self.data[model_name]
        for key, val in values.items():
            assert key != "_DELETE", f"invalid key {key} in {model_name}: {values}"
            if isinstance(val, Store.Relation):
                val._add_to_store(self, target, key)
            elif isinstance(val, datetime):
                target[key] = odoo.fields.Datetime.to_string(val)
            elif isinstance(val, date):
                target[key] = odoo.fields.Date.to_string(val)
            elif isinstance(val, Markup):
                # Tagged so the client can rebuild a markup value safely.
                target[key] = ["markup", str(val)]
            else:
                target[key] = val

    def _ensure_record_at_index(self, model_name, index):
        # Create the nested dicts lazily so callers can write unconditionally.
        if model_name not in self.data:
            self.data[model_name] = {}
        if index not in self.data[model_name]:
            self.data[model_name][index] = {}

    def _format_fields(self, records, fields):
        # Normalize, then let the model expand each field spec if it opts in.
        fields = Store._static_format_fields(fields)
        if hasattr(records, "_field_store_repr"):
            return [f for field in fields for f in records._field_store_repr(field)]
        return fields

    @staticmethod
    def _static_format_fields(fields):
        # Normalize the accepted field-spec shapes (None/dict/scalar) to a list.
        if fields is None:
            return []
        if isinstance(fields, dict):
            return [Store.Attr(key, value) for key, value in fields.items()]
        if not isinstance(fields, list):
            return [fields]
        return list(fields)  # prevent mutation of original list

    def _get_records_data_list(self, records, fields):
        # "Abstract" fields (dicts and Attr/Relation specs) are computed in
        # Python; the remaining plain field names go through _read_format().
        abstract_fields = [field for field in fields if isinstance(field, (dict, Store.Attr))]
        records_data_list = [
            [data_dict]
            for data_dict in records._read_format(
                [f for f in fields if f not in abstract_fields], load=False,
            )
        ]
        for record, record_data_list in zip(records, records_data_list):
            for field in abstract_fields:
                if isinstance(field, dict):
                    record_data_list.append(field)
                elif not field.predicate or field.predicate(record):
                    try:
                        record_data_list.append({field.field_name: field._get_value(record)})
                    except MissingError:
                        # Record vanished mid-read: stop computing its fields.
                        break
        return records_data_list

    def _get_record_index(self, model_name, values):
        # Build the tuple of identifying values declared in ids_by_model.
        ids = ids_by_model[model_name]
        for i in ids:
            assert values.get(i), f"missing id {i} in {model_name}: {values}"
        return tuple(values[i] for i in ids)

    class Target:
        """Target of the current store. Useful when information have to be added contextually
        depending on who is going to receive it."""

        def __init__(self, channel=None, subchannel=None):
            assert channel is None or isinstance(channel, models.Model), (
                f"channel should be None or a record: {channel}"
            )
            assert channel is None or len(channel) <= 1, (
                f"channel should be empty or should be a single record: {channel}"
            )
            self.channel = channel
            self.subchannel = subchannel

        def is_current_user(self, env):
            """Return whether the current target is the current user or guest of the given env.
            If there is no target at all, this is always True."""
            if self.channel is None and self.subchannel is None:
                return True
            user = self.get_user(env)
            guest = self.get_guest(env)
            return self.subchannel is None and (
                (user and user == env.user and not env.user._is_public())
                or (guest and guest == env["mail.guest"]._get_guest_from_context())
            )

        def is_internal(self, env):
            """Return whether the current target implies the information will only be sent to
            internal users. If there is no target at all, the check is based on the current
            user of the env."""
            bus_record = self.channel
            if bus_record is None and self.subchannel is None:
                bus_record = env.user
            # Internal when targeting: an internal user, an internal-only
            # discuss channel (or its internal_users subchannel), or a group
            # implying base.group_user.
            return (
                (
                    isinstance(bus_record, env.registry["res.users"])
                    and self.subchannel is None
                    and bus_record._is_internal()
                )
                or (
                    isinstance(bus_record, env.registry["discuss.channel"])
                    and (
                        self.subchannel == "internal_users"
                        or (
                            bus_record.channel_type == "channel"
                            and env.ref("base.group_user")
                            in bus_record.group_public_id.all_implied_ids
                        )
                    )
                )
                or (
                    isinstance(self.channel, env.registry["res.groups"])
                    and env.ref("base.group_user") in self.channel.implied_ids
                )
            )

        def get_guest(self, env):
            """Return target guest (if any). Target guest is either the current bus target if the
            bus is actually targetting a guest, or the current guest from env if there is no bus
            target at all but there is a guest in the env.
            """
            records = self.channel
            if self.channel is None and self.subchannel is None:
                records = env["mail.guest"]._get_guest_from_context()
            return records if isinstance(records, env.registry["mail.guest"]) else env["mail.guest"]

        def get_user(self, env):
            """Return target user (if any). Target user is either the current bus target if the
            bus is actually targetting a user, or the current user from env if there is no bus
            target at all but there is a user in the env."""
            records = self.channel
            if self.channel is None and self.subchannel is None:
                records = env.user
            return records if isinstance(records, env.registry["res.users"]) else env["res.users"]

    class Attr:
        """Attribute to be added for each record. The value can be a static value or a function
        to compute the value, receiving the record as argument.

        Use case: to add a value when it does not come directly from a field.
        Note: when a static value is given to a recordset, the same value is set on all records.
        """

        def __init__(self, field_name, value=None, *, predicate=None, sudo=False):
            self.field_name = field_name
            self.predicate = predicate
            self.sudo = sudo
            self.value = value

        def _get_value(self, record):
            # No explicit value and a real field name: read it from the record.
            if self.value is None and self.field_name in record._fields:
                return (record.sudo() if self.sudo else record)[self.field_name]
            if callable(self.value):
                return self.value(record)
            return self.value

    class Relation(Attr):
        """Flags a record or field name to be added to the store in a relation."""

        def __init__(
            self,
            records_or_field_name,
            fields=None,
            *,
            as_thread=False,
            dynamic_fields=None,
            only_data=False,
            predicate=None,
            sudo=False,
            value=None,
            **kwargs,
        ):
            field_name = records_or_field_name if isinstance(records_or_field_name, str) else None
            super().__init__(field_name, predicate=predicate, sudo=sudo, value=value)
            assert (
                not records_or_field_name
                or isinstance(records_or_field_name, (str, models.Model))
            ), f"expected recordset, field name, or empty value for Relation: {records_or_field_name}"
            self.records = (
                records_or_field_name if isinstance(records_or_field_name, models.Model) else None
            )
            assert self.records is None or dynamic_fields is None, (
                """dynamic_fields can only be set when field name is provided, not records. """
                f"""Records: {self.records}, dynamic_fields: {dynamic_fields}"""
            )
            self.as_thread = as_thread
            self.dynamic_fields = dynamic_fields
            self.fields = fields
            self.only_data = only_data
            self.kwargs = kwargs

        def _get_value(self, record):
            target = super()._get_value(record)
            if target is None:
                # Special case: a virtual "thread" relation resolved from the
                # record's (res_model/model, res_id) pair.
                res_model_field = "res_model" if "res_model" in record._fields else "model"
                if self.field_name == "thread" and "thread" not in record._fields:
                    if (res_model := record[res_model_field]) and (res_id := record["res_id"]):
                        target = record.env[res_model].browse(res_id)
            return self._copy_with_records(target, calling_record=record)

        def _copy_with_records(self, records, calling_record):
            """Returns a new relation with the given records instead of the field name."""
            assert self.field_name and self.records is None
            assert not self.dynamic_fields or calling_record
            extra_fields = Store._static_format_fields(self.kwargs.get("extra_fields"))
            if self.dynamic_fields:
                extra_fields += self.dynamic_fields(calling_record)
            params = {
                "as_thread": self.as_thread,
                "extra_fields": extra_fields,
                "fields": self.fields,
                "only_data": self.only_data,
                **{key: val for key, val in self.kwargs.items() if key != "extra_fields"},
            }
            return self.__class__(records, **params)

        def _add_to_store(self, store: "Store", target, key):
            """Add the current relation to the given store at target[key]."""
            store.add(self.records, self.fields, as_thread=self.as_thread, **self.kwargs)

    class One(Relation):
        """Flags a record or field name to be added to the store in a One relation."""

        def __init__(
            self,
            record_or_field_name,
            fields=None,
            *,
            as_thread=False,
            dynamic_fields=None,
            only_data=False,
            predicate=None,
            sudo=False,
            value=None,
            **kwargs,
        ):
            super().__init__(
                record_or_field_name,
                fields,
                as_thread=as_thread,
                dynamic_fields=dynamic_fields,
                only_data=only_data,
                predicate=predicate,
                sudo=sudo,
                value=value,
                **kwargs,
            )
            # A One relation points at a single record at most.
            assert not self.records or len(self.records) == 1

        def _add_to_store(self, store: "Store", target, key):
            super()._add_to_store(store, target, key)
            if not self.only_data:
                target[key] = self._get_id()

        def _get_id(self):
            """Return the id that can be used to insert the current relation in the store."""
            if not self.records:
                return False
            if self.as_thread:
                return {"id": self.records.id, "model": self.records._name}
            if self.records._name == "discuss.channel":
                return {"id": self.records.id, "model": "discuss.channel"}
            return self.records.id

    class Many(Relation):
        """Flags records or field name to be added to the store in a Many relation.
        - mode: "REPLACE" (default), "ADD", or "DELETE"."""

        def __init__(
            self,
            records_or_field_name,
            fields=None,
            *,
            mode="REPLACE",
            as_thread=False,
            dynamic_fields=None,
            only_data=False,
            predicate=None,
            sort=None,
            sudo=False,
            value=None,
            **kwargs,
        ):
            super().__init__(
                records_or_field_name,
                fields,
                as_thread=as_thread,
                dynamic_fields=dynamic_fields,
                only_data=only_data,
                predicate=predicate,
                sudo=sudo,
                value=value,
                **kwargs,
            )
            self.mode = mode
            self.sort = sort

        def _copy_with_records(self, *args, **kwargs):
            # Propagate Many-specific state the base copy does not know about.
            res = super()._copy_with_records(*args, **kwargs)
            res.mode = self.mode
            res.sort = self.sort
            return res

        def _add_to_store(self, store: "Store", target, key):
            self._sort_recods()
            super()._add_to_store(store, target, key)
            if not self.only_data:
                rel_val = self._get_id()
                # ADD/DELETE modes append to an existing command list; REPLACE
                # overwrites whatever was already stored under the key.
                target[key] = (
                    target[key] + rel_val if key in target and self.mode != "REPLACE" else rel_val
                )

        def _get_id(self):
            """Return the ids that can be used to insert the current relation in the store."""
            self._sort_recods()
            if self.records._name == "mail.message.reaction":
                # Reactions are identified by (message, content), one entry per group.
                res = [
                    {"message": message.id, "content": content}
                    for (message, content), _ in groupby(
                        self.records, lambda r: (r.message_id, r.content)
                    )
                ]
            else:
                res = [
                    Store.One(record, as_thread=self.as_thread)._get_id() for record in self.records
                ]
            if self.mode == "ADD":
                res = [("ADD", res)]
            elif self.mode == "DELETE":
                res = [("DELETE", res)]
            return res

        def _sort_recods(self):
            # NOTE(review): name has a typo ("recods" for "records"); kept as-is
            # since internal callers use this exact name.
            if self.sort:
                self.records = self.records.sorted(self.sort)
                # Sort only once, even if called from both _add_to_store and _get_id.
                self.sort = None
|
||||
101
odoo-bringout-oca-ocb-mail/mail/tools/jwt.py
Normal file
101
odoo-bringout-oca-ocb-mail/mail/tools/jwt.py
Normal file
|
|
@@ -0,0 +1,101 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import binascii
|
||||
import time
|
||||
import enum
|
||||
import hmac
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, utils
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Errors specific to JWT
|
||||
# ------------------------------------------------------------
|
||||
|
||||
class InvalidVapidError(Exception):
    """Raised when a VAPID key/claim is malformed or otherwise unusable."""
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# JWT
|
||||
# ------------------------------------------------------------
|
||||
|
||||
class Algorithm(enum.Enum):
    """JWT signing algorithms supported by this module."""

    ES256 = "ES256"  # ECDSA over the P-256 curve with SHA-256
    HS256 = "HS256"  # HMAC with SHA-256
|
||||
|
||||
|
||||
def _generate_keys(key_encoding, key_format) -> tuple[bytes, bytes]:
    """Generate a fresh ECDSA key pair on the P-256 curve.

    :param key_encoding: ``cryptography`` serialization ``Encoding`` for the public key
    :param key_format: ``cryptography`` ``PublicFormat`` for the public key
    :return: tuple (private_bytes, public_bytes); the private key is the raw
        32-byte big-endian scalar, the public key is serialized per the arguments

    Note: the return annotation previously used ``(bytes, bytes)``, which is not
    a valid type expression; it is now ``tuple[bytes, bytes]`` (PEP 484).
    """
    private_object = ec.generate_private_key(ec.SECP256R1(), default_backend())
    private_int = private_object.private_numbers().private_value
    # Fixed-width big-endian encoding of the private scalar (P-256 order fits 32 bytes).
    private_bytes = private_int.to_bytes(32, "big")
    public_object = private_object.public_key()
    public_bytes = public_object.public_bytes(
        encoding=key_encoding,
        format=key_format,
    )
    return private_bytes, public_bytes
|
||||
|
||||
|
||||
def generate_vapid_keys() -> tuple[str, str]:
    """
    Generate the VAPID (Voluntary Application Server Identification) used for the Web Push
    This function generates a signing key pair usable with the Elliptic Curve Digital
    Signature Algorithm (ECDSA) over the P-256 curve.
    https://www.rfc-editor.org/rfc/rfc8292

    :return: tuple (private_key, public_key) as unpadded urlsafe-base64 strings

    Note: the return annotation previously used ``(str, str)``, which is not a
    valid type expression; it is now ``tuple[str, str]`` (PEP 484).
    """
    private, public = _generate_keys(serialization.Encoding.X962, serialization.PublicFormat.UncompressedPoint)
    # Unpadded urlsafe base64, as expected by Web Push (RFC 8292).
    private_string = base64.urlsafe_b64encode(private).decode("ascii").strip("=")
    public_string = base64.urlsafe_b64encode(public).decode("ascii").strip("=")
    return private_string, public_string
|
||||
|
||||
|
||||
def base64_decode_with_padding(value: str) -> bytes:
    """Decode an urlsafe-base64 string whose trailing ``=`` padding may be missing."""
    # Appending "==" guarantees enough padding; the decoder ignores any excess.
    padded = value + "=="
    return base64.urlsafe_b64decode(padded)
|
||||
|
||||
|
||||
def _generate_jwt(claims: dict, key: str, algorithm: Algorithm) -> str:
|
||||
JOSE_header = base64.urlsafe_b64encode(json.dumps({"typ": "JWT", "alg": algorithm.value}).encode())
|
||||
payload = base64.urlsafe_b64encode(json.dumps(claims).encode())
|
||||
unsigned_token = "{}.{}".format(JOSE_header.decode().strip("="), payload.decode().strip("="))
|
||||
key_decoded = base64_decode_with_padding(key)
|
||||
|
||||
match algorithm:
|
||||
case Algorithm.HS256:
|
||||
signature = hmac.new(key_decoded, unsigned_token.encode(), hashlib.sha256).digest()
|
||||
sig = base64.urlsafe_b64encode(signature)
|
||||
case Algorithm.ES256:
|
||||
# Retrieve the private key using a P256 elliptic curve
|
||||
private_key = ec.derive_private_key(
|
||||
int(binascii.hexlify(key_decoded), 16), ec.SECP256R1(), default_backend()
|
||||
)
|
||||
signature = private_key.sign(unsigned_token.encode(), ec.ECDSA(hashes.SHA256()))
|
||||
(r, s) = utils.decode_dss_signature(signature)
|
||||
sig = base64.urlsafe_b64encode(r.to_bytes(32, "big") + s.to_bytes(32, "big"))
|
||||
case _:
|
||||
raise ValueError(f"Unsupported algorithm: {algorithm}")
|
||||
|
||||
return "{}.{}".format(unsigned_token, sig.decode().strip("="))
|
||||
|
||||
|
||||
def sign(claims: dict, key: str, ttl: int, algorithm: Algorithm) -> str:
|
||||
"""
|
||||
A JSON Web Token is a signed pair of JSON objects, turned into base64 strings.
|
||||
|
||||
RFC: https://www.rfc-editor.org/rfc/rfc7519
|
||||
|
||||
:param claims: the payload of the jwt: https://www.rfc-editor.org/rfc/rfc7519#section-4.1
|
||||
:param key: base64 string
|
||||
:param ttl: the time to live of the token in seconds ('exp' claim)
|
||||
:param algorithm: to use to sign the token
|
||||
:return: JSON Web Token
|
||||
"""
|
||||
non_padded_key = key.strip("=")
|
||||
assert ttl
|
||||
claims["exp"] = int(time.time()) + ttl
|
||||
return _generate_jwt(claims, non_padded_key, algorithm=algorithm)
|
||||
108
odoo-bringout-oca-ocb-mail/mail/tools/link_preview.py
Normal file
108
odoo-bringout-oca-ocb-mail/mail/tools/link_preview.py
Normal file
|
|
@@ -0,0 +1,108 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import re
|
||||
from lxml import html
|
||||
import chardet
|
||||
import requests
|
||||
from urllib3.exceptions import LocationParseError
|
||||
|
||||
|
||||
def get_link_preview_from_url(url, request_session=None):
    """
    Get the Open Graph properties of an url. (https://ogp.me/)
    If the url leads directly to an image mimetype, return
    the url as preview image else retrieve the properties from
    the html page.

    Using a stream request to prevent loading the whole page
    as those properties are declared in the <head> tag.

    The request session is optional as in some cases using
    a session could be beneficial performance wise
    (e.g. a lot of url could have the same domain).
    """
    # Some websites are blocking non browser user agent.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0',
        'Odoo-Link-Preview': 'True',  # Used to identify coming from the link previewer
    }
    # The bare `requests` module exposes the same .get() API as a session.
    client = request_session if request_session else requests
    try:
        response = client.get(url, timeout=3, headers=headers, allow_redirects=True, stream=True)
    except (requests.exceptions.RequestException, LocationParseError):
        return False
    header_value = response.headers.get('Content-Type')
    if not response.ok or not header_value:
        return False
    # Content-Type header can return a charset, but we just need the
    # mimetype (eg: image/jpeg;charset=ISO-8859-1)
    mimetype = header_value.split(';')[0]
    if mimetype.startswith('image/'):
        return {
            'image_mimetype': mimetype,
            'og_image': url,  # If the url mimetype is already an image type, set url as preview image
            'source_url': url,
        }
    if mimetype.startswith('text/html'):
        return get_link_preview_from_html(url, response)
    return False
|
||||
|
||||
def get_link_preview_from_html(url, response):
    """
    Retrieve the Open Graph properties from the html page. (https://ogp.me/)
    Load the page with chunks of 8kb to prevent loading the whole
    html when we only need the <head> tag content.
    Fallback on the <title> tag if the html doesn't have
    any Open Graph title property.

    :param url: source url, echoed back in the result
    :param response: a streamed ``requests`` response for that url
    :return: dict of Open Graph properties, or False when no title can be found
    """
    content = b""
    for chunk in response.iter_content(chunk_size=8192):
        content += chunk
        # Only scan the tail we may have just extended; "-8196" is presumably a
        # typo for 8192 (the chunk size) — harmless, it just widens the window.
        pos = content.find(b'</head>', -8196 * 2)
        # Stop reading once all the <head> data is found
        if pos != -1:
            content = content[:pos + 7]
            break

    if not content:
        return False

    # chardet.detect() always returns an "encoding" key, possibly None, so the
    # previous `.get("encoding", "utf-8")` default never applied; use `or` to
    # actually fall back to utf-8. LookupError also covers an unknown codec
    # name reported by chardet.
    encoding = response.encoding or chardet.detect(content).get("encoding") or "utf-8"
    try:
        decoded_content = content.decode(encoding)
    except (UnicodeDecodeError, TypeError, LookupError):
        decoded_content = content.decode("utf-8", errors="ignore")

    try:
        tree = html.fromstring(decoded_content)
    except ValueError:
        # lxml refuses unicode strings carrying an explicit XML declaration:
        # strip the declaration and retry.
        decoded_content = re.sub(
            r"^<\?xml[^>]+\?>\s*", "", decoded_content, flags=re.IGNORECASE
        )
        tree = html.fromstring(decoded_content)

    og_title = tree.xpath('//meta[@property="og:title"]/@content')
    if og_title:
        og_title = og_title[0]
    elif tree.find('.//title') is not None:
        # Fallback on the <title> tag if it exists
        og_title = tree.find('.//title').text
    else:
        return False
    og_description = tree.xpath('//meta[@property="og:description"]/@content')
    og_type = tree.xpath('//meta[@property="og:type"]/@content')
    og_site_name = tree.xpath('//meta[@property="og:site_name"]/@content')
    og_image = tree.xpath('//meta[@property="og:image"]/@content')
    og_mimetype = tree.xpath('//meta[@property="og:image:type"]/@content')
    return {
        'og_description': og_description[0] if og_description else None,
        'og_image': og_image[0] if og_image else None,
        'og_mimetype': og_mimetype[0] if og_mimetype else None,
        'og_title': og_title,
        'og_type': og_type[0] if og_type else None,
        'og_site_name': og_site_name[0] if og_site_name else None,
        'source_url': url,
    }
|
||||
37
odoo-bringout-oca-ocb-mail/mail/tools/parser.py
Normal file
37
odoo-bringout-oca-ocb-mail/mail/tools/parser.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import ast
|
||||
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tools import is_list_of
|
||||
|
||||
|
||||
def parse_res_ids(res_ids, env):
    """ Returns the already valid list/tuple of int or returns the literal eval
    of the string as a list/tuple of int. Void strings / missing values are
    evaluated as an empty list.

    :param str|tuple|list res_ids: a list of ids, tuple or list;
    :param env: Odoo environment, used only to translate the error message;

    :raise ValidationError: if the provided res_ids is an incorrect type or
        invalid format;

    :return list: list of ids
    """
    # Fast path: already a list/tuple of ints, or falsy ('' / None / []),
    # which is returned as-is and treated as "no ids".
    if is_list_of(res_ids, int) or not res_ids:
        return res_ids
    # Built before literal_eval so the message always reports the caller's
    # original value and type, not the evaluated result.
    error_msg = env._(
        "Invalid res_ids %(res_ids_str)s (type %(res_ids_type)s)",
        res_ids_str=res_ids,
        res_ids_type=type(res_ids).__name__,
    )
    try:
        # literal_eval may raise ValueError, SyntaxError, TypeError, ... on
        # malformed input; any failure surfaces as a single ValidationError.
        res_ids = ast.literal_eval(res_ids)
    except Exception as e:
        raise ValidationError(error_msg) from e

    # literal_eval succeeded but produced something other than ids
    # (e.g. a dict, a list of strings): reject it as well.
    if not is_list_of(res_ids, int):
        raise ValidationError(error_msg)

    return res_ids
|
||||
189
odoo-bringout-oca-ocb-mail/mail/tools/web_push.py
Normal file
189
odoo-bringout-oca-ocb-mail/mail/tools/web_push.py
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import json
|
||||
import logging as logger
|
||||
import os
|
||||
import struct
|
||||
import textwrap
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import ec
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
|
||||
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
from . import jwt
|
||||
|
||||
# Hard upper bound for one encrypted push message; push services must accept
# messages up to 4096 octets (see RFC 8291, referenced below in this module).
MAX_PAYLOAD_SIZE = 4096

# size of the overhead of the header for all encryption blocks
# +-----------+-----------------+---------------------------+------------------------+
# | salt (16) | record_size (4) | sender_public_key.len (1) | sender_public_key (65) |
# +-----------+-----------------+---------------------------+------------------------+
# sender_public_key = 0x04 (1 byte) | X-coord (32 bytes) | Y-coord (32 bytes)
# using SECP256R1 curve + X9.62 encoding + SEC1 uncompressed formatting
ENCRYPTION_HEADER_SIZE = 16 + 4 + 1 + (1 + 32 + 32)

# size of the overhead of encryption per encryption block
# 1 padding delimiter (continue or final block) + 16-bytes in-message authentication tag from AEAD_AES_128_GCM
ENCRYPTION_BLOCK_OVERHEAD = 1 + 16
|
||||
|
||||
class PUSH_NOTIFICATION_TYPE:
    """String constants identifying the kind of push notification payload."""
    CALL = "CALL"
    CANCEL = "CANCEL"
|
||||
|
||||
|
||||
class PUSH_NOTIFICATION_ACTION:
    """String constants for actions carried in push notification payloads."""
    ACCEPT = "ACCEPT"
    DECLINE = "DECLINE"
|
||||
|
||||
|
||||
# Module-level logger (NOTE: the stdlib ``logging`` module is imported under
# the alias ``logger`` at the top of this file).
_logger = logger.getLogger(__name__)
|
||||
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Errors specific to web push
|
||||
# ------------------------------------------------------------
|
||||
|
||||
class DeviceUnreachableError(Exception):
    """Raised when a push endpoint cannot receive notifications.

    Used in this module for endpoints under the reserved ``.invalid`` TLD and
    for HTTP 404 / 410 responses from the push service (subscription expired
    or removed) — callers should discard the device registration.
    """
    pass
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Web Push
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def _iv(base, counter):
|
||||
mask = int.from_bytes(base[4:], 'big')
|
||||
return base[:4] + (counter ^ mask).to_bytes(8, 'big')
|
||||
|
||||
def _derive_key(salt, private_key, device):
    """Derive the AES-128-GCM content-encryption key and base nonce.

    Follows the Web Push key-derivation scheme (RFC 8291, using the
    "aes128gcm" content encoding of RFC 8188): an ECDH shared secret between
    the sender's ephemeral key and the browser's subscription key is expanded
    through three HKDF-SHA256 steps (auth -> secret, then key and nonce).

    :param bytes salt: random 16-byte salt for this message
    :param private_key: sender's ephemeral SECP256R1 private key
    :param dict device: push subscription record; its ``"keys"`` entry is a
        JSON string holding base64url-encoded ``"p256dh"`` (browser public
        key) and ``"auth"`` (authentication secret) values
    :return: tuple ``(key, nonce)`` — 16-byte AES key and 12-byte base nonce
    """
    # browser keys
    device_keys = json.loads(device["keys"])
    p256dh = jwt.base64_decode_with_padding(device_keys.get('p256dh'))
    auth = jwt.base64_decode_with_padding(device_keys.get('auth'))

    # generate a public key derived from the browser public key
    pub_key = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256R1(), p256dh)
    sender_pub_key = private_key.public_key().public_bytes(
        Encoding.X962, PublicFormat.UncompressedPoint
    )

    # Fixed HKDF "info" parameters; the exact bytes (including the NUL
    # terminators) are part of the wire format and must not change.
    context = b"WebPush: info\x00" + p256dh + sender_pub_key
    key_info = b"Content-Encoding: aes128gcm\x00"
    nonce_info = b"Content-Encoding: nonce\x00"

    # Create the 3 HKDF keys needed to encrypt the message (auth, key, nonce)
    hkdf_auth = HKDF(
        algorithm=hashes.SHA256(),
        length=32,
        salt=auth,
        info=context,
        backend=default_backend(),
    )
    hkdf_key = HKDF(
        algorithm=hashes.SHA256(),
        length=16,
        salt=salt,
        info=key_info,
        backend=default_backend(),
    )
    hkdf_nonce = HKDF(
        algorithm=hashes.SHA256(),
        length=12,
        salt=salt,
        info=nonce_info,
        backend=default_backend(),
    )
    # ECDH shared secret, first expanded with the browser's auth secret, then
    # split into the content-encryption key and the base nonce.
    secret = hkdf_auth.derive(private_key.exchange(ec.ECDH(), pub_key))
    return hkdf_key.derive(secret), hkdf_nonce.derive(secret)
|
||||
|
||||
def _encrypt_payload(content, device, record_size=MAX_PAYLOAD_SIZE):
    """
    Encrypt a payload for Push Notification Endpoint using AES128GCM

    https://www.rfc-editor.org/rfc/rfc7516
    https://www.rfc-editor.org/rfc/rfc8188
    :param bytes content: the unencrypted payload
    :param dict device: the web push user browser information
    :param int record_size: record size, must be bigger than 18
    :return: the encrypted payload (header + encrypted records)
    """
    # The private_key is an ephemeral ECDH key used only for a transaction
    private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
    salt = os.urandom(16)
    # generate key
    (key, nonce) = _derive_key(salt=salt, private_key=private_key, device=device)
    # AEAD_AES_128_GCM produces ciphertext 16 octets longer than its input plaintext.
    # Therefore, the unencrypted content of each record is shorter than the record size by 16 octets.
    # Valid records always contain at least a padding delimiter octet and a 16-octet authentication tag.
    chunk_size = record_size - ENCRYPTION_BLOCK_OVERHEAD

    end = len(content)
    aesgcm = AESGCM(key)
    body = b""
    # RFC 8188 requires at least one record even for an empty plaintext
    # (a record containing only the final-block delimiter).
    offsets = range(0, end, chunk_size) or (0,)
    for seq, i in enumerate(offsets):
        # "\x02" marks the final record, "\x01" a continuation record.
        delimiter = b"\x02" if (i + chunk_size) >= end else b"\x01"
        # Each record uses its own nonce: the base nonce XORed with the record
        # sequence number (RFC 8188 section 2.3). Reusing one GCM nonce across
        # records would both break decryption interop and void GCM's security
        # guarantees; record 0 is unchanged (XOR with 0 is the identity).
        body += aesgcm.encrypt(_iv(nonce, seq), content[i: i + chunk_size] + delimiter, None)

    sender_public_key = private_key.public_key().public_bytes(
        Encoding.X962, PublicFormat.UncompressedPoint
    )

    # +-----------+-----------------+---------------------------+-------------------------------------------+
    # | salt (16) | record_size (4) | sender_public_key.len (1) | sender_public_key (sender_public_key.len) |
    # +-----------+-----------------+---------------------------+-------------------------------------------+
    header = struct.pack("!16sLB", salt, record_size, len(sender_public_key))
    header += sender_public_key
    return header + body
|
||||
|
||||
def push_to_end_point(base_url, device, payload, vapid_private_key, vapid_public_key, session):
    """Encrypt *payload* and POST it to the device's push service endpoint.

    Implements VAPID-authenticated, encrypted Web Push:
    https://www.rfc-editor.org/rfc/rfc8291

    :param str base_url: contact URL placed in the JWT "sub" claim
    :param dict device: subscription record with "endpoint" and "keys"
    :param str payload: plaintext notification payload (encrypted before send)
    :param vapid_private_key: VAPID signing key used to sign the JWT
    :param str vapid_public_key: base64url-encoded VAPID public key
    :param session: requests-like session used to perform the POST
    :raise DeviceUnreachableError: when the endpoint host is known-invalid or
        the push service reports the subscription gone (HTTP 404 / 410)
    """
    endpoint = device["endpoint"]
    url = urlsplit(endpoint)
    # The TLD ".invalid" is intended for use in online construction of domain names that are sure to be invalid and
    # which it is obvious at a glance are invalid.
    # https://datatracker.ietf.org/doc/html/rfc2606#section-2
    if url.netloc.endswith(".invalid"):
        raise DeviceUnreachableError("Device Unreachable")
    jwt_claims = {
        # aud: The “Audience” is a JWT construct that indicates the recipient scheme and host
        # e.g. for an endpoint like https://updates.push.services.mozilla.com/wpush/v2/gAAAAABY...,
        # the “aud” would be https://updates.push.services.mozilla.com
        'aud': '{}://{}'.format(url.scheme, url.netloc),
        # sub: the sub value needs to be either a URL address. This is so that if a push service needed to reach out
        # to sender, it can find contact information from the JWT.
        'sub': base_url,
    }
    token = jwt.sign(jwt_claims, vapid_private_key, ttl=12 * 60 * 60, algorithm=jwt.Algorithm.ES256)
    body_payload = payload.encode()
    payload = _encrypt_payload(body_payload, device)
    headers = {
        # Authorization header field contains these parameters:
        # - "t" is the JWT;
        # - "k" the base64url-encoded key that signed that token.
        'Authorization': 'vapid t={}, k={}'.format(token, vapid_public_key),
        'Content-Encoding': 'aes128gcm',
        # The TTL is set to '60' as workaround because the push notifications
        # are not received on Edge with TTL ='0'.
        # Using the TTL '0' , the microsoft endpoint returns a 400 bad request error.
        # and we are sure that the notification will be received
        'TTL': '60',
    }

    response = session.post(endpoint, headers=headers, data=payload, timeout=5)
    if response.status_code == 201:
        _logger.debug('Sent push notification %s', endpoint)
    else:
        error_message_shorten = textwrap.shorten(response.text, 100)
        _logger.warning('Failed push notification %s %d - %s',
                        endpoint, response.status_code, error_message_shorten)

        # Invalid subscription: 404 Not Found / 410 Gone mean the registration
        # no longer exists and the caller should discard it.
        if response.status_code in (404, 410):
            raise DeviceUnreachableError("Device Unreachable")
|
||||
Loading…
Add table
Add a link
Reference in a new issue