mirror of
https://github.com/bringout/oca-ocb-core.git
synced 2026-04-21 06:32:10 +02:00
19.0 vanilla
This commit is contained in:
parent
d1963a3c3a
commit
2d3ee4855a
7430 changed files with 2687981 additions and 2965473 deletions
|
|
@ -1,18 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
# core models (required for mixins)
|
||||
from . import mail_alias
|
||||
from . import mail_alias_domain
|
||||
from . import models
|
||||
|
||||
# mixin
|
||||
from . import mail_activity_mixin
|
||||
from . import mail_alias_mixin_optional
|
||||
from . import mail_alias_mixin
|
||||
from . import mail_render_mixin
|
||||
from . import mail_composer_mixin
|
||||
from . import mail_thread
|
||||
from . import mail_thread_blacklist
|
||||
from . import mail_thread_cc
|
||||
from . import mail_thread_main_attachment
|
||||
from . import mail_tracking_duration_mixin
|
||||
from . import template_reset_mixin
|
||||
|
||||
# mail models
|
||||
|
|
@ -20,44 +23,54 @@ from . import fetchmail
|
|||
from . import mail_notification # keep before as decorated m2m
|
||||
from . import mail_activity_type
|
||||
from . import mail_activity
|
||||
from . import mail_activity_plan
|
||||
from . import mail_activity_plan_template
|
||||
from . import mail_blacklist
|
||||
from . import mail_followers
|
||||
from . import mail_gateway_allowed
|
||||
from . import mail_link_preview
|
||||
from . import mail_message_link_preview
|
||||
from . import mail_message_reaction
|
||||
from . import mail_message_schedule
|
||||
from . import mail_message_subtype
|
||||
from . import mail_message_translation
|
||||
from . import mail_message
|
||||
from . import mail_mail
|
||||
from . import mail_presence
|
||||
from . import mail_push
|
||||
from . import mail_push_device
|
||||
from . import mail_scheduled_message
|
||||
from . import mail_tracking_value
|
||||
from . import mail_template
|
||||
|
||||
# discuss
|
||||
from . import mail_channel_member
|
||||
from . import mail_channel_rtc_session
|
||||
from . import mail_channel
|
||||
from . import mail_guest
|
||||
from . import mail_ice_server
|
||||
from . import mail_shortcode
|
||||
from . import mail_canned_response
|
||||
from . import res_users_settings
|
||||
from . import res_users_settings_volumes
|
||||
|
||||
# odoo models
|
||||
from . import bus_presence
|
||||
from . import ir_action_act_window
|
||||
from . import ir_actions_server
|
||||
from . import ir_attachment
|
||||
from . import ir_config_parameter
|
||||
from . import ir_cron
|
||||
from . import ir_http
|
||||
from . import ir_mail_server
|
||||
from . import ir_model
|
||||
from . import ir_model_fields
|
||||
from . import ir_ui_menu
|
||||
from . import ir_ui_view
|
||||
from . import ir_qweb
|
||||
from . import ir_websocket
|
||||
from . import res_company
|
||||
from . import res_config_settings
|
||||
from . import res_partner
|
||||
from . import res_role
|
||||
from . import res_users
|
||||
from . import res_groups
|
||||
from . import update
|
||||
|
||||
# after mail specifically as discuss module depends on mail
|
||||
from . import discuss
|
||||
|
||||
# discuss_channel_member must be loaded first
|
||||
from . import res_partner
|
||||
|
|
|
|||
|
|
@ -1,16 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from odoo import fields, models
|
||||
|
||||
|
||||
class BusPresence(models.Model):
    """Extend ``bus.presence`` so a presence row can belong to a guest
    instead of a logged-in user."""
    _inherit = ['bus.presence']

    # presence owner when the visitor is a guest rather than a user
    guest_id = fields.Many2one('mail.guest', 'Guest', ondelete='cascade')

    def init(self):
        # Partial unique index: at most one presence row per guest.
        # %-interpolation is acceptable here because _table is a trusted
        # model attribute, not user input.
        self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS bus_presence_guest_unique ON %s (guest_id) WHERE guest_id IS NOT NULL" % self._table)

    # exactly one of user_id / guest_id must be set on a row
    _sql_constraints = [
        ("partner_or_guest_exists", "CHECK((user_id IS NOT NULL AND guest_id IS NULL) OR (user_id IS NULL AND guest_id IS NOT NULL))", "A bus presence must have a user or a guest."),
    ]
|
||||
21
odoo-bringout-oca-ocb-mail/mail/models/discuss/__init__.py
Normal file
21
odoo-bringout-oca-ocb-mail/mail/models/discuss/__init__.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
# mail
|
||||
from . import mail_message
|
||||
|
||||
# discuss
|
||||
from . import discuss_call_history
|
||||
from . import discuss_channel_member
|
||||
from . import discuss_channel_rtc_session
|
||||
from . import discuss_channel
|
||||
from . import discuss_gif_favorite
|
||||
from . import discuss_voice_metadata
|
||||
from . import mail_guest
|
||||
|
||||
# odoo models
|
||||
from . import bus_listener_mixin
|
||||
from . import ir_attachment
|
||||
from . import ir_websocket
|
||||
from . import res_groups
|
||||
from . import res_partner
|
||||
from . import res_users
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from markupsafe import Markup
|
||||
|
||||
from odoo import models
|
||||
|
||||
|
||||
class BusListenerMixin(models.AbstractModel):
    """Discuss extension of the bus listener mixin: transient channel messages."""
    _inherit = "bus.listener.mixin"

    def _bus_send_transient_message(self, channel, content):
        """Post a fake message in the given ``channel``, visible only to the
        listeners of ``self`` (it is never stored in the database)."""
        body = Markup("<span class='o_mail_notification'>%s</span>") % content
        payload = {"body": body, "channel_id": channel.id}
        self._bus_send("discuss.channel/transient_message", payload)
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, models, fields
|
||||
|
||||
|
||||
class DiscussCallHistory(models.Model):
    """One row per call held in a discuss channel.

    A history entry is opened with ``start_dt`` when the call begins and
    closed by setting ``end_dt``; a row with a NULL ``end_dt`` therefore
    represents an ongoing call.
    """
    _name = "discuss.call.history"
    _order = "start_dt DESC, id DESC"
    _description = "Keep the call history"

    channel_id = fields.Many2one("discuss.channel", index=True, required=True, ondelete="cascade")
    duration_hour = fields.Float(compute="_compute_duration_hour")
    start_dt = fields.Datetime(index=True, required=True)
    # left empty while the call is ongoing
    end_dt = fields.Datetime()
    # message announcing the call start, at most one history entry per message
    start_call_message_id = fields.Many2one("mail.message", index=True)

    _channel_id_not_null_constraint = models.Constraint(
        "CHECK (channel_id IS NOT NULL)", "Call history must have a channel"
    )
    _start_dt_is_not_null_constraint = models.Constraint(
        "CHECK (start_dt IS NOT NULL)", "Call history must have a start date"
    )
    _message_id_unique_constraint = models.Constraint(
        "UNIQUE (start_call_message_id)", "Messages can only be linked to one call history"
    )
    # partial index to find the ongoing call (end_dt IS NULL) of a channel fast
    _channel_id_end_dt_idx = models.Index("(channel_id, end_dt) WHERE end_dt IS NULL")

    @api.depends("start_dt", "end_dt")
    def _compute_duration_hour(self):
        """Call duration in hours; ongoing calls are measured up to now."""
        for record in self:
            end_dt = record.end_dt or fields.Datetime.now()
            record.duration_hour = (end_dt - record.start_dt).total_seconds() / 3600
|
||||
1690
odoo-bringout-oca-ocb-mail/mail/models/discuss/discuss_channel.py
Normal file
1690
odoo-bringout-oca-ocb-mail/mail/models/discuss/discuss_channel.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,691 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import logging
|
||||
import requests
|
||||
import uuid
|
||||
from datetime import timedelta
|
||||
from markupsafe import Markup
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
from odoo.addons.mail.tools.web_push import PUSH_NOTIFICATION_ACTION, PUSH_NOTIFICATION_TYPE
|
||||
from odoo.exceptions import AccessError, UserError, ValidationError
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import SQL
|
||||
|
||||
from ...tools import jwt, discuss
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
SFU_MODE_THRESHOLD = 3
|
||||
|
||||
|
||||
class DiscussChannelMember(models.Model):
    """Membership of a partner or a guest in a discuss channel.

    Holds per-member UI state (custom name, notification preferences,
    fetched/seen markers, pin state) and the member's RTC (call) sessions.
    """
    _name = 'discuss.channel.member'
    _inherit = ["bus.listener.mixin"]
    _description = "Channel Member"
    _rec_names_search = ["channel_id", "partner_id", "guest_id"]
    # Sentinel object: create() elevates to sudo only when the context value
    # is this exact object (compared with ``is``), so external callers
    # cannot forge the bypass with an equal-but-different value.
    _bypass_create_check = {}

    # identity: exactly one of partner_id / guest_id is set (see constraint)
    partner_id = fields.Many2one("res.partner", "Partner", ondelete="cascade", index=True)
    guest_id = fields.Many2one("mail.guest", "Guest", ondelete="cascade", index=True)
    is_self = fields.Boolean(compute="_compute_is_self", search="_search_is_self")
    # channel
    channel_id = fields.Many2one("discuss.channel", "Channel", ondelete="cascade", required=True, bypass_search_access=True)
    # state
    custom_channel_name = fields.Char('Custom channel name')
    fetched_message_id = fields.Many2one('mail.message', string='Last Fetched', index="btree_not_null")
    seen_message_id = fields.Many2one('mail.message', string='Last Seen', index="btree_not_null")
    new_message_separator = fields.Integer(help="Message id before which the separator should be displayed", default=0, required=True)
    message_unread_counter = fields.Integer('Unread Messages Counter', compute='_compute_message_unread', compute_sudo=True)
    custom_notifications = fields.Selection(
        [("all", "All Messages"), ("mentions", "Mentions Only"), ("no_notif", "Nothing")],
        "Customized Notifications",
        help="Use default from user settings if not specified. This setting will only be applied to channels.",
    )
    mute_until_dt = fields.Datetime("Mute notifications until", help="If set, the member will not receive notifications from the channel until this date.")
    is_pinned = fields.Boolean("Is pinned on the interface", compute="_compute_is_pinned", search="_search_is_pinned")
    unpin_dt = fields.Datetime("Unpin date", index=True, help="Contains the date and time when the channel was unpinned by the user.")
    last_interest_dt = fields.Datetime(
        "Last Interest",
        # NOTE(review): default is 1 second in the past — presumably so an
        # event happening at creation time still sorts after it; confirm.
        default=lambda self: fields.Datetime.now() - timedelta(seconds=1),
        index=True,
        help="Contains the date and time of the last interesting event that happened in this channel for this user. This includes: creating, joining, pinning",
    )
    last_seen_dt = fields.Datetime("Last seen date")
    # RTC
    rtc_session_ids = fields.One2many(string="RTC Sessions", comodel_name='discuss.channel.rtc.session', inverse_name='channel_member_id')
    rtc_inviting_session_id = fields.Many2one('discuss.channel.rtc.session', string='Ringing session')

    # composite index supporting seen-marker lookups per channel/partner
    _seen_message_id_idx = models.Index("(channel_id, partner_id, seen_message_id)")
|
||||
|
||||
    @api.autovacuum
    def _gc_unpin_outdated_sub_channels(self):
        """Autovacuum: unpin sub-channel memberships with no recent activity.

        Targets members of channels that have a parent channel, that are
        currently pinned (no unpin date, or interest since the unpin), where
        neither the member nor the channel has shown interest for 2 days and
        no unread non-notification message remains. Matching members get
        ``unpin_dt`` set to now and their chat window is closed via the bus.
        """
        outdated_dt = fields.Datetime.now() - timedelta(days=2)
        # flush pending ORM writes so the raw SQL below sees current data
        self.env["discuss.channel"].flush_model()
        self.env["discuss.channel.member"].flush_model()
        self.env["mail.message"].flush_model()
        self.env.cr.execute(
            """
            SELECT member.id
              FROM discuss_channel_member member
              JOIN discuss_channel channel
                ON channel.id = member.channel_id
               AND channel.parent_channel_id IS NOT NULL
             WHERE (
                   member.unpin_dt IS NULL
                OR member.last_interest_dt >= member.unpin_dt
                OR channel.last_interest_dt >= member.unpin_dt
               )
               AND COALESCE(member.last_interest_dt, member.create_date) < %(outdated_dt)s
               AND COALESCE(channel.last_interest_dt, channel.create_date) < %(outdated_dt)s
               AND NOT EXISTS (
                   SELECT 1
                     FROM mail_message
                    WHERE mail_message.res_id = channel.id
                      AND mail_message.model = 'discuss.channel'
                      AND mail_message.id >= member.new_message_separator
                      AND mail_message.message_type NOT IN ('notification', 'user_notification')
               )
            """,
            {"outdated_dt": outdated_dt},
        )
        members = self.env["discuss.channel.member"].search(
            [("id", "in", [row[0] for row in self.env.cr.fetchall()])],
        )
        members.unpin_dt = fields.Datetime.now()
        for member in members:
            # ask each member's clients to close the chat window of the channel
            Store(bus_channel=member._bus_channel()).add(
                member.channel_id, {"close_chat_window": True}
            ).bus_send()
|
||||
|
||||
@api.constrains('partner_id')
|
||||
def _contrains_no_public_member(self):
|
||||
for member in self:
|
||||
if any(user._is_public() for user in member.partner_id.user_ids):
|
||||
raise ValidationError(_("Channel members cannot include public users."))
|
||||
|
||||
@api.depends_context("uid", "guest")
|
||||
def _compute_is_self(self):
|
||||
if not self:
|
||||
return
|
||||
current_partner, current_guest = self.env["res.partner"]._get_current_persona()
|
||||
self.is_self = False
|
||||
for member in self:
|
||||
if current_partner and member.partner_id == current_partner:
|
||||
member.is_self = True
|
||||
if current_guest and member.guest_id == current_guest:
|
||||
member.is_self = True
|
||||
|
||||
def _search_is_self(self, operator, operand):
|
||||
if operator != 'in':
|
||||
return NotImplemented
|
||||
current_partner, current_guest = self.env["res.partner"]._get_current_persona()
|
||||
domain_partner = Domain("partner_id", "=", current_partner.id) if current_partner else Domain.FALSE
|
||||
domain_guest = Domain("guest_id", "=", current_guest.id) if current_guest else Domain.FALSE
|
||||
return domain_partner | domain_guest
|
||||
|
||||
    def _search_is_pinned(self, operator, operand):
        """Search support for ``is_pinned``, expressed directly in SQL.

        A member is pinned when it was never unpinned, or when the member's
        or the channel's last interest is at or after the unpin date (same
        logic as ``_compute_is_pinned``).
        """
        if operator != 'in':
            return NotImplemented

        def custom_pinned(model: models.BaseModel, alias, query):
            # resolve the channel co-model to join on its table
            channel_model = model.browse().channel_id
            channel_alias = query.make_alias(alias, 'channel_id')
            # LEFT JOIN the channel so its last_interest_dt is available
            query.add_join("LEFT JOIN", channel_alias, channel_model._table, SQL(
                "%s = %s",
                model._field_to_sql(alias, 'channel_id'),
                channel_model._field_to_sql(channel_alias, 'id'),
            ))
            return SQL(
                """(%(unpin)s IS NULL
                    OR %(last_interest)s >= %(unpin)s
                    OR %(channel_last_interest)s >= %(unpin)s
                )""",
                unpin=model._field_to_sql(alias, "unpin_dt", query),
                last_interest=model._field_to_sql(alias, "last_interest_dt", query),
                channel_last_interest=channel_model._field_to_sql(channel_alias, "last_interest_dt", query),
            )

        return Domain.custom(to_sql=custom_pinned)
|
||||
|
||||
    @api.depends("channel_id.message_ids", "new_message_separator")
    def _compute_message_unread(self):
        """Count, per member, the channel messages at or after the member's
        new-message separator, excluding (user) notifications, using a single
        grouped SQL query."""
        if self.ids:
            # flush so the raw SQL sees pending ORM changes
            self.env['mail.message'].flush_model()
            self.flush_recordset(['channel_id', 'new_message_separator'])
            self.env.cr.execute("""
                     SELECT count(mail_message.id) AS count,
                            discuss_channel_member.id
                       FROM mail_message
                 INNER JOIN discuss_channel_member
                         ON discuss_channel_member.channel_id = mail_message.res_id
                      WHERE mail_message.model = 'discuss.channel'
                        AND mail_message.message_type NOT IN ('notification', 'user_notification')
                        AND mail_message.id >= discuss_channel_member.new_message_separator
                        AND discuss_channel_member.id IN %(ids)s
                   GROUP BY discuss_channel_member.id
            """, {'ids': tuple(self.ids)})
            unread_counter_by_member = {res['id']: res['count'] for res in self.env.cr.dictfetchall()}
            for member in self:
                # members absent from the result get None; the Integer field
                # coerces that to 0
                member.message_unread_counter = unread_counter_by_member.get(member.id)
        else:
            # new (unsaved) records cannot have unread messages
            self.message_unread_counter = 0
|
||||
|
||||
@api.depends("partner_id.name", "guest_id.name", "channel_id.display_name")
|
||||
def _compute_display_name(self):
|
||||
for member in self:
|
||||
member.display_name = _(
|
||||
"“%(member_name)s” in “%(channel_name)s”",
|
||||
member_name=member.partner_id.name or member.guest_id.name,
|
||||
channel_name=member.channel_id.display_name,
|
||||
)
|
||||
|
||||
@api.depends("last_interest_dt", "unpin_dt", "channel_id.last_interest_dt")
|
||||
def _compute_is_pinned(self):
|
||||
for member in self:
|
||||
member.is_pinned = (
|
||||
not member.unpin_dt
|
||||
or (
|
||||
member.last_interest_dt
|
||||
and member.last_interest_dt >= member.unpin_dt
|
||||
)
|
||||
or (
|
||||
member.channel_id.last_interest_dt
|
||||
and member.channel_id.last_interest_dt >= member.unpin_dt
|
||||
)
|
||||
)
|
||||
|
||||
    # one membership per partner (resp. guest) per channel; partial indexes
    # because only one of the two identity columns is set on a given row
    _partner_unique = models.UniqueIndex("(channel_id, partner_id) WHERE partner_id IS NOT NULL")
    _guest_unique = models.UniqueIndex("(channel_id, guest_id) WHERE guest_id IS NOT NULL")
    # exactly one of partner_id / guest_id must be set
    _partner_or_guest_exists = models.Constraint(
        'CHECK((partner_id IS NOT NULL AND guest_id IS NULL) OR (partner_id IS NULL AND guest_id IS NOT NULL))',
        'A channel member must be a partner or a guest.',
    )
|
||||
|
||||
    @api.model_create_multi
    def create(self, vals_list):
        """Override: validate channel presence and chat size, then keep
        parent channels and member-based channel names in sync.

        :raises UserError: when a vals dict has no ``channel_id``, or when
            adding a member to a one-to-one chat that already has members.
        """
        # trusted internal callers pass the sentinel object in context to
        # skip access checks (identity comparison, cannot be forged)
        if self.env.context.get("mail_create_bypass_create_check") is self._bypass_create_check:
            self = self.sudo()
        for vals in vals_list:
            if "channel_id" not in vals:
                raise UserError(
                    _(
                        "It appears you're trying to create a channel member, but it seems like you forgot to specify the related channel. "
                        "To move forward, please make sure to provide the necessary channel information."
                    )
                )
            channel = self.env["discuss.channel"].browse(vals["channel_id"])
            if channel.channel_type == "chat" and len(channel.channel_member_ids) > 0:
                raise UserError(
                    _("Adding more members to this chat isn't possible; it's designed for just two people.")
                )
        # snapshot the name-defining members before creation to detect changes
        name_members_by_channel = {
            channel: channel.channel_name_member_ids
            for channel in self.env["discuss.channel"].browse(
                {vals["channel_id"] for vals in vals_list}
            )
        }
        res = super().create(vals_list)
        # help the ORM to detect changes
        res.partner_id.invalidate_recordset(["channel_ids"])
        res.guest_id.invalidate_recordset(["channel_ids"])
        # Always link members to parent channels as well. Member list should be
        # kept in sync.
        for member in res:
            if parent := member.channel_id.parent_channel_id:
                parent._add_members(partners=member.partner_id, guests=member.guest_id)
        # notify clients of channels whose member-based name changed
        for channel, members in name_members_by_channel.items():
            if channel.channel_name_member_ids != members:
                Store(bus_channel=channel).add(
                    channel,
                    Store.Many("channel_name_member_ids", sort="id"),
                ).bus_send()
        return res
|
||||
|
||||
    def write(self, vals):
        """Override: forbid changing identity fields, and push any change of
        the synchronized fields (see ``_sync_field_names``) to the member's
        own bus channel as a Store update.

        :raises AccessError: when trying to change channel/partner/guest to a
            different record (these define the membership's identity).
        """
        for channel_member in self:
            for field_name in ['channel_id', 'partner_id', 'guest_id']:
                if field_name in vals and vals[field_name] != channel_member[field_name].id:
                    raise AccessError(_('You can not write on %(field_name)s.', field_name=field_name))

        def get_field_name(field_description):
            # a synced entry is either a plain field name or a Store.Attr
            # wrapper carrying extra serialization info
            if isinstance(field_description, Store.Attr):
                return field_description.field_name
            return field_description

        def get_vals(member):
            # map field name -> (current value, original descriptor) so the
            # descriptor can be reused when sending the diff
            return {
                get_field_name(field_description): (
                    member[get_field_name(field_description)],
                    field_description,
                )
                for field_description in self._sync_field_names()
            }

        old_vals_by_member = {member: get_vals(member) for member in self}
        result = super().write(vals)
        for member in self:
            new_values = get_vals(member)
            diff = []
            for field_name, (new_value, field_description) in new_values.items():
                old_value = old_vals_by_member[member][field_name][0]
                if new_value != old_value:
                    diff.append(field_description)
            if diff:
                # always include channel and persona so the client can route
                # the update to the right record
                diff.extend(
                    [
                        Store.One("channel_id", [], as_thread=True),
                        *self.env["discuss.channel.member"]._to_store_persona([]),
                    ]
                )
                if "message_unread_counter" in diff:
                    # sudo: bus.bus: reading non-sensitive last id
                    bus_last_id = self.env["bus.bus"].sudo()._bus_last_id()
                    diff.append({"message_unread_counter_bus_id": bus_last_id})
                Store(bus_channel=member._bus_channel()).add(member, diff).bus_send()
        return result
|
||||
|
||||
    @api.model
    def _sync_field_names(self):
        """Fields whose changes are propagated to the member's bus channel by
        ``write``. Entries are plain field names or Store descriptors (whose
        underlying field name is extracted for the comparison)."""
        return [
            "custom_channel_name",
            "custom_notifications",
            "last_interest_dt",
            "message_unread_counter",
            "mute_until_dt",
            "new_message_separator",
            # sudo: discuss.channel.rtc.session - each member can see who is inviting them
            Store.One(
                "rtc_inviting_session_id",
                extra_fields=self.rtc_inviting_session_id._get_store_extra_fields(),
                sudo=True,
            ),
            "unpin_dt",
        ]
|
||||
|
||||
    def unlink(self):
        """Override: leaving a channel also ends the member's RTC sessions,
        removes the same persona from all sub-channels of the channel, and
        refreshes member-based channel names over the bus."""
        # sudo: discuss.channel.rtc.session - cascade unlink of sessions for self member
        self.sudo().rtc_session_ids.unlink()  # ensure unlink overrides are applied
        # always unlink members of sub-channels as well
        domains = [
            [
                ("id", "not in", self.ids),
                ("partner_id", "=", member.partner_id.id),
                ("guest_id", "=", member.guest_id.id),
                ("channel_id", "in", member.channel_id.sub_channel_ids.ids),
            ]
            for member in self
        ]
        # go through _action_unfollow so its side effects apply per channel
        for member in self.env["discuss.channel.member"].search(Domain.OR(domains)):
            member.channel_id._action_unfollow(partner=member.partner_id, guest=member.guest_id)
        # sudo - discuss.channel: allowed to access channels to update member-based naming
        name_members_by_channel = {
            channel: channel.channel_name_member_ids for channel in self.channel_id
        }
        res = super().unlink()
        for channel, members in name_members_by_channel.items():
            # sudo - discuss.channel: updating channel names according to members is allowed,
            # even after the member left the channel.
            channel_sudo = channel.sudo()
            if channel_sudo.channel_name_member_ids != members:
                Store(bus_channel=channel).add(
                    channel_sudo,
                    Store.Many("channel_name_member_ids", sort="id"),
                ).bus_send()
        return res
|
||||
|
||||
def _bus_channel(self):
|
||||
return self.partner_id.main_user_id or self.guest_id
|
||||
|
||||
def _notify_typing(self, is_typing):
|
||||
""" Broadcast the typing notification to channel members
|
||||
:param is_typing: (boolean) tells whether the members are typing or not
|
||||
"""
|
||||
for member in self:
|
||||
Store(bus_channel=member.channel_id).add(
|
||||
member,
|
||||
extra_fields={"isTyping": is_typing, "is_typing_dt": fields.Datetime.now()},
|
||||
).bus_send()
|
||||
|
||||
    def _notify_mute(self):
        """Schedule the unmute cron to wake up when each member's mute expires."""
        for member in self:
            # NOTE(review): -1 looks like a client-side sentinel for "muted
            # forever" (no cron wake-up needed) — confirm against callers.
            if member.mute_until_dt and member.mute_until_dt != -1:
                self.env.ref("mail.ir_cron_discuss_channel_member_unmute")._trigger(member.mute_until_dt)
|
||||
|
||||
@api.model
|
||||
def _cleanup_expired_mutes(self):
|
||||
"""
|
||||
Cron job for cleanup expired unmute by resetting mute_until_dt and sending bus notifications.
|
||||
"""
|
||||
members = self.search([("mute_until_dt", "<=", fields.Datetime.now())])
|
||||
members.write({"mute_until_dt": False})
|
||||
members._notify_mute()
|
||||
|
||||
    def _to_store_persona(self, fields=None):
        """Store descriptors serializing the member's persona (its partner or
        its guest, whichever is set).

        :param fields: a field list, None, or the "avatar_card" preset which
            expands to avatar/im_status/name.
        """
        if fields == "avatar_card":
            fields = ["avatar_128", "im_status", "name"]
        return [
            # sudo: res.partner - reading partner related to a member is considered acceptable
            Store.Attr(
                "partner_id",
                lambda m: Store.One(
                    m.partner_id.sudo(),
                    (p_fields := m._get_store_partner_fields(fields)),
                    # mention fields are added unless an explicitly empty
                    # field list was requested
                    extra_fields=self.env["res.partner"]._get_store_mention_fields()
                    if p_fields or p_fields is None
                    else None,
                ),
                predicate=lambda m: m.partner_id,
            ),
            # sudo: mail.guest - reading guest related to a member is considered acceptable
            Store.Attr(
                "guest_id",
                lambda m: Store.One(m.guest_id.sudo(), m._get_store_guest_fields(fields)),
                predicate=lambda m: m.guest_id,
            ),
        ]
|
||||
|
||||
    def _to_store_defaults(self, target):
        """Default Store field spec for channel members.

        ``target`` is unused here — presumably part of the generic
        ``_to_store_defaults`` hook signature; confirm against the base
        implementation.
        """
        return [
            Store.One("channel_id", [], as_thread=True),
            "create_date",
            "fetched_message_id",
            "last_seen_dt",
            "seen_message_id",
            *self.env["discuss.channel.member"]._to_store_persona(),
        ]
|
||||
|
||||
    def _get_store_partner_fields(self, fields):
        """Hook: field spec used to serialize this member's partner.
        The base implementation returns ``fields`` unchanged; sub-modules may
        override to adjust it."""
        self.ensure_one()
        return fields
|
||||
|
||||
    def _get_store_guest_fields(self, fields):
        """Hook: field spec used to serialize this member's guest.
        The base implementation returns ``fields`` unchanged; sub-modules may
        override to adjust it."""
        self.ensure_one()
        return fields
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# RTC (voice/video)
|
||||
# --------------------------------------------------------------------------
|
||||
|
||||
    def _rtc_join_call(self, store: Store = None, check_rtc_session_ids=None, camera=False):
        """Join the channel's call as this member.

        Ends the persona's sessions in any other call first (one call at a
        time), creates a fresh session, reconciles the client's view of the
        session list, and optionally fills ``store`` with the data the client
        needs to connect (ICE servers, local session, SFU info).

        :param store: optional Store receiving connection data
        :param check_rtc_session_ids: session ids the client believes exist,
            verified against the server state
        :param camera: whether the call starts with the camera on
        """
        self.ensure_one()
        # find every session of the same persona, across all channels
        session_domain = []
        if self.partner_id:
            session_domain = [("partner_id", "=", self.partner_id.id)]
        elif self.guest_id:
            session_domain = [("guest_id", "=", self.guest_id.id)]
        user_sessions = self.search(session_domain).rtc_session_ids
        check_rtc_session_ids = (check_rtc_session_ids or []) + user_sessions.ids
        self.channel_id._rtc_cancel_invitations(member_ids=self.ids)
        # drop the persona's previous sessions before creating the new one
        user_sessions.unlink()
        rtc_session = self.env['discuss.channel.rtc.session'].create({'channel_member_id': self.id, 'is_camera_on': camera})
        current_rtc_sessions, outdated_rtc_sessions = self._rtc_sync_sessions(check_rtc_session_ids=check_rtc_session_ids)
        ice_servers = self.env["mail.ice.server"]._get_ice_servers()
        # may switch the call to SFU mode depending on participant count
        self._join_sfu(ice_servers)
        if store:
            store.add(
                self.channel_id, {"rtc_session_ids": Store.Many(current_rtc_sessions, mode="ADD")}
            )
            store.add(
                self.channel_id,
                {"rtc_session_ids": Store.Many(outdated_rtc_sessions, [], mode="DELETE")},
            )
            store.add_singleton_values(
                "Rtc",
                {
                    "iceServers": ice_servers or False,
                    "localSession": Store.One(rtc_session),
                    "serverInfo": self._get_rtc_server_info(rtc_session, ice_servers),
                },
            )
        if self.channel_id._should_invite_members_to_join_call():
            self._rtc_invite_members()
|
||||
|
||||
    def _join_sfu(self, ice_servers=None, force=False):
        """Provision (or clear) the SFU channel depending on the call size.

        Below ``SFU_MODE_THRESHOLD`` participants (unless ``force``) the call
        stays peer-to-peer and stale SFU data is cleared. Otherwise, if not
        already provisioned, a channel is requested from the SFU server and
        every open session is told to hot-swap onto it. Failures to reach the
        SFU are logged and the call stays p2p (best effort).
        """
        if len(self.channel_id.rtc_session_ids) < SFU_MODE_THRESHOLD and not force:
            if self.channel_id.sfu_channel_uuid:
                # drop stale SFU info so clients fall back to p2p
                self.channel_id.sfu_channel_uuid = None
                self.channel_id.sfu_server_url = None
            return
        elif self.channel_id.sfu_channel_uuid and self.channel_id.sfu_server_url:
            # already provisioned, nothing to do
            return
        sfu_server_url = discuss.get_sfu_url(self.env)
        if not sfu_server_url:
            return
        # local key used to sign per-session tokens; generated once, then stored
        sfu_local_key = self.env["ir.config_parameter"].sudo().get_param("mail.sfu_local_key")
        if not sfu_local_key:
            sfu_local_key = str(uuid.uuid4())
            self.env["ir.config_parameter"].sudo().set_param("mail.sfu_local_key", sfu_local_key)
        # short-lived token authenticating this server to the SFU
        json_web_token = jwt.sign(
            {"iss": f"{self.get_base_url()}:channel:{self.channel_id.id}", "key": sfu_local_key},
            key=discuss.get_sfu_key(self.env),
            ttl=30,
            algorithm=jwt.Algorithm.HS256,
        )
        try:
            response = requests.get(
                sfu_server_url + "/v1/channel",
                headers={"Authorization": "jwt " + json_web_token},
                timeout=3,
            )
            response.raise_for_status()
        except requests.exceptions.RequestException as error:
            _logger.warning("Failed to obtain a channel from the SFU server, user will stay in p2p: %s", error)
            return
        response_dict = response.json()
        self.channel_id.sfu_channel_uuid = response_dict["uuid"]
        self.channel_id.sfu_server_url = response_dict["url"]
        for session in self.channel_id.rtc_session_ids:
            # tell ongoing sessions to reconnect through the SFU
            session._bus_send(
                "discuss.channel.rtc.session/sfu_hot_swap",
                {"serverInfo": self._get_rtc_server_info(session, ice_servers, key=sfu_local_key)},
            )
|
||||
|
||||
    def _get_rtc_server_info(self, rtc_session, ice_servers=None, key=None):
        """Return SFU connection info for ``rtc_session`` (server url, channel
        uuid and a signed session token), or None when the channel has no SFU
        channel provisioned.

        :param key: signing key; defaults to the stored ``mail.sfu_local_key``
        """
        sfu_channel_uuid = self.channel_id.sfu_channel_uuid
        sfu_server_url = self.channel_id.sfu_server_url
        if not sfu_channel_uuid or not sfu_server_url:
            return None
        if not key:
            key = self.env["ir.config_parameter"].sudo().get_param("mail.sfu_local_key")
        claims = {
            "session_id": rtc_session.id,
            "ice_servers": ice_servers,
        }
        json_web_token = jwt.sign(claims, key=key, ttl=60 * 60 * 8, algorithm=jwt.Algorithm.HS256)  # 8 hours
        return {"url": sfu_server_url, "channelUUID": sfu_channel_uuid, "jsonWebToken": json_web_token}
|
||||
|
||||
def _rtc_leave_call(self, session_id=None):
|
||||
self.ensure_one()
|
||||
if self.rtc_session_ids:
|
||||
if session_id:
|
||||
self.rtc_session_ids.filtered(lambda rec: rec.id == session_id).unlink()
|
||||
return
|
||||
self.rtc_session_ids.unlink()
|
||||
else:
|
||||
self.channel_id._rtc_cancel_invitations(member_ids=self.ids)
|
||||
|
||||
    def _rtc_sync_sessions(self, check_rtc_session_ids=None):
        """Synchronize the RTC sessions for self channel member.
        - Inactive sessions of the channel are deleted.
        - Current sessions are returned.
        - Sessions given in check_rtc_session_ids that no longer exists
          are returned as non-existing.

        :param list check_rtc_session_ids: list of the ids of the sessions to check
        :returns: (current_rtc_sessions, outdated_rtc_sessions)
        :rtype: tuple
        """
        self.ensure_one()
        self.channel_id.rtc_session_ids._delete_inactive_rtc_sessions()
        # ids may arrive from the client as strings; normalize to int first
        check_rtc_sessions = self.env['discuss.channel.rtc.session'].browse([int(check_rtc_session_id) for check_rtc_session_id in (check_rtc_session_ids or [])])
        return self.channel_id.rtc_session_ids, check_rtc_sessions - self.channel_id.rtc_session_ids
|
||||
|
||||
    def _get_rtc_invite_members_domain(self, member_ids=None):
        """ Get the domain used to get the members to invite to an RTC call on
            the member's channel: members without an active or ringing session
            whose persona is not busy and (for guests) recently present.

            :param list member_ids: optional member ids to restrict the domain to.
        """
        self.ensure_one()
        domain = Domain.AND([
            [('channel_id', '=', self.channel_id.id)],
            [('rtc_inviting_session_id', '=', False)],
            [('rtc_session_ids', '=', False)],
            # skip members whose user explicitly set a "busy" status
            Domain.OR([
                [("partner_id", "=", False)],
                [("partner_id.user_ids.manual_im_status", "!=", "busy")],
            ]),
            # guests must have polled their presence recently
            # NOTE(review): "-12H" presumably a relative-datetime literal
            # understood by the Domain/ORM layer — confirm.
            Domain("guest_id", "=", False) | Domain("guest_id.presence_ids.last_poll", ">", "-12H"),
        ])
        if member_ids:
            domain &= Domain('id', 'in', member_ids)
        return domain
|
||||
|
||||
    def _rtc_invite_members(self, member_ids=None):
        """ Sends invitations to join the RTC call to all connected members of the thread who are not already invited,
        if member_ids is set, only the specified ids will be invited.

        :param list member_ids: ids of the discuss.channel.member records to invite
        :returns: the members that were actually invited
        """
        self.ensure_one()
        members = self.env["discuss.channel.member"].search(
            self._get_rtc_invite_members_domain(member_ids)
        )
        if members:
            # Mark the invite as coming from this member's session so it can
            # be cancelled later (see _rtc_cancel_invitations).
            members.rtc_inviting_session_id = self.rtc_session_ids.id
            Store(bus_channel=self.channel_id).add(
                self.channel_id,
                {
                    "invited_member_ids": Store.Many(
                        members,
                        [
                            Store.One("channel_id", [], as_thread=True),
                            *self.env["discuss.channel.member"]._to_store_persona("avatar_card"),
                        ],
                        mode="ADD",
                    ),
                },
            ).bus_send()
            # Also push a native notification to the invitees' registered devices.
            devices, private_key, public_key = self.channel_id._web_push_get_partners_parameters(members.partner_id.ids)
            if devices:
                if self.channel_id.channel_type != 'chat':
                    icon = f"/web/image/discuss.channel/{self.channel_id.id}/avatar_128"
                elif guest := self.env["mail.guest"]._get_guest_from_context():
                    icon = f"/web/image/mail.guest/{guest.id}/avatar_128"
                elif partner := self.env.user.partner_id:
                    icon = f"/web/image/res.partner/{partner.id}/avatar_128"
                # NOTE(review): if channel_type == 'chat' and neither a guest
                # nor a partner is found, `icon` would be unbound below —
                # presumably env.user.partner_id is always set here; confirm.
                languages = [partner.lang for partner in devices.partner_id]
                payload_by_lang = {}
                # One payload per language so the notification is translated
                # for each recipient.
                for lang in languages:
                    env_lang = self.with_context(lang=lang).env
                    payload_by_lang[lang] = {
                        "title": env_lang._("Incoming call"),
                        "options": {
                            "body": env_lang._("Conference: %s", self.channel_id.display_name),
                            "icon": icon,
                            "vibrate": [100, 50, 100],
                            "requireInteraction": True,
                            "tag": self.channel_id._get_call_notification_tag(),
                            "data": {
                                "type": PUSH_NOTIFICATION_TYPE.CALL,
                                "model": "discuss.channel",
                                "action": "mail.action_discuss",
                                "res_id": self.channel_id.id,
                            },
                            "actions": [
                                {
                                    "action": PUSH_NOTIFICATION_ACTION.DECLINE,
                                    "type": "button",
                                    "title": env_lang._("Decline"),
                                },
                                {
                                    "action": PUSH_NOTIFICATION_ACTION.ACCEPT,
                                    "type": "button",
                                    "title": env_lang._("Accept"),
                                },
                            ]
                        }
                    }
                self.channel_id._web_push_send_notification(devices, private_key, public_key, payload_by_lang=payload_by_lang)
        return members
|
||||
|
||||
def _mark_as_read(self, last_message_id):
|
||||
"""
|
||||
Mark channel as read by updating the seen message id of the current
|
||||
member as well as its new message separator.
|
||||
|
||||
:param last_message_id: the id of the message to be marked as read.
|
||||
"""
|
||||
self.ensure_one()
|
||||
domain = [
|
||||
("model", "=", "discuss.channel"),
|
||||
("res_id", "=", self.channel_id.id),
|
||||
("id", "<=", last_message_id),
|
||||
]
|
||||
last_message = self.env['mail.message'].search(domain, order="id DESC", limit=1)
|
||||
if not last_message:
|
||||
return
|
||||
self._set_last_seen_message(last_message)
|
||||
self._set_new_message_separator(last_message.id + 1)
|
||||
|
||||
    def _set_last_seen_message(self, message, notify=True):
        """
        Set the last seen message of the current member.

        :param message: the message to set as last seen message.
        :param notify: whether to send a bus notification relative to the new
            last seen message.
        """
        self.ensure_one()
        bus_channel = self._bus_channel()
        # Only move the pointer forward: messages older than the current seen
        # message are ignored (but the notification below is still sent).
        if self.seen_message_id.id < message.id:
            self.write({
                "fetched_message_id": max(self.fetched_message_id.id, message.id),
                "seen_message_id": message.id,
                "last_seen_dt": fields.Datetime.now(),
            })
            # Channel types exposing seen indicators to everyone broadcast on
            # the channel itself rather than on the member's private bus.
            if self.channel_id.channel_type in self.channel_id._types_allowing_seen_infos():
                bus_channel = self.channel_id
        if not notify:
            return
        Store(bus_channel=bus_channel).add(
            self,
            [
                Store.One("channel_id", [], as_thread=True),
                *self.env["discuss.channel.member"]._to_store_persona("avatar_card"),
                "seen_message_id",
            ],
        ).bus_send()
|
||||
|
||||
    def _set_new_message_separator(self, message_id):
        """Position the member's "new messages" separator.

        :param message_id: id of the message above which the new message
            separator should be displayed.
        """
        self.ensure_one()
        if message_id == self.new_message_separator:
            # Separator unchanged: just (re-)notify so the member's clients
            # resynchronize their unread counter. The bus id is included so
            # clients can discard notifications that are already outdated.
            bus_last_id = self.env["bus.bus"].sudo()._bus_last_id()
            Store(bus_channel=self._bus_channel()).add(
                self,
                [
                    Store.One("channel_id", [], as_thread=True),
                    "message_unread_counter",
                    {"message_unread_counter_bus_id": bus_last_id},
                    "new_message_separator",
                    *self.env["discuss.channel.member"]._to_store_persona([]),
                ],
            ).bus_send()
            return
        # NOTE(review): presumably writing the field triggers an equivalent
        # notification elsewhere (e.g. a write override) — confirm.
        self.new_message_separator = message_id
|
||||
|
||||
def _get_html_link_title(self):
|
||||
return self.partner_id.name if self.partner_id else self.guest_id.name
|
||||
|
||||
def _get_html_link(self, *args, for_persona=False, **kwargs):
|
||||
if not for_persona:
|
||||
return self._get_html_link(*args, **kwargs)
|
||||
if self.partner_id:
|
||||
return self.partner_id._get_html_link(title=f"@{self._get_html_link_title()}")
|
||||
return Markup("<strong>%s</strong>") % self.guest_id.name
|
||||
|
|
@ -0,0 +1,181 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from collections import defaultdict
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from markupsafe import Markup
|
||||
|
||||
from odoo import api, fields, models
|
||||
from odoo.addons.mail.tools import discuss, jwt
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DiscussChannelRtcSession(models.Model):
    """A channel member's live presence in a call (one record per member in call)."""

    _name = 'discuss.channel.rtc.session'
    _inherit = ["bus.listener.mixin"]
    _description = 'Mail RTC session'
    _rec_name = 'channel_member_id'

    channel_member_id = fields.Many2one('discuss.channel.member', required=True, ondelete='cascade')
    channel_id = fields.Many2one('discuss.channel', related='channel_member_id.channel_id', store=True, readonly=True, index='btree_not_null')
    partner_id = fields.Many2one('res.partner', related='channel_member_id.partner_id', string="Partner", store=True, index=True)
    guest_id = fields.Many2one('mail.guest', related='channel_member_id.guest_id')

    # Indexed because session liveness is derived from the last write
    # (see _inactive_rtc_session_domain).
    write_date = fields.Datetime("Last Updated On", index=True)

    is_screen_sharing_on = fields.Boolean(string="Is sharing the screen")
    is_camera_on = fields.Boolean(string="Is sending user video")
    is_muted = fields.Boolean(string="Is microphone muted")
    is_deaf = fields.Boolean(string="Has disabled incoming sound")

    _channel_member_unique = models.Constraint(
        'UNIQUE(channel_member_id)',
        'There can only be one rtc session per channel member',
    )

    @api.model_create_multi
    def create(self, vals_list):
        """Create sessions, notify their channels and open call history records.

        Returns all created sessions, as required by the ORM contract.
        """
        rtc_sessions = super().create(vals_list)
        rtc_sessions_by_channel = defaultdict(lambda: self.env["discuss.channel.rtc.session"])
        for rtc_session in rtc_sessions:
            rtc_sessions_by_channel[rtc_session.channel_id] += rtc_session
        # Fix: do not shadow ``rtc_sessions`` with the loop variable. The
        # shadowing made the call-start loop below and the returned recordset
        # only cover the last channel's sessions when creating sessions on
        # several channels at once.
        for channel, channel_sessions in rtc_sessions_by_channel.items():
            Store(bus_channel=channel).add(
                channel,
                {"rtc_session_ids": Store.Many(channel_sessions, mode="ADD")},
            ).bus_send()
        # A channel whose only session is a new one just had its call started:
        # post the call message and open a history record.
        for channel in rtc_sessions.channel_id.filtered(lambda c: len(c.rtc_session_ids) == 1):
            body = Markup('<div data-oe-type="call" class="o_mail_notification"></div>')
            message = channel.message_post(body=body, message_type="notification")
            # sudo - discuss.call.history: can create call history when call is created.
            self.env["discuss.call.history"].sudo().create(
                {
                    "channel_id": channel.id,
                    "start_dt": fields.Datetime.now(),
                    "start_call_message_id": message.id,
                },
            )
            Store(bus_channel=channel).add(message, [Store.Many("call_history_ids", [])]).bus_send()
        return rtc_sessions

    def unlink(self):
        """Delete sessions, ending calls and closing history on emptied channels."""
        call_ended_channels = self.channel_id.filtered(lambda c: not (c.rtc_session_ids - self))
        for channel in call_ended_channels:
            # If there is no member left in the RTC call, all invitations are cancelled.
            # Note: invitation depends on field `rtc_inviting_session_id` so the cancel must be
            # done before the delete to be able to know who was invited.
            channel._rtc_cancel_invitations()
            # If there is no member left in the RTC call, we remove the SFU channel uuid as the SFU
            # server will timeout the channel. It is better to obtain a new channel from the SFU server
            # than to attempt recycling a possibly stale channel uuid.
            channel.sfu_channel_uuid = False
            channel.sfu_server_url = False
        rtc_sessions_by_channel = defaultdict(lambda: self.env["discuss.channel.rtc.session"])
        for rtc_session in self:
            rtc_sessions_by_channel[rtc_session.channel_id] += rtc_session
        # Same naming hygiene as create(): keep the loop variable distinct.
        for channel, channel_sessions in rtc_sessions_by_channel.items():
            Store(bus_channel=channel).add(
                channel,
                {"rtc_session_ids": Store.Many(channel_sessions, [], mode="DELETE")},
            ).bus_send()
        for rtc_session in self:
            rtc_session._bus_send(
                "discuss.channel.rtc.session/ended", {"sessionId": rtc_session.id}
            )
        # sudo - discuss.call.history: setting the end date of the call
        # after it ends is allowed.
        for history in (
            self.env["discuss.call.history"]
            .sudo()
            .search([("channel_id", "in", call_ended_channels.ids), ("end_dt", "=", False)])
        ):
            history.end_dt = fields.Datetime.now()
            Store(bus_channel=history.channel_id).add(
                history,
                ["duration_hour", "end_dt"],
            ).bus_send()
        return super().unlink()

    def _bus_channel(self):
        # Sessions notify on their member's bus channel.
        return self.channel_member_id._bus_channel()

    def _update_and_broadcast(self, values):
        """ Updates the session and notifies all members of the channel
        of the change. Only the whitelisted state flags may be written.
        """
        valid_values = {'is_screen_sharing_on', 'is_camera_on', 'is_muted', 'is_deaf'}
        self.write({key: values[key] for key in valid_values if key in values})
        store = Store().add(self, extra_fields=self._get_store_extra_fields())
        self.channel_id._bus_send(
            "discuss.channel.rtc.session/update_and_broadcast",
            {"data": store.get_result(), "channelId": self.channel_id.id},
        )

    @api.autovacuum
    def _gc_inactive_sessions(self):
        """ Garbage collect sessions that aren't active anymore,
        this can happen when the server or the user's browser crash
        or when the user's odoo session ends.
        """
        self.search(self._inactive_rtc_session_domain()).unlink()

    def action_disconnect(self):
        """Best-effort disconnect of the sessions on their SFU servers, then delete them."""
        session_ids_by_channel_by_url = defaultdict(lambda: defaultdict(list))
        for rtc_session in self:
            sfu_channel_uuid = rtc_session.channel_id.sfu_channel_uuid
            url = rtc_session.channel_id.sfu_server_url
            if sfu_channel_uuid and url:
                session_ids_by_channel_by_url[url][sfu_channel_uuid].append(rtc_session.id)
        key = discuss.get_sfu_key(self.env)
        if key:
            with requests.Session() as requests_session:
                for url, session_ids_by_channel in session_ids_by_channel_by_url.items():
                    try:
                        requests_session.post(
                            url + '/v1/disconnect',
                            data=jwt.sign({'sessionIdsByChannel': session_ids_by_channel}, key=key, ttl=20, algorithm=jwt.Algorithm.HS256),
                            timeout=3
                        ).raise_for_status()
                    except requests.exceptions.RequestException as error:
                        # SFU failure must not block the local cleanup below.
                        _logger.warning("Could not disconnect sessions at sfu server %s: %s", url, error)
        self.unlink()

    def _delete_inactive_rtc_sessions(self):
        """Deletes the inactive sessions from self."""
        self.filtered_domain(self._inactive_rtc_session_domain()).unlink()

    def _notify_peers(self, notifications):
        """ Used for peer-to-peer communication,
        guarantees that the sender is the current guest or partner.

        :param notifications: list of tuple with the following elements:
            - target_session_ids: a list of discuss.channel.rtc.session ids
            - content: a string with the content to be sent to the targets
        """
        self.ensure_one()
        payload_by_target = defaultdict(lambda: {'sender': self.id, 'notifications': []})
        for target_session_ids, content in notifications:
            # .exists() drops targets deleted since the client last synced.
            for target_session in self.env['discuss.channel.rtc.session'].browse(target_session_ids).exists():
                payload_by_target[target_session]['notifications'].append(content)
        for target, payload in payload_by_target.items():
            target._bus_send("discuss.channel.rtc.session/peer_notification", payload)

    def _to_store_defaults(self, target):
        # Default Store representation: the member with its channel and persona.
        return Store.One(
            "channel_member_id",
            [
                Store.One("channel_id", [], as_thread=True),
                *self.env["discuss.channel.member"]._to_store_persona("avatar_card"),
            ],
        )

    def _get_store_extra_fields(self):
        # State flags broadcast by _update_and_broadcast.
        return ["is_camera_on", "is_deaf", "is_muted", "is_screen_sharing_on"]

    @api.model
    def _inactive_rtc_session_domain(self):
        # A session is considered dead after 1 min 15 s without an update.
        return [('write_date', '<', fields.Datetime.now() - relativedelta(minutes=1, seconds=15))]
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
|
||||
|
||||
class DiscussGifFavorite(models.Model):
    """Per-user favorite GIFs, identified by their Tenor API id."""

    _name = 'discuss.gif.favorite'
    _description = "Save favorite GIF from Tenor API"

    tenor_gif_id = fields.Char("GIF id from Tenor", required=True)

    # A user (create_uid) may favorite a given GIF only once.
    _user_gif_favorite = models.Constraint(
        'unique(create_uid,tenor_gif_id)',
        'User should not have duplicated favorite GIF',
    )
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
|
||||
|
||||
class DiscussVoiceMetadata(models.Model):
    """Marker record attached to attachments that are voice messages."""

    _name = 'discuss.voice.metadata'
    _description = "Metadata for voice attachments"

    # Cascade-deleted with the attachment; not copied on duplication.
    attachment_id = fields.Many2one(
        "ir.attachment", ondelete="cascade", bypass_search_access=True, copy=False, index=True
    )
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models, fields
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class IrAttachment(models.Model):
    """Discuss extensions of attachments: voice-message flag and bus routing."""

    _inherit = "ir.attachment"

    # Presence of a metadata record marks the attachment as a voice message.
    voice_ids = fields.One2many("discuss.voice.metadata", "attachment_id")

    def _bus_channel(self):
        self.ensure_one()
        # Channel attachments broadcast on their channel's bus.
        if self.res_model == "discuss.channel" and self.res_id:
            return self.env["discuss.channel"].browse(self.res_id)
        # Public (guest) visitors are notified on their own guest bus.
        guest = self.env["mail.guest"]._get_guest_from_context()
        if self.env.user._is_public() and guest:
            return guest
        return super()._bus_channel()

    def _to_store_defaults(self, target):
        # sudo: discuss.voice.metadata - checking the existence of voice metadata for accessible
        # attachments is fine
        return super()._to_store_defaults(target) + [Store.Many("voice_ids", [], sudo=True)]

    def _post_add_create(self, **kwargs):
        super()._post_add_create(**kwargs)
        # Attachments uploaded with the `voice` flag are tagged as voice messages.
        if kwargs.get('voice'):
            self._set_voice_metadata()

    def _set_voice_metadata(self):
        # One metadata record per attachment in self (see voice_ids above).
        self.env["discuss.voice.metadata"].create([{"attachment_id": att.id} for att in self])
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import re
|
||||
|
||||
from odoo import models
|
||||
from odoo.fields import Domain
|
||||
|
||||
|
||||
class IrWebsocket(models.AbstractModel):
    """Override to handle discuss specific features (channel in particular)."""

    _inherit = "ir.websocket"

    def _build_bus_channel_list(self, channels):
        """Resolve discuss-specific string channels into record channels.

        ``mail.guest_<token>`` entries authenticate a guest (stored in the
        context for the rest of the build); ``discuss.channel_<id>`` entries
        are collected and resolved through a single search so membership and
        access rules apply.
        """
        channels = list(channels)  # do not alter original list
        discuss_channel_ids = list()
        for channel in list(channels):
            if isinstance(channel, str) and channel.startswith("mail.guest_"):
                channels.remove(channel)
                guest = self.env["mail.guest"]._get_guest_from_token(channel.split("_")[1])
                if guest:
                    # Rebind self so subsequent env lookups see the guest.
                    self = self.with_context(guest=guest)
            if isinstance(channel, str):
                match = re.findall(r'discuss\.channel_(\d+)', channel)
                if match:
                    channels.remove(channel)
                    discuss_channel_ids.append(int(match[0]))
        guest = self.env["mail.guest"]._get_guest_from_context()
        if guest:
            channels.append(guest)
        # Channels the user is a member of, plus the explicitly requested ones;
        # search() enforces access rules on the latter.
        domain = ["|", ("is_member", "=", True), ("id", "in", discuss_channel_ids)]
        all_user_channels = self.env["discuss.channel"].search(domain)
        # Internal users additionally listen on the internal-only variant of
        # each of their channels.
        internal_specific_channels = [
            (c, "internal_users")
            for c in all_user_channels
            if not self.env.user.share
        ]
        channels.extend([*all_user_channels, *internal_specific_channels])
        return super()._build_bus_channel_list(channels)
|
||||
157
odoo-bringout-oca-ocb-mail/mail/models/discuss/mail_guest.py
Normal file
157
odoo-bringout-oca-ocb-mail/mail/models/discuss/mail_guest.py
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import pytz
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from odoo.tools import consteq, get_lang
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.http import request
|
||||
from odoo.addons.base.models.res_partner import _tz_get
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools.misc import limited_field_access_token
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class MailGuest(models.Model):
    """Anonymous website visitor taking part in discuss channels.

    Guests authenticate through a cookie of the form
    ``"<id><separator><access_token>"`` (see ``_format_auth_cookie``).
    """

    _name = 'mail.guest'
    _description = "Guest"
    _inherit = ["avatar.mixin", "bus.listener.mixin"]
    _avatar_name_field = "name"
    _cookie_name = 'dgid'
    _cookie_separator = '|'

    @api.model
    def _lang_get(self):
        # Selection values for the `lang` field: installed languages.
        return self.env['res.lang'].get_installed()

    name = fields.Char(string="Name", required=True)
    access_token = fields.Char(string="Access Token", default=lambda self: str(uuid.uuid4()), groups='base.group_system', required=True, readonly=True, copy=False)
    country_id = fields.Many2one(string="Country", comodel_name='res.country')
    email = fields.Char()
    lang = fields.Selection(string="Language", selection=_lang_get)
    timezone = fields.Selection(string="Timezone", selection=_tz_get)
    channel_ids = fields.Many2many(string="Channels", comodel_name='discuss.channel', relation='discuss_channel_member', column1='guest_id', column2='channel_id', copy=False)
    presence_ids = fields.One2many("mail.presence", "guest_id", groups="base.group_system")
    # sudo: mail.guest - can access presence of accessible guest
    im_status = fields.Char("IM Status", compute="_compute_im_status", compute_sudo=True)
    offline_since = fields.Datetime("Offline since", compute="_compute_im_status", compute_sudo=True)

    @api.depends("presence_ids.status")
    def _compute_im_status(self):
        # Guests without a presence record are reported as offline.
        for guest in self:
            guest.im_status = guest.presence_ids.status or "offline"
            guest.offline_since = (
                guest.presence_ids.last_poll
                if guest.im_status == "offline"
                else None
            )

    def _get_guest_from_token(self, token=""):
        """Returns the guest record for the given token, if applicable."""
        guest = self.env["mail.guest"]
        parts = token.split(self._cookie_separator)
        if len(parts) == 2:
            guest_id, guest_access_token = parts
            # sudo: mail.guest: guests need sudo to read their access_token
            guest = self.browse(int(guest_id)).sudo().exists()
            # consteq: constant-time compare to avoid timing attacks.
            if not guest or not guest.access_token or not consteq(guest.access_token, guest_access_token):
                guest = self.env["mail.guest"]
        return guest.sudo(False)

    def _get_guest_from_context(self):
        """Returns the current guest record from the context, if applicable."""
        guest = self.env.context.get('guest')
        if isinstance(guest, self.pool['mail.guest']):
            assert len(guest) <= 1, "Context guest should be empty or a single record."
            return guest.sudo(False).with_context(guest=guest)
        return self.env['mail.guest']

    def _get_or_create_guest(self, *, guest_name, country_code, timezone):
        """Return the context guest, creating (and cookie-authenticating) one if absent."""
        if not (guest := self._get_guest_from_context()):
            guest = self.create(
                {
                    "country_id": self.env["res.country"].search([("code", "=", country_code)]).id,
                    "lang": get_lang(self.env).code,
                    "name": guest_name,
                    "timezone": timezone,
                }
            )
            guest._set_auth_cookie()
        return guest.sudo(False)

    def _get_timezone_from_request(self, request):
        # Only accept values that are valid IANA timezone names.
        timezone = request.cookies.get('tz')
        return timezone if timezone in pytz.all_timezones else False

    def _update_name(self, name):
        """Rename the guest and broadcast the new identity on every channel it is in."""
        self.ensure_one()
        name = name.strip()
        if len(name) < 1:
            raise UserError(_("Guest's name cannot be empty."))
        if len(name) > 512:
            raise UserError(_("Guest's name is too long."))
        self.name = name
        for channel in self.channel_ids:
            Store(bus_channel=channel).add(self, ["avatar_128", "name"]).bus_send()
        Store(bus_channel=self).add(self, ["avatar_128", "name"]).bus_send()

    def _update_timezone(self, timezone):
        # Raw SQL with SKIP LOCKED: best-effort update that never blocks on a
        # row concurrently being written (timezone is not critical data).
        query = """
            UPDATE mail_guest
            SET timezone = %s
            WHERE id IN (
                SELECT id FROM mail_guest WHERE id = %s
                FOR NO KEY UPDATE SKIP LOCKED
            )
        """
        self.env.cr.execute(query, (timezone, self.id))

    def _get_im_status_access_token(self):
        """Return a scoped access token for the `im_status` field. The token is used in
        `ir_websocket._prepare_subscribe_data` to grant access to presence channels.

        :rtype: str
        """
        self.ensure_one()
        return limited_field_access_token(self, "im_status", scope="mail.presence")

    def _field_store_repr(self, field_name):
        """Expand special fields into their Store representation (adds access tokens)."""
        if field_name == "avatar_128":
            return [
                Store.Attr("avatar_128_access_token", lambda g: g._get_avatar_128_access_token()),
                "write_date",
            ]
        if field_name == "im_status":
            return [
                "im_status",
                Store.Attr("im_status_access_token", lambda g: g._get_im_status_access_token()),
            ]
        return [field_name]

    def _to_store_defaults(self, target):
        # Default fields sent to the web client for a guest.
        return ["avatar_128", "im_status", "name"]

    def _set_auth_cookie(self):
        """Add a cookie to the response to identify the guest. Every route
        that expects a guest will make use of it to authenticate the guest
        through `add_guest_to_context`.
        """
        self.ensure_one()
        expiration_date = datetime.now() + timedelta(days=365)
        request.future_response.set_cookie(
            self._cookie_name,
            self._format_auth_cookie(),
            httponly=True,
            expires=expiration_date,
        )
        request.update_context(guest=self.sudo(False))

    def _format_auth_cookie(self):
        """Format the cookie value for the given guest.

        :return: formatted cookie value
        :rtype: str
        """
        self.ensure_one()
        return f"{self.id}{self._cookie_separator}{self.access_token}"
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, models, fields
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class MailMessage(models.Model):
    """Discuss extensions of messages: channel accessor and call history data."""

    _inherit = "mail.message"

    call_history_ids = fields.One2many("discuss.call.history", "start_call_message_id")
    channel_id = fields.Many2one("discuss.channel", compute="_compute_channel_id")

    @api.depends("model", "res_id")
    def _compute_channel_id(self):
        # Convenience accessor: the channel a channel-message belongs to.
        for message in self:
            if message.model == "discuss.channel" and message.res_id:
                message.channel_id = self.env["discuss.channel"].browse(message.res_id)
            else:
                message.channel_id = False

    def _to_store_defaults(self, target):
        return super()._to_store_defaults(target) + [
            Store.Many(
                "call_history_ids",
                ["duration_hour", "end_dt"],
                # Only call-start notification messages carry call history.
                predicate=lambda m: m.body and 'data-oe-type="call"' in m.body,
            ),
        ]

    def _extras_to_store(self, store: Store, format_reply):
        super()._extras_to_store(store, format_reply=format_reply)
        if format_reply:
            # sudo: mail.message: access to parent is allowed
            store.add(
                self.sudo().filtered(lambda message: message.channel_id),
                Store.One("parent_id", format_reply=False),
            )

    def _bus_channel(self):
        self.ensure_one()
        # Channel messages broadcast on their channel's bus.
        if self.channel_id:
            return self.channel_id
        # Public (guest) visitors are notified on their own guest bus.
        guest = self.env["mail.guest"]._get_guest_from_context()
        if self.env.user._is_public() and guest:
            return guest
        return super()._bus_channel()
|
||||
13
odoo-bringout-oca-ocb-mail/mail/models/discuss/res_groups.py
Normal file
13
odoo-bringout-oca-ocb-mail/mail/models/discuss/res_groups.py
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models
|
||||
|
||||
|
||||
class ResGroups(models.Model):
    """Keep group-restricted channels in sync when group membership changes."""

    _inherit = "res.groups"

    def write(self, vals):
        result = super().write(vals)
        if vals.get("user_ids"):
            # Membership changed: re-run auto-subscription on every channel
            # restricted to these groups (or groups implying them).
            channels = self.env["discuss.channel"].search(
                [("group_ids", "in", self.all_implied_ids._ids)]
            )
            channels._subscribe_users_automatically()
        return result
|
||||
158
odoo-bringout-oca-ocb-mail/mail/models/discuss/res_partner.py
Normal file
158
odoo-bringout-oca-ocb-mail/mail/models/discuss/res_partner.py
Normal file
|
|
@ -0,0 +1,158 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import email_normalize, single_email_re, SQL
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
from odoo.exceptions import AccessError
|
||||
|
||||
|
||||
class ResPartner(models.Model):
|
||||
_inherit = "res.partner"
|
||||
|
||||
channel_ids = fields.Many2many(
|
||||
"discuss.channel",
|
||||
"discuss_channel_member",
|
||||
"partner_id",
|
||||
"channel_id",
|
||||
string="Channels",
|
||||
copy=False,
|
||||
)
|
||||
channel_member_ids = fields.One2many("discuss.channel.member", "partner_id")
|
||||
is_in_call = fields.Boolean(compute="_compute_is_in_call", groups="base.group_system")
|
||||
rtc_session_ids = fields.One2many("discuss.channel.rtc.session", "partner_id")
|
||||
|
||||
@api.depends("rtc_session_ids")
|
||||
def _compute_is_in_call(self):
|
||||
for partner in self:
|
||||
partner.is_in_call = bool(partner.rtc_session_ids)
|
||||
|
||||
    @api.readonly
    @api.model
    def search_for_channel_invite(self, search_term, channel_id=None, limit=30):
        """Returns partners matching search_term that can be invited to a channel.

        - If `channel_id` is specified, only partners that can actually be invited to the channel
        are returned (not already members, and in accordance to the channel configuration).

        - If no matching partners are found and the search term is a valid email address,
        then the method may return `selectable_email` as a fallback direct email invite, provided that
        the channel allows invites by email.

        """
        store = Store()
        channel_invites = self._search_for_channel_invite(store, search_term, channel_id, limit)
        selectable_email = None
        email_already_sent = None
        if channel_invites["count"] == 0 and single_email_re.match(search_term):
            email = email_normalize(search_term)
            # NOTE(review): `int(channel_id)` raises when channel_id is None
            # and the search term is an email — presumably callers always pass
            # channel_id in that flow; confirm against the route.
            channel = self.env["discuss.channel"].search_fetch([("id", "=", int(channel_id))])
            # Neither a guest nor a partner member may already use this email.
            member_domain = Domain("channel_id", "=", channel.id)
            member_domain &= Domain("guest_id.email", "=", email) | Domain(
                "partner_id.email", "=", email
            )
            if channel._allow_invite_by_email() and not self.env[
                "discuss.channel.member"
            ].search_count(member_domain):
                selectable_email = email
                # sudo - mail.mail: checking mail records to determine if an email was already sent is acceptable.
                email_already_sent = (
                    self.env["mail.mail"]
                    .sudo()
                    .search_count(
                        [
                            ("email_to", "=", email),
                            ("model", "=", "discuss.channel"),
                            ("res_id", "=", channel.id),
                        ]
                    )
                    > 0
                )

        return {
            **channel_invites,
            "email_already_sent": email_already_sent,
            "selectable_email": selectable_email,
            "store_data": store.get_result(),
        }
|
||||
|
||||
    @api.readonly
    @api.model
    def _search_for_channel_invite(self, store: Store, search_term, channel_id=None, limit=30):
        """Search internal-user partners matching ``search_term`` for a channel invite.

        Fills ``store`` with the matching partners and returns the total match
        count plus the ids of the returned page.

        :returns: ``{"count": total_matches, "partner_ids": [...]}``
        """
        # Only active partners backed by an active internal (non-share) user.
        domain = Domain.AND(
            [
                Domain("name", "ilike", search_term) | Domain("email", "ilike", search_term),
                [('id', '!=', self.env.user.partner_id.id)],
                [("active", "=", True)],
                [("user_ids", "!=", False)],
                [("user_ids.active", "=", True)],
                [("user_ids.share", "=", False)],
            ]
        )
        channel = self.env["discuss.channel"]
        if channel_id:
            channel = self.env["discuss.channel"].search([("id", "=", int(channel_id))])
            # Exclude existing members and, for group-restricted channels,
            # partners outside the authorized group.
            domain &= Domain("channel_ids", "not in", channel.id)
            if channel.group_public_id:
                domain &= Domain("user_ids.all_group_ids", "in", channel.group_public_id.id)
        query = self._search(domain, limit=limit)
        # bypass lack of support for case insensitive order in search()
        query.order = SQL('LOWER(%s), "res_partner"."id"', self._field_to_sql(self._table, "name"))
        selectable_partners = self.env["res.partner"].browse(query)
        selectable_partners._search_for_channel_invite_to_store(store, channel)
        return {
            "count": self.env["res.partner"].search_count(domain),
            "partner_ids": selectable_partners.ids,
        }
|
||||
|
||||
    def _search_for_channel_invite_to_store(self, store: Store, channel):
        # Hook: extension modules override this to add channel-specific data
        # per candidate partner; the base only serializes the partners.
        store.add(self)
|
||||
|
||||
@api.readonly
|
||||
@api.model
|
||||
def get_mention_suggestions_from_channel(self, channel_id, search, limit=8):
|
||||
"""Return 'limit'-first partners' such that the name or email matches a 'search' string.
|
||||
Prioritize partners that are also (internal) users, and then extend the research to all partners.
|
||||
Only members of the given channel are returned.
|
||||
The return format is a list of partner data (as per returned by `_to_store()`).
|
||||
"""
|
||||
channel = self.env["discuss.channel"].search([("id", "=", channel_id)])
|
||||
if not channel:
|
||||
return []
|
||||
domain = Domain([
|
||||
self._get_mention_suggestions_domain(search),
|
||||
("channel_ids", "in", (channel.parent_channel_id | channel).ids)
|
||||
])
|
||||
extra_domain = Domain([
|
||||
('user_ids', '!=', False),
|
||||
('user_ids.active', '=', True),
|
||||
('partner_share', '=', False),
|
||||
])
|
||||
allowed_group = (channel.parent_channel_id or channel).group_public_id
|
||||
if allowed_group:
|
||||
extra_domain &= Domain("user_ids.all_group_ids", "in", allowed_group.id)
|
||||
partners = self._search_mention_suggestions(domain, limit, extra_domain)
|
||||
members_domain = [
|
||||
("channel_id", "in", (channel.parent_channel_id | channel).ids),
|
||||
("partner_id", "in", partners.ids)
|
||||
]
|
||||
members = self.env["discuss.channel.member"].search(members_domain)
|
||||
member_fields = [
|
||||
Store.One("channel_id", [], as_thread=True),
|
||||
*self.env["discuss.channel.member"]._to_store_persona([]),
|
||||
]
|
||||
store = (
|
||||
Store()
|
||||
.add(members, member_fields)
|
||||
.add(partners, extra_fields=partners._get_store_mention_fields())
|
||||
)
|
||||
store.add(channel, "group_public_id")
|
||||
if allowed_group:
|
||||
for p in partners:
|
||||
store.add(p, {"group_ids": [("ADD", (allowed_group & p.user_ids.all_group_ids).ids)]})
|
||||
try:
|
||||
roles = self.env["res.role"].search([("name", "ilike", search)], limit=8)
|
||||
store.add(roles, "name")
|
||||
except AccessError:
|
||||
pass
|
||||
return store.get_result()
|
||||
72
odoo-bringout-oca-ocb-mail/mail/models/discuss/res_users.py
Normal file
72
odoo-bringout-oca-ocb-mail/mail/models/discuss/res_users.py
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class ResUsers(models.Model):
|
||||
_inherit = "res.users"
|
||||
|
||||
is_in_call = fields.Boolean("Is in call", related="partner_id.is_in_call")
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
users = super().create(vals_list)
|
||||
self.env["discuss.channel"].search([("group_ids", "in", users.all_group_ids.ids)])._subscribe_users_automatically()
|
||||
return users
|
||||
|
||||
def write(self, vals):
|
||||
res = super().write(vals)
|
||||
if "active" in vals and not vals["active"]:
|
||||
self._unsubscribe_from_non_public_channels()
|
||||
if vals.get("group_ids"):
|
||||
# form: {'group_ids': [(3, 10), (3, 3), (4, 10), (4, 3)]} or {'group_ids': [(6, 0, [ids]}
|
||||
user_group_ids = [command[1] for command in vals["group_ids"] if command[0] == 4]
|
||||
user_group_ids += [id for command in vals["group_ids"] if command[0] == 6 for id in command[2]]
|
||||
user_group_ids += self.env['res.groups'].browse(user_group_ids).all_implied_ids._ids
|
||||
self.env["discuss.channel"].search([("group_ids", "in", user_group_ids)])._subscribe_users_automatically()
|
||||
return res
|
||||
|
||||
def unlink(self):
|
||||
self._unsubscribe_from_non_public_channels()
|
||||
return super().unlink()
|
||||
|
||||
def _unsubscribe_from_non_public_channels(self):
|
||||
"""This method un-subscribes users from group restricted channels. Main purpose
|
||||
of this method is to prevent sending internal communication to archived / deleted users.
|
||||
"""
|
||||
domain = [("partner_id", "in", self.partner_id.ids)]
|
||||
# sudo: discuss.channel.member - removing member of other users based on channel restrictions
|
||||
current_cm = self.env["discuss.channel.member"].sudo().search(domain)
|
||||
current_cm.filtered(
|
||||
lambda cm: (cm.channel_id.channel_type == "channel" and cm.channel_id.group_public_id)
|
||||
).unlink()
|
||||
|
||||
def _init_messaging(self, store: Store):
|
||||
self = self.with_user(self)
|
||||
channels = self.env["discuss.channel"]._get_channels_as_member()
|
||||
domain = [("channel_id", "in", channels.ids), ("is_self", "=", True)]
|
||||
members = self.env["discuss.channel.member"].search(domain)
|
||||
members_with_unread = members.filtered(lambda member: member.message_unread_counter)
|
||||
# fetch channels data before calling super to benefit from prefetching (channel info might
|
||||
# prefetch a lot of data that super could use, about the current user in particular)
|
||||
super()._init_messaging(store)
|
||||
store.add_global_values(initChannelsUnreadCounter=len(members_with_unread))
|
||||
|
||||
def _init_store_data(self, store: Store):
|
||||
super()._init_store_data(store)
|
||||
# sudo: ir.config_parameter - reading hard-coded keys to check their existence, safe to
|
||||
# return whether the features are enabled
|
||||
get_param = self.env["ir.config_parameter"].sudo().get_param
|
||||
store.add_global_values(
|
||||
hasGifPickerFeature=bool(get_param("discuss.tenor_api_key")),
|
||||
hasMessageTranslationFeature=bool(get_param("mail.google_translate_api_key")),
|
||||
hasCannedResponses=bool(self.env["mail.canned.response"].sudo().search([
|
||||
"|",
|
||||
("create_uid", "=", self.env.user.id),
|
||||
("group_ids", "in", self.env.user.all_group_ids.ids),
|
||||
], limit=1)) if self.env.user else False,
|
||||
channel_types_with_seen_infos=sorted(
|
||||
self.env["discuss.channel"]._types_allowing_seen_infos()
|
||||
),
|
||||
)
|
||||
|
|
@ -1,10 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import imaplib
|
||||
import datetime
|
||||
import functools
|
||||
import logging
|
||||
import poplib
|
||||
import socket
|
||||
|
||||
from imaplib import IMAP4, IMAP4_SSL
|
||||
from poplib import POP3, POP3_SSL
|
||||
|
|
@ -13,59 +12,86 @@ from ssl import SSLError
|
|||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import exception_to_unicode
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
MAX_POP_MESSAGES = 50
|
||||
MAIL_TIMEOUT = 60
|
||||
MAIL_SERVER_DOMAIN = Domain('state', '=', 'done') & Domain('server_type', '!=', 'local')
|
||||
MAIL_SERVER_DEACTIVATE_TIME = datetime.timedelta(days=5) # deactivate cron when has general connection issues
|
||||
|
||||
# Workaround for Python 2.7.8 bug https://bugs.python.org/issue23906
|
||||
poplib._MAXLINE = 65536
|
||||
|
||||
# Add timeout to IMAP connections
|
||||
# HACK https://bugs.python.org/issue38615
|
||||
# TODO: clean in Python 3.9
|
||||
IMAP4._create_socket = lambda self, timeout=MAIL_TIMEOUT: socket.create_connection((self.host or None, self.port), timeout)
|
||||
|
||||
class OdooIMAP4(IMAP4):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._unread_messages = None
|
||||
|
||||
def check_unread_messages(self):
|
||||
self.select()
|
||||
_result, data = self.search(None, '(UNSEEN)')
|
||||
self._unread_messages = data[0].split() if data and data[0] else []
|
||||
self._unread_messages.reverse()
|
||||
return len(self._unread_messages)
|
||||
|
||||
def retrieve_unread_messages(self):
|
||||
assert self._unread_messages is not None
|
||||
while self._unread_messages:
|
||||
num = self._unread_messages.pop()
|
||||
_result, data = self.fetch(num, '(RFC822)')
|
||||
self.store(num, '-FLAGS', '\\Seen')
|
||||
yield num, data[0][1]
|
||||
|
||||
def handled_message(self, num):
|
||||
self.store(num, '+FLAGS', '\\Seen')
|
||||
|
||||
def disconnect(self):
|
||||
if self._unread_messages is not None:
|
||||
self.close()
|
||||
self.logout()
|
||||
|
||||
|
||||
def make_wrap_property(name):
|
||||
return property(
|
||||
lambda self: getattr(self.__obj__, name),
|
||||
lambda self, value: setattr(self.__obj__, name, value),
|
||||
)
|
||||
class OdooIMAP4_SSL(OdooIMAP4, IMAP4_SSL):
|
||||
pass
|
||||
|
||||
|
||||
class IMAP4Connection:
|
||||
"""Wrapper around IMAP4 and IMAP4_SSL"""
|
||||
def __init__(self, server, port, is_ssl):
|
||||
self.__obj__ = IMAP4_SSL(server, port) if is_ssl else IMAP4(server, port)
|
||||
class OdooPOP3(POP3):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._unread_messages = None
|
||||
|
||||
def check_unread_messages(self):
|
||||
(num_messages, _total_size) = self.stat()
|
||||
self.list()
|
||||
self._unread_messages = list(range(num_messages, 0, -1))
|
||||
return num_messages
|
||||
|
||||
def retrieve_unread_messages(self):
|
||||
while self._unread_messages:
|
||||
num = self._unread_messages.pop()
|
||||
(_header, messages, _octets) = self.retr(num)
|
||||
message = (b'\n').join(messages)
|
||||
yield num, message
|
||||
|
||||
def handled_message(self, num):
|
||||
self.dele(num)
|
||||
|
||||
def disconnect(self):
|
||||
self.quit()
|
||||
|
||||
|
||||
class POP3Connection:
|
||||
"""Wrapper around POP3 and POP3_SSL"""
|
||||
def __init__(self, server, port, is_ssl, timeout=MAIL_TIMEOUT):
|
||||
self.__obj__ = POP3_SSL(server, port, timeout=timeout) if is_ssl else POP3(server, port, timeout=timeout)
|
||||
|
||||
|
||||
IMAP_COMMANDS = [cmd.lower() for cmd in imaplib.Commands]
|
||||
IMAP_ATTRIBUTES = ['examine', 'login_cram_md5', 'move', 'recent', 'response', 'shutdown', 'unselect'] + IMAP_COMMANDS
|
||||
POP3_ATTRIBUTES = [
|
||||
'apop', 'capa', 'close', 'dele', 'list', 'noop', 'pass_', 'quit', 'retr', 'rpop', 'rset', 'set_debuglevel', 'stat',
|
||||
'stls', 'top', 'uidl', 'user', 'utf8'
|
||||
]
|
||||
for name in IMAP_ATTRIBUTES:
|
||||
setattr(IMAP4Connection, name, make_wrap_property(name))
|
||||
|
||||
for name in POP3_ATTRIBUTES:
|
||||
setattr(POP3Connection, name, make_wrap_property(name))
|
||||
class OdooPOP3_SSL(OdooPOP3, POP3_SSL):
|
||||
pass
|
||||
|
||||
|
||||
class FetchmailServer(models.Model):
|
||||
"""Incoming POP/IMAP mail server account"""
|
||||
|
||||
_name = 'fetchmail.server'
|
||||
_description = 'Incoming Mail Server'
|
||||
_order = 'priority'
|
||||
_email_field = 'user'
|
||||
|
||||
name = fields.Char('Name', required=True)
|
||||
active = fields.Boolean('Active', default=True)
|
||||
|
|
@ -73,8 +99,8 @@ class FetchmailServer(models.Model):
|
|||
('draft', 'Not Confirmed'),
|
||||
('done', 'Confirmed'),
|
||||
], string='Status', index=True, readonly=True, copy=False, default='draft')
|
||||
server = fields.Char(string='Server Name', readonly=True, help="Hostname or IP of the mail server", states={'draft': [('readonly', False)]})
|
||||
port = fields.Integer(readonly=True, states={'draft': [('readonly', False)]})
|
||||
server = fields.Char(string='Server Name', readonly=False, help="Hostname or IP of the mail server")
|
||||
port = fields.Integer()
|
||||
server_type = fields.Selection([
|
||||
('imap', 'IMAP Server'),
|
||||
('pop', 'POP Server'),
|
||||
|
|
@ -87,13 +113,16 @@ class FetchmailServer(models.Model):
|
|||
original = fields.Boolean('Keep Original', help="Whether a full original copy of each email should be kept for reference "
|
||||
"and attached to each processed message. This will usually double the size of your message database.")
|
||||
date = fields.Datetime(string='Last Fetch Date', readonly=True)
|
||||
user = fields.Char(string='Username', readonly=True, states={'draft': [('readonly', False)]})
|
||||
password = fields.Char(readonly=True, states={'draft': [('readonly', False)]})
|
||||
error_date = fields.Datetime(string='Last Error Date', readonly=True,
|
||||
help="Date of last failure, reset on success.")
|
||||
error_message = fields.Text(string='Last Error Message', readonly=True)
|
||||
user = fields.Char(string='Username', readonly=False)
|
||||
password = fields.Char()
|
||||
object_id = fields.Many2one('ir.model', string="Create a New Record", help="Process each incoming mail as part of a conversation "
|
||||
"corresponding to this document type. This will create "
|
||||
"new documents for new conversations, or attach follow-up "
|
||||
"emails to the existing conversations (documents).")
|
||||
priority = fields.Integer(string='Server Priority', readonly=True, states={'draft': [('readonly', False)]}, help="Defines the order of processing, lower values mean higher priority", default=5)
|
||||
priority = fields.Integer(string='Server Priority', readonly=False, help="Defines the order of processing, lower values mean higher priority", default=5)
|
||||
message_ids = fields.One2many('mail.mail', 'fetchmail_server_id', string='Messages', readonly=True)
|
||||
configuration = fields.Text('Configuration', readonly=True)
|
||||
script = fields.Char(readonly=True, default='/mail/static/scripts/odoo-mailgate.py')
|
||||
|
|
@ -129,17 +158,17 @@ odoo_mailgate: "|/path/to/odoo-mailgate.py --host=localhost -u %(uid)d -p PASSWO
|
|||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
res = super(FetchmailServer, self).create(vals_list)
|
||||
res = super().create(vals_list)
|
||||
self._update_cron()
|
||||
return res
|
||||
|
||||
def write(self, values):
|
||||
res = super(FetchmailServer, self).write(values)
|
||||
def write(self, vals):
|
||||
res = super().write(vals)
|
||||
self._update_cron()
|
||||
return res
|
||||
|
||||
def unlink(self):
|
||||
res = super(FetchmailServer, self).unlink()
|
||||
res = super().unlink()
|
||||
self._update_cron()
|
||||
return res
|
||||
|
||||
|
|
@ -147,7 +176,7 @@ odoo_mailgate: "|/path/to/odoo-mailgate.py --host=localhost -u %(uid)d -p PASSWO
|
|||
self.write({'state': 'draft'})
|
||||
return True
|
||||
|
||||
def connect(self, allow_archived=False):
|
||||
def _connect__(self, allow_archived=False): # noqa: PLW3201
|
||||
"""
|
||||
:param bool allow_archived: by default (False), an exception is raised when calling this method on an
|
||||
archived record. It can be set to True for testing so that the exception is no longer raised.
|
||||
|
|
@ -157,17 +186,19 @@ odoo_mailgate: "|/path/to/odoo-mailgate.py --host=localhost -u %(uid)d -p PASSWO
|
|||
raise UserError(_('The server "%s" cannot be used because it is archived.', self.display_name))
|
||||
connection_type = self._get_connection_type()
|
||||
if connection_type == 'imap':
|
||||
connection = IMAP4Connection(self.server, int(self.port), self.is_ssl)
|
||||
self._imap_login(connection)
|
||||
server, port, is_ssl = self.server, int(self.port), self.is_ssl
|
||||
connection = OdooIMAP4_SSL(server, port, timeout=MAIL_TIMEOUT) if is_ssl else OdooIMAP4(server, port, timeout=MAIL_TIMEOUT)
|
||||
self._imap_login__(connection)
|
||||
elif connection_type == 'pop':
|
||||
connection = POP3Connection(self.server, int(self.port), self.is_ssl)
|
||||
server, port, is_ssl = self.server, int(self.port), self.is_ssl
|
||||
connection = OdooPOP3_SSL(server, port, timeout=MAIL_TIMEOUT) if is_ssl else OdooPOP3(server, port, timeout=MAIL_TIMEOUT)
|
||||
#TODO: use this to remove only unread messages
|
||||
#connection.user("recent:"+server.user)
|
||||
connection.user(self.user)
|
||||
connection.pass_(self.password)
|
||||
return connection
|
||||
|
||||
def _imap_login(self, connection):
|
||||
def _imap_login__(self, connection): # noqa: PLW3201
|
||||
"""Authenticate the IMAP connection.
|
||||
|
||||
Can be overridden in other module for different authentication methods.
|
||||
|
|
@ -179,114 +210,125 @@ odoo_mailgate: "|/path/to/odoo-mailgate.py --host=localhost -u %(uid)d -p PASSWO
|
|||
|
||||
def button_confirm_login(self):
|
||||
for server in self:
|
||||
connection = False
|
||||
connection = None
|
||||
try:
|
||||
connection = server.connect(allow_archived=True)
|
||||
connection = server._connect__(allow_archived=True)
|
||||
server.write({'state': 'done'})
|
||||
except UnicodeError as e:
|
||||
raise UserError(_("Invalid server name !\n %s", tools.ustr(e)))
|
||||
raise UserError(_("Invalid server name!\n %s", tools.exception_to_unicode(e)))
|
||||
except (gaierror, timeout, IMAP4.abort) as e:
|
||||
raise UserError(_("No response received. Check server information.\n %s", tools.ustr(e)))
|
||||
raise UserError(_("No response received. Check server information.\n %s", tools.exception_to_unicode(e)))
|
||||
except (IMAP4.error, poplib.error_proto) as err:
|
||||
raise UserError(_("Server replied with following exception:\n %s", tools.ustr(err)))
|
||||
raise UserError(_("Server replied with following exception:\n %s", tools.exception_to_unicode(err)))
|
||||
except SSLError as e:
|
||||
raise UserError(_("An SSL exception occurred. Check SSL/TLS configuration on server port.\n %s", tools.ustr(e)))
|
||||
raise UserError(_("An SSL exception occurred. Check SSL/TLS configuration on server port.\n %s", tools.exception_to_unicode(e)))
|
||||
except (OSError, Exception) as err:
|
||||
_logger.info("Failed to connect to %s server %s.", server.server_type, server.name, exc_info=True)
|
||||
raise UserError(_("Connection test failed: %s", tools.ustr(err)))
|
||||
raise UserError(_("Connection test failed: %s", tools.exception_to_unicode(err)))
|
||||
finally:
|
||||
try:
|
||||
if connection:
|
||||
connection_type = server._get_connection_type()
|
||||
if connection_type == 'imap':
|
||||
connection.close()
|
||||
elif connection_type == 'pop':
|
||||
connection.quit()
|
||||
connection.disconnect()
|
||||
except Exception:
|
||||
# ignored, just a consequence of the previous exception
|
||||
pass
|
||||
return True
|
||||
|
||||
@api.model
|
||||
def _fetch_mails(self):
|
||||
""" Method called by cron to fetch mails from servers """
|
||||
return self.search([('state', '=', 'done'), ('server_type', '!=', 'local')]).fetch_mail()
|
||||
|
||||
def fetch_mail(self):
|
||||
""" WARNING: meant for cron usage only - will commit() after each email! """
|
||||
additionnal_context = {
|
||||
'fetchmail_cron_running': True
|
||||
}
|
||||
MailThread = self.env['mail.thread']
|
||||
for server in self:
|
||||
_logger.info('start checking for new emails on %s server %s', server.server_type, server.name)
|
||||
additionnal_context['default_fetchmail_server_id'] = server.id
|
||||
""" Action to fetch the mail from the current server. """
|
||||
self.ensure_one().check_access('write')
|
||||
exception = self.sudo()._fetch_mail()
|
||||
if exception is not None:
|
||||
raise exception
|
||||
|
||||
@api.model
|
||||
def _fetch_mails(self, **kw):
|
||||
""" Method called by cron to fetch mails from servers """
|
||||
assert self.env.context.get('cron_id') == self.env.ref('mail.ir_cron_mail_gateway_action').id, "Meant for cron usage only"
|
||||
self.search(MAIL_SERVER_DOMAIN)._fetch_mail(**kw)
|
||||
if not self.search_count(MAIL_SERVER_DOMAIN):
|
||||
# no server is active anymore
|
||||
self.env['ir.cron']._commit_progress(deactivate=True)
|
||||
|
||||
def _fetch_mail(self, batch_limit=50) -> Exception | None:
|
||||
""" Fetch e-mails from multiple servers.
|
||||
|
||||
Commit after each message.
|
||||
"""
|
||||
result_exception = None
|
||||
servers = self.with_context(fetchmail_cron_running=True)
|
||||
total_remaining = len(servers) # number of remaining messages + number of unchecked servers
|
||||
self.env['ir.cron']._commit_progress(remaining=total_remaining)
|
||||
|
||||
for server in servers:
|
||||
total_remaining -= 1 # the server is checked
|
||||
if not server.try_lock_for_update(allow_referencing=True).filtered_domain(MAIL_SERVER_DOMAIN):
|
||||
_logger.info('Skip checking for new mails on mail server id %d (unavailable)', server.id)
|
||||
continue
|
||||
server_type_and_name = server.server_type, server.name # avoid reading this after each commit
|
||||
_logger.info('Start checking for new emails on %s server %s', *server_type_and_name)
|
||||
count, failed = 0, 0
|
||||
imap_server = None
|
||||
pop_server = None
|
||||
connection_type = server._get_connection_type()
|
||||
if connection_type == 'imap':
|
||||
|
||||
# processing messages in a separate transaction to keep lock on the server
|
||||
server_connection = None
|
||||
message_cr = None
|
||||
try:
|
||||
server_connection = server._connect__()
|
||||
message_cr = self.env.registry.cursor()
|
||||
MailThread = server.env['mail.thread'].with_env(self.env(cr=message_cr)).with_context(default_fetchmail_server_id=server.id)
|
||||
thread_process_message = functools.partial(
|
||||
MailThread.message_process,
|
||||
model=server.object_id.model,
|
||||
save_original=server.original,
|
||||
strip_attachments=(not server.attach),
|
||||
)
|
||||
unread_message_count = server_connection.check_unread_messages()
|
||||
_logger.debug('%d unread messages on %s server %s.', unread_message_count, *server_type_and_name)
|
||||
total_remaining += unread_message_count
|
||||
for message_num, message in server_connection.retrieve_unread_messages():
|
||||
_logger.debug('Fetched message %r on %s server %s.', message_num, *server_type_and_name)
|
||||
count += 1
|
||||
total_remaining -= 1
|
||||
try:
|
||||
thread_process_message(message=message)
|
||||
remaining_time = MailThread.env['ir.cron']._commit_progress(1)
|
||||
except Exception: # noqa: BLE001
|
||||
MailThread.env.cr.rollback()
|
||||
failed += 1
|
||||
_logger.info('Failed to process mail from %s server %s.', *server_type_and_name, exc_info=True)
|
||||
remaining_time = MailThread.env['ir.cron']._commit_progress()
|
||||
server_connection.handled_message(message_num)
|
||||
if count >= batch_limit or not remaining_time:
|
||||
break
|
||||
server.error_date = False
|
||||
server.error_message = False
|
||||
except Exception as e: # noqa: BLE001
|
||||
result_exception = e
|
||||
_logger.info("General failure when trying to fetch mail from %s server %s.", *server_type_and_name, exc_info=True)
|
||||
if not server.error_date:
|
||||
server.error_date = fields.Datetime.now()
|
||||
server.error_message = exception_to_unicode(e)
|
||||
elif server.error_date < fields.Datetime.now() - MAIL_SERVER_DEACTIVATE_TIME:
|
||||
message = "Deactivating fetchmail %s server %s (too many failures)" % server_type_and_name
|
||||
server.set_draft()
|
||||
server.env['ir.cron']._notify_admin(message)
|
||||
finally:
|
||||
if message_cr is not None:
|
||||
message_cr.close()
|
||||
try:
|
||||
imap_server = server.connect()
|
||||
imap_server.select()
|
||||
result, data = imap_server.search(None, '(UNSEEN)')
|
||||
for num in data[0].split():
|
||||
res_id = None
|
||||
result, data = imap_server.fetch(num, '(RFC822)')
|
||||
imap_server.store(num, '-FLAGS', '\\Seen')
|
||||
try:
|
||||
res_id = MailThread.with_context(**additionnal_context).message_process(server.object_id.model, data[0][1], save_original=server.original, strip_attachments=(not server.attach))
|
||||
except Exception:
|
||||
_logger.info('Failed to process mail from %s server %s.', server.server_type, server.name, exc_info=True)
|
||||
failed += 1
|
||||
imap_server.store(num, '+FLAGS', '\\Seen')
|
||||
self._cr.commit()
|
||||
count += 1
|
||||
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, server.server_type, server.name, (count - failed), failed)
|
||||
except Exception:
|
||||
_logger.info("General failure when trying to fetch mail from %s server %s.", server.server_type, server.name, exc_info=True)
|
||||
finally:
|
||||
if imap_server:
|
||||
try:
|
||||
imap_server.close()
|
||||
imap_server.logout()
|
||||
except OSError:
|
||||
_logger.warning('Failed to properly finish imap connection: %s.', server.name, exc_info=True)
|
||||
elif connection_type == 'pop':
|
||||
try:
|
||||
while True:
|
||||
failed_in_loop = 0
|
||||
num = 0
|
||||
pop_server = server.connect()
|
||||
(num_messages, total_size) = pop_server.stat()
|
||||
pop_server.list()
|
||||
for num in range(1, min(MAX_POP_MESSAGES, num_messages) + 1):
|
||||
(header, messages, octets) = pop_server.retr(num)
|
||||
message = (b'\n').join(messages)
|
||||
res_id = None
|
||||
try:
|
||||
res_id = MailThread.with_context(**additionnal_context).message_process(server.object_id.model, message, save_original=server.original, strip_attachments=(not server.attach))
|
||||
pop_server.dele(num)
|
||||
except Exception:
|
||||
_logger.info('Failed to process mail from %s server %s.', server.server_type, server.name, exc_info=True)
|
||||
failed += 1
|
||||
failed_in_loop += 1
|
||||
self.env.cr.commit()
|
||||
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", num, server.server_type, server.name, (num - failed_in_loop), failed_in_loop)
|
||||
# Stop if (1) no more message left or (2) all messages have failed
|
||||
if num_messages < MAX_POP_MESSAGES or failed_in_loop == num:
|
||||
break
|
||||
pop_server.quit()
|
||||
except Exception:
|
||||
_logger.info("General failure when trying to fetch mail from %s server %s.", server.server_type, server.name, exc_info=True)
|
||||
finally:
|
||||
if pop_server:
|
||||
try:
|
||||
pop_server.quit()
|
||||
except OSError:
|
||||
_logger.warning('Failed to properly finish pop connection: %s.', server.name, exc_info=True)
|
||||
if server_connection:
|
||||
server_connection.disconnect()
|
||||
except (OSError, IMAP4.abort):
|
||||
_logger.warning('Failed to properly finish %s connection: %s.', *server_type_and_name, exc_info=True)
|
||||
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, *server_type_and_name, (count - failed), failed)
|
||||
server.write({'date': fields.Datetime.now()})
|
||||
return True
|
||||
# Commit before updating the progress because progress may be
|
||||
# updated for messages using another transaction. Without a commit
|
||||
# before updating the progress, we would have a serialization error.
|
||||
self.env.cr.commit()
|
||||
if not self.env['ir.cron']._commit_progress(remaining=total_remaining):
|
||||
break
|
||||
return result_exception
|
||||
|
||||
def _get_connection_type(self):
|
||||
"""Return which connection must be used for this mail server (IMAP or POP).
|
||||
|
|
@ -303,6 +345,6 @@ odoo_mailgate: "|/path/to/odoo-mailgate.py --host=localhost -u %(uid)d -p PASSWO
|
|||
try:
|
||||
# Enabled/Disable cron based on the number of 'done' server of type pop or imap
|
||||
cron = self.env.ref('mail.ir_cron_mail_gateway_action')
|
||||
cron.toggle(model=self._name, domain=[('state', '=', 'done'), ('server_type', '!=', 'local')])
|
||||
cron.toggle(model=self._name, domain=MAIL_SERVER_DOMAIN)
|
||||
except ValueError:
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
from odoo import fields, models
|
||||
|
||||
|
||||
class ActWindowView(models.Model):
|
||||
class IrActionsAct_WindowView(models.Model):
|
||||
_inherit = 'ir.actions.act_window.view'
|
||||
|
||||
view_mode = fields.Selection(selection_add=[
|
||||
|
|
|
|||
|
|
@ -4,29 +4,59 @@
|
|||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
|
||||
class ServerActions(models.Model):
|
||||
class IrActionsServer(models.Model):
|
||||
""" Add mail.thread related options in server actions. """
|
||||
_name = 'ir.actions.server'
|
||||
_description = 'Server Action'
|
||||
_inherit = ['ir.actions.server']
|
||||
_inherit = ['ir.actions.server', 'mail.thread', 'mail.activity.mixin']
|
||||
|
||||
name = fields.Char(tracking=True)
|
||||
model_id = fields.Many2one(tracking=True)
|
||||
crud_model_id = fields.Many2one(tracking=True)
|
||||
link_field_id = fields.Many2one(tracking=True)
|
||||
update_path = fields.Char(tracking=True)
|
||||
value = fields.Text(tracking=True)
|
||||
evaluation_type = fields.Selection(tracking=True)
|
||||
webhook_url = fields.Char(tracking=True)
|
||||
|
||||
state = fields.Selection(
|
||||
selection_add=[('mail_post', 'Send Email'),
|
||||
('followers', 'Add Followers'),
|
||||
('next_activity', 'Create Next Activity'),
|
||||
tracking=True,
|
||||
selection_add=[
|
||||
('next_activity', 'Create Activity'),
|
||||
('mail_post', 'Send Email'),
|
||||
('followers', 'Add Followers'),
|
||||
('remove_followers', 'Remove Followers'),
|
||||
('code',),
|
||||
],
|
||||
ondelete={'mail_post': 'cascade',
|
||||
'followers': 'cascade',
|
||||
'remove_followers': 'cascade',
|
||||
'next_activity': 'cascade',
|
||||
}
|
||||
)
|
||||
# Followers
|
||||
partner_ids = fields.Many2many(
|
||||
'res.partner', string='Add Followers',
|
||||
compute='_compute_partner_ids', readonly=False, store=True)
|
||||
followers_type = fields.Selection(
|
||||
selection=[
|
||||
('specific', 'Specific Followers'),
|
||||
('generic', 'Dynamic Followers'),
|
||||
],
|
||||
help="""
|
||||
- Specific Followers: select specific contacts to add/remove from record's followers.
|
||||
- Dynamic Followers: all contacts of the chosen record's field will be added/removed from followers.
|
||||
""",
|
||||
string='Followers Type',
|
||||
compute='_compute_followers_type',
|
||||
readonly=False, store=True
|
||||
)
|
||||
followers_partner_field_name = fields.Char(
|
||||
string='Followers Field',
|
||||
compute='_compute_followers_info',
|
||||
readonly=False, store=True
|
||||
)
|
||||
partner_ids = fields.Many2many('res.partner', compute='_compute_followers_info', readonly=False, store=True)
|
||||
|
||||
# Message Post / Email
|
||||
template_id = fields.Many2one(
|
||||
'mail.template', 'Email Template',
|
||||
|
|
@ -39,19 +69,19 @@ class ServerActions(models.Model):
|
|||
'Subscribe Recipients', compute='_compute_mail_post_autofollow',
|
||||
readonly=False, store=True)
|
||||
mail_post_method = fields.Selection(
|
||||
selection=[('email', 'Email'), ('comment', 'Post as Message'), ('note', 'Post as Note')],
|
||||
string='Send as',
|
||||
selection=[('email', 'Email'), ('comment', 'Message'), ('note', 'Note')],
|
||||
string='Send Email As',
|
||||
compute='_compute_mail_post_method',
|
||||
readonly=False, store=True,
|
||||
help='Choose method for email sending:\nEMail: send directly emails\nPost as Message: post on document and notify followers\nPost as Note: log a note on document')
|
||||
readonly=False, store=True)
|
||||
|
||||
# Next Activity
|
||||
activity_type_id = fields.Many2one(
|
||||
'mail.activity.type', string='Activity',
|
||||
'mail.activity.type', string='Activity Type',
|
||||
domain="['|', ('res_model', '=', False), ('res_model', '=', model_name)]",
|
||||
compute='_compute_activity_type_id', readonly=False, store=True,
|
||||
compute='_compute_activity_info', readonly=False, store=True,
|
||||
ondelete='restrict')
|
||||
activity_summary = fields.Char(
|
||||
'Summary',
|
||||
'Title',
|
||||
compute='_compute_activity_info', readonly=False, store=True)
|
||||
activity_note = fields.Html(
|
||||
'Note',
|
||||
|
|
@ -63,19 +93,42 @@ class ServerActions(models.Model):
|
|||
[('days', 'Days'),
|
||||
('weeks', 'Weeks'),
|
||||
('months', 'Months')],
|
||||
string='Due type', default='days',
|
||||
string='Due type',
|
||||
compute='_compute_activity_info', readonly=False, store=True)
|
||||
activity_user_type = fields.Selection(
|
||||
[('specific', 'Specific User'),
|
||||
('generic', 'Generic User From Record')],
|
||||
('generic', 'Dynamic User (based on record)')],
|
||||
string='User Type',
|
||||
compute='_compute_activity_info', readonly=False, store=True,
|
||||
help="Use 'Specific User' to always assign the same user on the next activity. Use 'Generic User From Record' to specify the field name of the user to choose on the record.")
|
||||
help="Use 'Specific User' to always assign the same user on the next activity. Use 'Dynamic User' to specify the field name of the user to choose on the record.")
|
||||
activity_user_id = fields.Many2one(
|
||||
'res.users', string='Responsible',
|
||||
compute='_compute_activity_info', readonly=False, store=True)
|
||||
compute='_compute_activity_user_info', readonly=False, store=True)
|
||||
activity_user_field_name = fields.Char(
|
||||
'User field name',
|
||||
compute='_compute_activity_info', readonly=False, store=True)
|
||||
'User Field',
|
||||
compute='_compute_activity_user_info', readonly=False, store=True)
|
||||
|
||||
def _name_depends(self):
|
||||
return [*super()._name_depends(), "template_id", "activity_type_id"]
|
||||
|
||||
def _generate_action_name(self):
|
||||
self.ensure_one()
|
||||
if self.state == 'mail_post' and self.template_id:
|
||||
return _('Send %(template_name)s', template_name=self.template_id.name)
|
||||
if self.state == 'next_activity' and self.activity_type_id:
|
||||
return _('Create %(activity_name)s', activity_name=self.activity_type_id.name)
|
||||
return super()._generate_action_name()
|
||||
|
||||
@api.depends('state')
|
||||
def _compute_available_model_ids(self):
|
||||
mail_thread_based = self.filtered(
|
||||
lambda action: action.state in {'mail_post', 'followers', 'remove_followers', 'next_activity'}
|
||||
)
|
||||
if mail_thread_based:
|
||||
mail_models = self.env['ir.model'].search([('is_mail_thread', '=', True), ('transient', '=', False)])
|
||||
for action in mail_thread_based:
|
||||
action.available_model_ids = mail_models.ids
|
||||
super(IrActionsServer, self - mail_thread_based)._compute_available_model_ids()
|
||||
|
||||
@api.depends('model_id', 'state')
|
||||
def _compute_template_id(self):
|
||||
|
|
@ -102,63 +155,144 @@ class ServerActions(models.Model):
|
|||
to_reset.mail_post_method = False
|
||||
other = self - to_reset
|
||||
if other:
|
||||
other.mail_post_method = 'email'
|
||||
|
||||
@api.depends('state')
|
||||
def _compute_partner_ids(self):
|
||||
to_reset = self.filtered(lambda act: act.state != 'followers')
|
||||
if to_reset:
|
||||
to_reset.partner_ids = False
|
||||
other.mail_post_method = 'comment'
|
||||
|
||||
@api.depends('model_id', 'state')
|
||||
def _compute_activity_type_id(self):
|
||||
to_reset = self.filtered(
|
||||
lambda act: act.state != 'next_activity' or \
|
||||
(act.model_id.model != act.activity_type_id.res_model)
|
||||
)
|
||||
def _compute_followers_type(self):
|
||||
to_reset = self.filtered(lambda act: not act.model_id or act.state not in ['followers', 'remove_followers'])
|
||||
to_reset.followers_type = False
|
||||
to_default = (self - to_reset).filtered(lambda act: not act.followers_type)
|
||||
to_default.followers_type = 'specific'
|
||||
|
||||
@api.depends('followers_type')
|
||||
def _compute_followers_info(self):
|
||||
for action in self:
|
||||
if action.followers_type == 'specific':
|
||||
action.followers_partner_field_name = False
|
||||
elif action.followers_type == 'generic':
|
||||
action.partner_ids = False
|
||||
IrModelFields = self.env['ir.model.fields']
|
||||
domain = [('model', '=', action.model_id.model), ("relation", "=", "res.partner")]
|
||||
action.followers_partner_field_name = (
|
||||
IrModelFields.search([*domain, ("name", "=", "partner_id")], limit=1)
|
||||
or IrModelFields.search(domain, limit=1)
|
||||
).name
|
||||
else:
|
||||
action.partner_ids = False
|
||||
action.followers_partner_field_name = False
|
||||
|
||||
@api.depends('model_id', 'state')
|
||||
def _compute_activity_info(self):
|
||||
to_reset = self.filtered(lambda act: not act.model_id or act.state != 'next_activity')
|
||||
if to_reset:
|
||||
to_reset.activity_type_id = False
|
||||
|
||||
@api.depends('state')
|
||||
def _compute_activity_info(self):
|
||||
to_reset = self.filtered(lambda act: act.state != 'next_activity')
|
||||
if to_reset:
|
||||
to_reset.activity_summary = False
|
||||
to_reset.activity_note = False
|
||||
to_reset.activity_date_deadline_range = False
|
||||
to_reset.activity_date_deadline_range_type = False
|
||||
to_reset.activity_user_type = False
|
||||
to_reset.activity_user_id = False
|
||||
to_reset.activity_user_field_name = False
|
||||
to_default = self.filtered(lambda act: act.state == 'next_activity')
|
||||
for activity in to_default:
|
||||
if not activity.activity_date_deadline_range_type:
|
||||
activity.activity_date_deadline_range_type = 'days'
|
||||
if not activity.activity_user_type:
|
||||
activity.activity_user_type = 'specific'
|
||||
if not activity.activity_user_field_name:
|
||||
activity.activity_user_field_name = 'user_id'
|
||||
for action in (self - to_reset):
|
||||
if action.activity_type_id.res_model and action.model_id.model != action.activity_type_id.res_model:
|
||||
action.activity_type_id = False
|
||||
if not action.activity_summary:
|
||||
action.activity_summary = action.activity_type_id.summary
|
||||
if not action.activity_date_deadline_range_type:
|
||||
action.activity_date_deadline_range_type = 'days'
|
||||
if not action.activity_user_type:
|
||||
action.activity_user_type = 'specific'
|
||||
|
||||
@api.constrains('activity_date_deadline_range')
|
||||
def _check_activity_date_deadline_range(self):
|
||||
if any(action.activity_date_deadline_range < 0 for action in self):
|
||||
raise ValidationError(_("The 'Due Date In' value can't be negative."))
|
||||
@api.depends('model_id', 'activity_user_type')
|
||||
def _compute_activity_user_info(self):
|
||||
to_compute = self.filtered("activity_user_type")
|
||||
(self - to_compute).activity_user_id = False
|
||||
(self - to_compute).activity_user_field_name = False
|
||||
for action in to_compute:
|
||||
if action.activity_user_type == 'specific':
|
||||
action.activity_user_field_name = False
|
||||
else:
|
||||
action.activity_user_id = False
|
||||
IrModelFields = self.env['ir.model.fields']
|
||||
domain = [('model', '=', action.model_id.model), ("relation", "=", "res.users")]
|
||||
action.activity_user_field_name = (
|
||||
IrModelFields.search([*domain, ("name", "=", "user_id")], limit=1)
|
||||
or IrModelFields.search(domain, limit=1)
|
||||
).name
|
||||
|
||||
@api.constrains('state', 'model_id')
|
||||
def _check_model_coherency(self):
|
||||
for action in self:
|
||||
if action.state in ('followers', 'next_activity') and action.model_id.transient:
|
||||
raise ValidationError(_("This action cannot be done on transient models."))
|
||||
if action.state == 'followers' and not action.model_id.is_mail_thread:
|
||||
raise ValidationError(_("Add Followers can only be done on a mail thread models"))
|
||||
if action.state == 'next_activity' and not action.model_id.is_mail_activity:
|
||||
raise ValidationError(_("A next activity can only be planned on models that use activities."))
|
||||
@api.model
|
||||
def _warning_depends(self):
|
||||
return super()._warning_depends() + [
|
||||
'activity_date_deadline_range',
|
||||
'model_id',
|
||||
'template_id',
|
||||
'state',
|
||||
'followers_type',
|
||||
'followers_partner_field_name',
|
||||
'activity_user_type',
|
||||
'activity_user_field_name',
|
||||
]
|
||||
|
||||
def _get_warning_messages(self):
|
||||
warnings = super()._get_warning_messages()
|
||||
|
||||
if self.activity_date_deadline_range < 0:
|
||||
warnings.append(_("The 'Due Date In' value can't be negative."))
|
||||
|
||||
if self.state == 'mail_post' and self.template_id and self.template_id.model_id != self.model_id:
|
||||
warnings.append(_("Mail template model of $(action_name)s does not match action model.", action_name=self.name))
|
||||
|
||||
if self.state in {'mail_post', 'followers', 'remove_followers', 'next_activity'} and self.model_id.transient:
|
||||
warnings.append(_("This action cannot be done on transient models."))
|
||||
|
||||
if (
|
||||
(self.state in {"followers", "remove_followers"}
|
||||
or (self.state == "mail_post" and self.mail_post_method != "email"))
|
||||
and not self.model_id.is_mail_thread
|
||||
):
|
||||
warnings.append(_("This action can only be done on a mail thread models"))
|
||||
|
||||
if self.state == 'next_activity' and not self.model_id.is_mail_activity:
|
||||
warnings.append(_("A next activity can only be planned on models that use activities."))
|
||||
|
||||
if self.state in ('followers', 'remove_followers') and self.followers_type == 'generic' and self.followers_partner_field_name:
|
||||
fields, field_chain_str = self._get_relation_chain("followers_partner_field_name")
|
||||
if fields and fields[-1].comodel_name != "res.partner":
|
||||
warnings.append(_(
|
||||
"The field '%(field_chain_str)s' is not a partner field.",
|
||||
field_chain_str=field_chain_str,
|
||||
))
|
||||
|
||||
if self.state == 'next_activity' and self.activity_user_type == 'generic' and self.activity_user_field_name:
|
||||
fields, field_chain_str = self._get_relation_chain("activity_user_field_name")
|
||||
if fields and fields[-1].comodel_name != "res.users":
|
||||
warnings.append(_(
|
||||
"The field '%(field_chain_str)s' is not a user field.",
|
||||
field_chain_str=field_chain_str,
|
||||
))
|
||||
|
||||
return warnings
|
||||
|
||||
def _run_action_followers_multi(self, eval_context=None):
|
||||
Model = self.env[self.model_name]
|
||||
if self.partner_ids and hasattr(Model, 'message_subscribe'):
|
||||
records = Model.browse(self._context.get('active_ids', self._context.get('active_id')))
|
||||
records.message_subscribe(partner_ids=self.partner_ids.ids)
|
||||
if hasattr(Model, 'message_subscribe'):
|
||||
records = Model.browse(self.env.context.get('active_ids', self.env.context.get('active_id')))
|
||||
if self.followers_type == 'specific':
|
||||
partner_ids = self.partner_ids
|
||||
else:
|
||||
followers_field = self.followers_partner_field_name
|
||||
partner_ids = records.mapped(followers_field)
|
||||
records.message_subscribe(partner_ids=partner_ids.ids)
|
||||
return False
|
||||
|
||||
def _run_action_remove_followers_multi(self, eval_context=None):
|
||||
Model = self.env[self.model_name]
|
||||
if hasattr(Model, 'message_unsubscribe'):
|
||||
records = Model.browse(self.env.context.get('active_ids', self.env.context.get('active_id')))
|
||||
if self.followers_type == 'specific':
|
||||
partner_ids = self.partner_ids
|
||||
else:
|
||||
followers_field = self.followers_partner_field_name
|
||||
partner_ids = records.mapped(followers_field)
|
||||
records.message_unsubscribe(partner_ids=partner_ids.ids)
|
||||
return False
|
||||
|
||||
def _is_recompute(self):
|
||||
|
|
@ -168,10 +302,10 @@ class ServerActions(models.Model):
|
|||
Except if the computed field is supposed to trigger the action
|
||||
"""
|
||||
records = self.env[self.model_name].browse(
|
||||
self._context.get('active_ids', self._context.get('active_id')))
|
||||
old_values = self._context.get('old_values')
|
||||
self.env.context.get('active_ids', self.env.context.get('active_id')))
|
||||
old_values = self.env.context.get('old_values')
|
||||
if old_values:
|
||||
domain_post = self._context.get('domain_post')
|
||||
domain_post = self.env.context.get('domain_post')
|
||||
tracked_fields = []
|
||||
if domain_post:
|
||||
for leaf in domain_post:
|
||||
|
|
@ -189,30 +323,30 @@ class ServerActions(models.Model):
|
|||
|
||||
def _run_action_mail_post_multi(self, eval_context=None):
|
||||
# TDE CLEANME: when going to new api with server action, remove action
|
||||
if not self.template_id or (not self._context.get('active_ids') and not self._context.get('active_id')) or self._is_recompute():
|
||||
if not self.template_id or (not self.env.context.get('active_ids') and not self.env.context.get('active_id')) or self._is_recompute():
|
||||
return False
|
||||
res_ids = self._context.get('active_ids', [self._context.get('active_id')])
|
||||
res_ids = self.env.context.get('active_ids', [self.env.context.get('active_id')])
|
||||
|
||||
# Clean context from default_type to avoid making attachment
|
||||
# with wrong values in subsequent operations
|
||||
cleaned_ctx = dict(self.env.context)
|
||||
cleaned_ctx.pop('default_type', None)
|
||||
cleaned_ctx.pop('default_parent_id', None)
|
||||
cleaned_ctx['mail_create_nosubscribe'] = True # do not subscribe random people to records
|
||||
cleaned_ctx['mail_post_autofollow_author_skip'] = True # do not subscribe random people to records
|
||||
cleaned_ctx['mail_post_autofollow'] = self.mail_post_autofollow
|
||||
|
||||
if self.mail_post_method in ('comment', 'note'):
|
||||
records = self.env[self.model_name].with_context(cleaned_ctx).browse(res_ids)
|
||||
message_type = 'auto_comment' if self.state == 'mail_post' else 'notification'
|
||||
if self.mail_post_method == 'comment':
|
||||
subtype_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment')
|
||||
else:
|
||||
subtype_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note')
|
||||
for record in records:
|
||||
record.message_post_with_template(
|
||||
self.template_id.id,
|
||||
composition_mode='comment',
|
||||
subtype_id=subtype_id,
|
||||
)
|
||||
records.message_post_with_source(
|
||||
self.template_id,
|
||||
message_type=message_type,
|
||||
subtype_id=subtype_id,
|
||||
)
|
||||
else:
|
||||
template = self.template_id.with_context(cleaned_ctx)
|
||||
for res_id in res_ids:
|
||||
|
|
@ -224,10 +358,10 @@ class ServerActions(models.Model):
|
|||
return False
|
||||
|
||||
def _run_action_next_activity(self, eval_context=None):
|
||||
if not self.activity_type_id or not self._context.get('active_id') or self._is_recompute():
|
||||
if not self.activity_type_id or not self.env.context.get('active_id') or self._is_recompute():
|
||||
return False
|
||||
|
||||
records = self.env[self.model_name].browse(self._context.get('active_ids', self._context.get('active_id')))
|
||||
records = self.env[self.model_name].browse(self.env.context.get('active_ids', self.env.context.get('active_id')))
|
||||
|
||||
vals = {
|
||||
'summary': self.activity_summary or '',
|
||||
|
|
@ -244,7 +378,9 @@ class ServerActions(models.Model):
|
|||
elif self.activity_user_type == 'generic' and self.activity_user_field_name in record:
|
||||
user = record[self.activity_user_field_name]
|
||||
if user:
|
||||
vals['user_id'] = user.id
|
||||
# if x2m field, assign to the first user found
|
||||
# (same behavior as Field.traverse_related)
|
||||
vals['user_id'] = user.ids[0]
|
||||
record.activity_schedule(**vals)
|
||||
return False
|
||||
|
||||
|
|
@ -255,8 +391,4 @@ class ServerActions(models.Model):
|
|||
key set to False in the context. This way all notification emails linked
|
||||
to the currently executed action will be set in the queue instead of
|
||||
sent directly. This will avoid possible break in transactions. """
|
||||
eval_context = super(ServerActions, self)._get_eval_context(action=action)
|
||||
ctx = dict(eval_context['env'].context)
|
||||
ctx['mail_notify_force_send'] = False
|
||||
eval_context['env'].context = ctx
|
||||
return eval_context
|
||||
return super(IrActionsServer, self.with_context(mail_notify_force_send=False))._get_eval_context(action=action)
|
||||
|
|
|
|||
|
|
@ -1,94 +1,121 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import _, models, SUPERUSER_ID
|
||||
from odoo.exceptions import AccessError, MissingError, UserError
|
||||
from odoo.tools import consteq
|
||||
import contextlib
|
||||
|
||||
from odoo import _, models, fields, api
|
||||
from odoo.exceptions import AccessError, UserError
|
||||
from odoo.tools.misc import limited_field_access_token, verify_limited_field_access_token
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class IrAttachment(models.Model):
|
||||
_inherit = 'ir.attachment'
|
||||
|
||||
def _check_attachments_access(self, attachment_tokens):
|
||||
"""This method relies on access rules/rights and therefore it should not be called from a sudo env."""
|
||||
self = self.sudo(False)
|
||||
thumbnail = fields.Image()
|
||||
has_thumbnail = fields.Boolean(compute="_compute_has_thumbnail")
|
||||
|
||||
@api.depends("thumbnail")
|
||||
def _compute_has_thumbnail(self):
|
||||
for attachment in self.with_context(bin_size=True):
|
||||
attachment.has_thumbnail = bool(attachment.thumbnail)
|
||||
|
||||
def _has_attachments_ownership(self, attachment_tokens):
|
||||
""" Checks if the current user has ownership of all attachments in the recordset.
|
||||
Ownership is defined as either:
|
||||
- Having 'write' access to the attachment.
|
||||
- Providing a valid, scoped 'attachment_ownership' access token.
|
||||
|
||||
:param list attachment_tokens: A list of access tokens
|
||||
"""
|
||||
attachment_tokens = attachment_tokens or ([None] * len(self))
|
||||
if len(attachment_tokens) != len(self):
|
||||
raise UserError(_("An access token must be provided for each attachment."))
|
||||
for attachment, access_token in zip(self, attachment_tokens):
|
||||
try:
|
||||
attachment_sudo = attachment.with_user(SUPERUSER_ID).exists()
|
||||
if not attachment_sudo:
|
||||
raise MissingError(_("The attachment %s does not exist.", attachment.id))
|
||||
try:
|
||||
attachment.check('write')
|
||||
except AccessError:
|
||||
if not access_token or not attachment_sudo.access_token or not consteq(attachment_sudo.access_token, access_token):
|
||||
message_sudo = self.env['mail.message'].sudo().search([('attachment_ids', 'in', attachment_sudo.ids)], limit=1)
|
||||
if not message_sudo or not message_sudo.is_current_user_or_guest_author:
|
||||
raise
|
||||
except (AccessError, MissingError):
|
||||
raise UserError(_("The attachment %s does not exist or you do not have the rights to access it.", attachment.id))
|
||||
|
||||
def _post_add_create(self):
|
||||
def is_owned(attachment, token):
|
||||
if not attachment.exists():
|
||||
return False
|
||||
if attachment.sudo(False).has_access("write"):
|
||||
return True
|
||||
return token and verify_limited_field_access_token(
|
||||
attachment, "id", token, scope="attachment_ownership"
|
||||
)
|
||||
|
||||
return all(is_owned(att, tok) for att, tok in zip(self, attachment_tokens, strict=True))
|
||||
|
||||
def _post_add_create(self, **kwargs):
|
||||
""" Overrides behaviour when the attachment is created through the controller
|
||||
"""
|
||||
super(IrAttachment, self)._post_add_create()
|
||||
for record in self:
|
||||
record.register_as_main_attachment(force=False)
|
||||
super()._post_add_create(**kwargs)
|
||||
self.register_as_main_attachment(force=False)
|
||||
|
||||
def register_as_main_attachment(self, force=True):
|
||||
""" Registers this attachment as the main one of the model it is
|
||||
attached to.
|
||||
"""
|
||||
self.ensure_one()
|
||||
if not self.res_model:
|
||||
return
|
||||
related_record = self.env[self.res_model].browse(self.res_id)
|
||||
if not related_record.check_access_rights('write', raise_exception=False):
|
||||
return
|
||||
# message_main_attachment_id field can be empty, that's why we compare to False;
|
||||
# we are just checking that it exists on the model before writing it
|
||||
if related_record and hasattr(related_record, 'message_main_attachment_id'):
|
||||
if force or not related_record.message_main_attachment_id:
|
||||
#Ignore AccessError, if you don't have access to modify the document
|
||||
#Just don't set the value
|
||||
try:
|
||||
related_record.message_main_attachment_id = self
|
||||
except AccessError:
|
||||
pass
|
||||
|
||||
def _delete_and_notify(self):
|
||||
:param bool force: if set, the method always updates the existing main attachment
|
||||
otherwise it only sets the main attachment if there is none.
|
||||
"""
|
||||
todo = self.filtered(lambda a: a.res_model and a.res_id)
|
||||
if not todo:
|
||||
return
|
||||
|
||||
for model, attachments in todo.grouped("res_model").items():
|
||||
related_records = self.env[model].browse(attachments.mapped("res_id"))
|
||||
if not hasattr(related_records, '_message_set_main_attachment_id'):
|
||||
return
|
||||
|
||||
# this action is generic; if user cannot update record do not crash
|
||||
# just skip update
|
||||
for related_record, attachment in zip(related_records, attachments):
|
||||
with contextlib.suppress(AccessError):
|
||||
related_record._message_set_main_attachment_id(attachment, force=force)
|
||||
|
||||
def _delete_and_notify(self, message=None):
|
||||
if message:
|
||||
# sudo: mail.message - safe write just updating the date, because guests don't have the rights
|
||||
message.sudo().write({}) # to make sure write_date on the message is updated
|
||||
for attachment in self:
|
||||
if attachment.res_model == 'mail.channel' and attachment.res_id:
|
||||
target = self.env['mail.channel'].browse(attachment.res_id)
|
||||
else:
|
||||
target = self.env.user.partner_id
|
||||
self.env['bus.bus']._sendone(target, 'ir.attachment/delete', {
|
||||
'id': attachment.id,
|
||||
})
|
||||
attachment._bus_send(
|
||||
"ir.attachment/delete",
|
||||
{
|
||||
"id": attachment.id,
|
||||
"message": (
|
||||
{"id": message.id, "write_date": message.write_date} if message else None
|
||||
),
|
||||
},
|
||||
)
|
||||
self.unlink()
|
||||
|
||||
def _attachment_format(self, legacy=False):
|
||||
res_list = []
|
||||
for attachment in self:
|
||||
res = {
|
||||
'checksum': attachment.checksum,
|
||||
'id': attachment.id,
|
||||
'filename': attachment.name,
|
||||
'name': attachment.name,
|
||||
'mimetype': attachment.mimetype,
|
||||
'type': attachment.type,
|
||||
'url': attachment.url,
|
||||
}
|
||||
if not legacy:
|
||||
res['originThread'] = [('insert', {
|
||||
'id': attachment.res_id,
|
||||
'model': attachment.res_model,
|
||||
})]
|
||||
else:
|
||||
res.update({
|
||||
'res_id': attachment.res_id,
|
||||
'res_model': attachment.res_model,
|
||||
})
|
||||
res_list.append(res)
|
||||
return res_list
|
||||
def _get_store_ownership_fields(self):
|
||||
return [Store.Attr("ownership_token", lambda a: a._get_ownership_token())]
|
||||
|
||||
def _to_store_defaults(self, target):
|
||||
return [
|
||||
"checksum",
|
||||
"create_date",
|
||||
"file_size",
|
||||
"has_thumbnail",
|
||||
"mimetype",
|
||||
"name",
|
||||
Store.Attr("raw_access_token", lambda a: a._get_raw_access_token()),
|
||||
"res_name",
|
||||
"res_model",
|
||||
Store.One("thread", [], as_thread=True),
|
||||
Store.Attr("thumbnail_access_token", lambda a: a._get_thumbnail_token()),
|
||||
"type",
|
||||
"url",
|
||||
]
|
||||
|
||||
def _get_ownership_token(self):
|
||||
""" Returns a scoped limited access token that indicates ownership of the attachment when
|
||||
using _has_attachments_ownership. If verified by verify_limited_field_access_token,
|
||||
accessing the attachment bypasses the ACLs.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
self.ensure_one()
|
||||
return limited_field_access_token(self, field_name="id", scope="attachment_ownership")
|
||||
|
||||
def _get_thumbnail_token(self):
|
||||
self.ensure_one()
|
||||
return limited_field_access_token(self, "thumbnail", scope="binary")
|
||||
|
|
|
|||
0
odoo-bringout-oca-ocb-mail/mail/models/ir_binary.py
Normal file
0
odoo-bringout-oca-ocb-mail/mail/models/ir_binary.py
Normal file
|
|
@ -4,22 +4,86 @@
|
|||
from odoo import api, models
|
||||
|
||||
|
||||
class IrConfigParameter(models.Model):
|
||||
class IrConfig_Parameter(models.Model):
|
||||
# Override of config parameter to specifically handle the template
|
||||
# rendering group (de)activation through ICP.
|
||||
|
||||
# While being there, let us document quickly mail ICP.
|
||||
|
||||
# Emailing
|
||||
# * 'mail.mail.queue.batch.size': used in MailMail.process_email_queue()
|
||||
# to limit maximum number of mail.mail managed by each cron call to VALUE.
|
||||
# 1000 by default;
|
||||
# * 'mail.session.batch.size': used in MailMail._split_by_mail_configuration()
|
||||
# to prepare batches of maximum VALUE mails to give at '_send()' at each
|
||||
# iteration. For each iteration an SMTP server is opened and closed. It
|
||||
# prepares data for 'send' in conjunction with auto_commit=True in order
|
||||
# to avoid repeating batches in case of failure). 1000 by default;
|
||||
# * 'mail.mail.force.send.limit': used in
|
||||
# - MailThread._notify_thread_by_email(): notification emails flow
|
||||
# - MailComposer._action_send_mail_mass_mail(): mail composer in mass mail mode
|
||||
# to force the cron queue usage and avoid sending too much emails in a given
|
||||
# transaction. When 0 is set flows based on it are always using the email
|
||||
# queue, no direct send is performed. Default value is 100;
|
||||
# * 'mail.batch_size': used in
|
||||
# - MailComposer._action_send_mail_mass_mail(): mails generation based on records
|
||||
# - MailThread._notify_thread_by_email(): mails generation for notification emails
|
||||
# - MailTemplate.send_mail_batch(): mails generation done directly from templates
|
||||
# to split mail generation in batches;
|
||||
# - EventMail._execute_attendee_based() and EventMail._execute_event_based():
|
||||
# mails (+ sms, whatsapp) generation for each attendee of en event;
|
||||
# 50 by default;
|
||||
# * 'mail.render.cron.limit': used in cron involving rendering of content
|
||||
# and/or templates, like event mail scheduler cron. Defaults to 1000;
|
||||
# * 'mail.server.personal.limit.minutes': used when sending email using
|
||||
# personal mail servers, maximum number of emails that can be sent in
|
||||
# one minute
|
||||
|
||||
# Mail Gateway
|
||||
# * 'mail.gateway.loop.minutes' and 'mail.gateway.loop.threshold': block
|
||||
# emails with same email_from if gateway received more than THRESHOLD
|
||||
# in MINUTES. This is used to break loops e.g. when email servers bounce
|
||||
# each other. 20 emails / 120 minutes by default;
|
||||
# * 'mail.default.from_filter': default from_filter used when there is
|
||||
# no specific outgoing mail server used to send emails;
|
||||
# * 'mail.catchall.domain.allowed': optional list of email domains that
|
||||
# restricts right-part of aliases when used in pre-17 compatibility
|
||||
# mode (see MailAlias.alias_incoming_local);
|
||||
|
||||
# Activities
|
||||
# * 'mail.activity.gc.delete_overdue_years': if set, activities outdated
|
||||
# for more than VALUE years are gc. 0 (skipped) by default;
|
||||
# * 'mail.activity.systray.limit': number of activities fetched by the
|
||||
# systray, to avoid performance issues notably with technical users that
|
||||
# rarely connect. 1000 by default;
|
||||
|
||||
# Groups
|
||||
# * 'mail.restrict.template.rendering': ICP used in config settings to
|
||||
# add or remove 'mail.group_mail_template_editor' group to internal
|
||||
# users i.e. restrict or not QWeb rendering and edition by default.
|
||||
# Not activated by default;
|
||||
|
||||
# Discuss
|
||||
# * 'mail.link_preview_throttle': avoid storing link previews for discuss
|
||||
# if more than VALUE existing link previews are stored for the given
|
||||
# domain in the last 10 seconds. 99 by default;
|
||||
# * 'mail.chat_from_token': allow chat from token;
|
||||
|
||||
# Configuration keys
|
||||
# * 'mail.google_translate_api_key': key used to fetch translations using
|
||||
# google translate;
|
||||
# * 'mail.web_push_vapid_private_key' and 'mail.web_push_vapid_public_key':
|
||||
# configuration parameters when using web push notifications;
|
||||
# * 'mail.use_twilio_rtc_servers', 'mail.use_sfu_server', 'mail.sfu_server_url' and 'mail.
|
||||
# sfu_server_key': rtc server usage and configuration;
|
||||
# * 'discuss.tenor_api_key': used for gif fetch service;
|
||||
# * 'mail.server.outlook.iap.endpoint': URL of the IAP endpoint
|
||||
# for outlook oauth server
|
||||
# * 'mail.server.gmail.iap.endpoint': URL of the IAP endpoint
|
||||
# for gmail oauth server
|
||||
|
||||
_inherit = 'ir.config_parameter'
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
for vals in vals_list:
|
||||
if vals.get('key') in ['mail.bounce.alias', 'mail.catchall.alias']:
|
||||
vals['value'] = self.env['mail.alias']._clean_and_check_unique([vals.get('value')])[0]
|
||||
return super().create(vals_list)
|
||||
|
||||
def write(self, vals):
|
||||
for parameter in self:
|
||||
if 'value' in vals and parameter.key in ['mail.bounce.alias', 'mail.catchall.alias'] and vals['value'] != parameter.value:
|
||||
vals['value'] = self.env['mail.alias']._clean_and_check_unique([vals.get('value')])[0]
|
||||
return super().write(vals)
|
||||
|
||||
@api.model
|
||||
def set_param(self, key, value):
|
||||
if key == 'mail.restrict.template.rendering':
|
||||
|
|
@ -27,7 +91,7 @@ class IrConfigParameter(models.Model):
|
|||
group_mail_template_editor = self.env.ref('mail.group_mail_template_editor')
|
||||
|
||||
if not value and group_mail_template_editor not in group_user.implied_ids:
|
||||
group_user.implied_ids |= group_mail_template_editor
|
||||
group_user._apply_group(group_mail_template_editor)
|
||||
|
||||
elif value and group_mail_template_editor in group_user.implied_ids:
|
||||
# remove existing users, including inactive template user
|
||||
|
|
@ -35,6 +99,6 @@ class IrConfigParameter(models.Model):
|
|||
group_user._remove_group(group_mail_template_editor)
|
||||
# sanitize and normalize allowed catchall domains
|
||||
elif key == 'mail.catchall.domain.allowed' and value:
|
||||
value = self.env['mail.alias']._clean_and_check_mail_catchall_allowed_domains(value)
|
||||
value = self.env['mail.alias']._sanitize_allowed_domains(value)
|
||||
|
||||
return super(IrConfigParameter, self).set_param(key, value)
|
||||
return super().set_param(key, value)
|
||||
|
|
|
|||
20
odoo-bringout-oca-ocb-mail/mail/models/ir_cron.py
Normal file
20
odoo-bringout-oca-ocb-mail/mail/models/ir_cron.py
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models, SUPERUSER_ID
|
||||
|
||||
|
||||
class IrCron(models.AbstractModel):
|
||||
_name = 'ir.cron'
|
||||
_inherit = ['ir.cron', 'mail.thread', 'mail.activity.mixin']
|
||||
|
||||
user_id = fields.Many2one(tracking=True)
|
||||
interval_number = fields.Integer(tracking=True)
|
||||
interval_type = fields.Selection(tracking=True)
|
||||
priority = fields.Integer(tracking=True)
|
||||
|
||||
def _notify_admin(self, message):
|
||||
""" Send a notification to the admin users. """
|
||||
channel_admin = self.env.ref("mail.channel_admin", raise_if_not_found=False)
|
||||
if channel_admin:
|
||||
channel_admin.with_user(SUPERUSER_ID).message_post(body=message)
|
||||
super()._notify_admin(message)
|
||||
|
|
@ -1,27 +1,28 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import odoo
|
||||
from odoo import models
|
||||
from odoo.http import request
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class IrHttp(models.AbstractModel):
|
||||
_inherit = 'ir.http'
|
||||
|
||||
def session_info(self):
|
||||
user = self.env.user
|
||||
result = super(IrHttp, self).session_info()
|
||||
if self.env.user._is_internal():
|
||||
result['notification_type'] = user.notification_type
|
||||
"""Override to add the current user data (partner or guest) if applicable."""
|
||||
result = super().session_info()
|
||||
store = Store()
|
||||
ResUsers = self.env["res.users"]
|
||||
if cids := request.cookies.get("cids", False):
|
||||
allowed_company_ids = []
|
||||
for company_id in [int(cid) for cid in cids.split("-")]:
|
||||
if company_id in self.env.user.company_ids.ids:
|
||||
allowed_company_ids.append(company_id)
|
||||
ResUsers = self.with_context(allowed_company_ids=allowed_company_ids).env["res.users"]
|
||||
ResUsers._init_store_data(store)
|
||||
result["storeData"] = store.get_result()
|
||||
guest = self.env['mail.guest']._get_guest_from_context()
|
||||
if not request.session.uid and guest:
|
||||
user_context = {'lang': guest.lang}
|
||||
mods = odoo.conf.server_wide_modules or []
|
||||
lang = user_context.get("lang")
|
||||
translation_hash = self.env['ir.http'].sudo().get_web_translations_hash(mods, lang)
|
||||
result['cache_hashes']['translations'] = translation_hash
|
||||
result.update({
|
||||
'name': guest.name,
|
||||
'user_context': user_context,
|
||||
})
|
||||
result["user_context"] = user_context
|
||||
return result
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import _, fields, models
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import email_normalize
|
||||
|
||||
|
||||
class IrMailServer(models.Model):
|
||||
_name = 'ir.mail_server'
|
||||
_inherit = ['ir.mail_server']
|
||||
class IrMail_Server(models.Model):
|
||||
_inherit = 'ir.mail_server'
|
||||
_email_field = 'smtp_user'
|
||||
|
||||
mail_template_ids = fields.One2many(
|
||||
comodel_name='mail.template',
|
||||
|
|
@ -14,10 +17,90 @@ class IrMailServer(models.Model):
|
|||
string='Mail template using this mail server',
|
||||
readonly=True)
|
||||
|
||||
owner_user_id = fields.Many2one('res.users', 'Owner')
|
||||
|
||||
# Store the current time, and the number of emails we sent
|
||||
# Each minute, the time and the count will be reset
|
||||
# Used to throttle the number of emails we send for the personal
|
||||
# mail servers.
|
||||
owner_limit_time = fields.Datetime('Owner Limit Time')
|
||||
owner_limit_count = fields.Integer('Owner Limit Count')
|
||||
|
||||
_unique_owner_user_id = models.Constraint(
|
||||
"UNIQUE(owner_user_id)",
|
||||
"owner_user_id must be unique",
|
||||
)
|
||||
|
||||
def _active_usages_compute(self):
|
||||
usages_super = super()._active_usages_compute()
|
||||
for record in self.filtered('mail_template_ids'):
|
||||
usages_super.setdefault(record.id, []).extend(
|
||||
map(lambda t: _('%s (Email Template)', t.display_name), record.mail_template_ids)
|
||||
self.env._('%s (Email Template)', t.display_name)
|
||||
for t in record.mail_template_ids
|
||||
)
|
||||
return usages_super
|
||||
|
||||
@api.model
|
||||
def _get_default_bounce_address(self):
|
||||
""" Compute the default bounce address. Try to use mail-defined config
|
||||
parameter bounce alias if set. """
|
||||
if self.env.company.bounce_email:
|
||||
return self.env.company.bounce_email
|
||||
return super()._get_default_bounce_address()
|
||||
|
||||
@api.model
|
||||
def _get_default_from_address(self):
|
||||
""" Default from: try to use default_from defined on company's alias
|
||||
domain. """
|
||||
if default_from := self.env.company.default_from_email:
|
||||
return default_from
|
||||
return super()._get_default_from_address()
|
||||
|
||||
def _get_test_email_from(self):
|
||||
self.ensure_one()
|
||||
if from_filter_parts := [part.strip() for part in (self.from_filter or '').split(",") if part.strip()]:
|
||||
# find first found complete email in filter parts
|
||||
if mail_from := next((email for email in from_filter_parts if "@" in email), None):
|
||||
return mail_from
|
||||
# the mail server is configured for a domain that matches the default email address
|
||||
alias_domains = self.env['mail.alias.domain'].sudo().search([])
|
||||
matching = next(
|
||||
(alias_domain for alias_domain in alias_domains
|
||||
if self._match_from_filter(alias_domain.default_from_email, self.from_filter)
|
||||
), False
|
||||
)
|
||||
if matching:
|
||||
return matching.default_from_email
|
||||
# fake default_from "odoo@domain"
|
||||
return f"odoo@{from_filter_parts[0]}"
|
||||
# no from_filter or from_filter is configured for a domain different that
|
||||
# the default_from of company's alias_domain -> fallback
|
||||
return super()._get_test_email_from()
|
||||
|
||||
@api.model
|
||||
def _filter_mail_servers_fallback(self, servers):
|
||||
return servers.filtered(lambda s: not s.owner_user_id)
|
||||
|
||||
def _find_mail_server_allowed_domain(self):
|
||||
"""Restrict search to 'public' servers."""
|
||||
domain = super()._find_mail_server_allowed_domain()
|
||||
domain &= Domain('owner_user_id', '=', False)
|
||||
return domain
|
||||
|
||||
def _check_forced_mail_server(self, mail_server, allow_archived, smtp_from):
|
||||
super()._check_forced_mail_server(mail_server, allow_archived, smtp_from)
|
||||
|
||||
if mail_server.owner_user_id:
|
||||
if email_normalize(smtp_from) != mail_server.from_filter:
|
||||
raise UserError(_('The server "%s" cannot be forced as it belongs to a user.', mail_server.display_name))
|
||||
if not mail_server.active:
|
||||
raise UserError(_('The server "%s" cannot be forced as it belongs to a user and is archived.', mail_server.display_name))
|
||||
if mail_server.owner_user_id.outgoing_mail_server_id != mail_server:
|
||||
raise UserError(_('The server "%s" cannot be forced as the owner does not use it anymore.', mail_server.display_name))
|
||||
|
||||
def _get_personal_mail_servers_limit(self):
|
||||
"""Return the number of email we can send in 1 minutes for this outgoing server.
|
||||
|
||||
0 fallbacks to 30 to avoid blocking servers.
|
||||
"""
|
||||
return int(self.env['ir.config_parameter'].sudo().get_param('mail.server.personal.limit.minutes')) or 30
|
||||
|
|
|
|||
|
|
@ -23,6 +23,10 @@ class IrModel(models.Model):
|
|||
""" Delete mail data (followers, messages, activities) associated with
|
||||
the models being deleted.
|
||||
"""
|
||||
if not self:
|
||||
return True
|
||||
|
||||
# Delete followers, messages and attachments for models that will be unlinked.
|
||||
mail_models = self.search([
|
||||
('model', 'in', ('mail.activity', 'mail.activity.type', 'mail.followers', 'mail.message'))
|
||||
], order='id')
|
||||
|
|
@ -78,10 +82,11 @@ class IrModel(models.Model):
|
|||
res = super(IrModel, self).write(vals)
|
||||
self.env.flush_all()
|
||||
# setup models; this reloads custom models in registry
|
||||
self.pool.setup_models(self._cr)
|
||||
model_names = self.mapped('model')
|
||||
self.pool._setup_models__(self.env.cr, model_names)
|
||||
# update database schema of models
|
||||
models = self.pool.descendants(self.mapped('model'), '_inherits')
|
||||
self.pool.init_models(self._cr, models, dict(self._context, update_custom_fields=True))
|
||||
model_names = self.pool.descendants(model_names, '_inherits')
|
||||
self.pool.init_models(self.env.cr, model_names, dict(self.env.context, update_custom_fields=True))
|
||||
else:
|
||||
res = super(IrModel, self).write(vals)
|
||||
return res
|
||||
|
|
@ -94,30 +99,44 @@ class IrModel(models.Model):
|
|||
return vals
|
||||
|
||||
@api.model
|
||||
def _instanciate(self, model_data):
|
||||
model_class = super(IrModel, self)._instanciate(model_data)
|
||||
if model_data.get('is_mail_blacklist') and model_class._name != 'mail.thread.blacklist':
|
||||
parents = model_class._inherit or []
|
||||
def _instanciate_attrs(self, model_data):
|
||||
attrs = super()._instanciate_attrs(model_data)
|
||||
if model_data.get('is_mail_blacklist') and attrs['_name'] != 'mail.thread.blacklist':
|
||||
parents = attrs.get('_inherit') or []
|
||||
parents = [parents] if isinstance(parents, str) else parents
|
||||
model_class._inherit = parents + ['mail.thread.blacklist']
|
||||
if model_class._custom:
|
||||
model_class._primary_email = 'x_email'
|
||||
elif model_data.get('is_mail_thread') and model_class._name != 'mail.thread':
|
||||
parents = model_class._inherit or []
|
||||
attrs['_inherit'] = parents + ['mail.thread.blacklist']
|
||||
if attrs['_custom']:
|
||||
attrs['_primary_email'] = 'x_email'
|
||||
elif model_data.get('is_mail_thread') and attrs['_name'] != 'mail.thread':
|
||||
parents = attrs.get('_inherit') or []
|
||||
parents = [parents] if isinstance(parents, str) else parents
|
||||
model_class._inherit = parents + ['mail.thread']
|
||||
if model_data.get('is_mail_activity') and model_class._name != 'mail.activity.mixin':
|
||||
parents = model_class._inherit or []
|
||||
attrs['_inherit'] = parents + ['mail.thread']
|
||||
if model_data.get('is_mail_activity') and attrs['_name'] != 'mail.activity.mixin':
|
||||
parents = attrs.get('_inherit') or []
|
||||
parents = [parents] if isinstance(parents, str) else parents
|
||||
model_class._inherit = parents + ['mail.activity.mixin']
|
||||
return model_class
|
||||
attrs['_inherit'] = parents + ['mail.activity.mixin']
|
||||
return attrs
|
||||
|
||||
def _get_model_definitions(self, model_names_to_fetch):
|
||||
fields_by_model_names = super()._get_model_definitions(model_names_to_fetch)
|
||||
for model_name, field_by_fname in fields_by_model_names.items():
|
||||
def _get_definitions(self, model_names):
|
||||
model_definitions = super()._get_definitions(model_names)
|
||||
for model_name, model_definition in model_definitions.items():
|
||||
model = self.env[model_name]
|
||||
tracked_field_names = model._track_get_fields() if 'mail.thread' in model._inherit else []
|
||||
for fname, field in field_by_fname.items():
|
||||
for fname in tracked_field_names:
|
||||
if fname in model_definition["fields"]:
|
||||
model_definition["fields"][fname]["tracking"] = True
|
||||
if isinstance(self.env[model_name], self.env.registry['mail.activity.mixin']):
|
||||
model_definition["has_activities"] = True
|
||||
return model_definitions
|
||||
|
||||
def _get_model_definitions(self, model_names_to_fetch):
|
||||
model_definitions = super()._get_model_definitions(model_names_to_fetch)
|
||||
for model_name, model_definition in model_definitions.items():
|
||||
model = self.env[model_name]
|
||||
tracked_field_names = model._track_get_fields() if 'mail.thread' in model._inherit else []
|
||||
for fname, field in model_definition["fields"].items():
|
||||
if fname in tracked_field_names:
|
||||
field['tracking'] = True
|
||||
return fields_by_model_names
|
||||
if isinstance(self.env[model_name], self.env.registry['mail.activity.mixin']):
|
||||
model_definition["has_activities"] = True
|
||||
return model_definitions
|
||||
|
|
|
|||
|
|
@ -2,21 +2,22 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
from odoo.tools import groupby
|
||||
|
||||
|
||||
class IrModelField(models.Model):
|
||||
class IrModelFields(models.Model):
|
||||
_inherit = 'ir.model.fields'
|
||||
|
||||
tracking = fields.Integer(
|
||||
string="Enable Ordered Tracking",
|
||||
help="If set every modification done to this field is tracked in the chatter. Value is used to order tracking values.",
|
||||
help="If set every modification done to this field is tracked. Value is used to order tracking values.",
|
||||
)
|
||||
|
||||
def _reflect_field_params(self, field, model_id):
|
||||
""" Tracking value can be either a boolean enabling tracking mechanism
|
||||
on field, either an integer giving the sequence. Default sequence is
|
||||
set to 100. """
|
||||
vals = super(IrModelField, self)._reflect_field_params(field, model_id)
|
||||
vals = super()._reflect_field_params(field, model_id)
|
||||
tracking = getattr(field, 'tracking', None)
|
||||
if tracking is True:
|
||||
tracking = 100
|
||||
|
|
@ -26,7 +27,33 @@ class IrModelField(models.Model):
|
|||
return vals
|
||||
|
||||
def _instanciate_attrs(self, field_data):
|
||||
attrs = super(IrModelField, self)._instanciate_attrs(field_data)
|
||||
attrs = super()._instanciate_attrs(field_data)
|
||||
if attrs and field_data.get('tracking'):
|
||||
attrs['tracking'] = field_data['tracking']
|
||||
return attrs
|
||||
|
||||
def unlink(self):
|
||||
""" When unlinking fields populate tracking value table with relevant
|
||||
information. That way if a field is removed (custom tracked, migration
|
||||
or any other reason) we keep the tracking and its relevant information.
|
||||
Do it only when unlinking fields so that we don't duplicate field
|
||||
information for most tracking. """
|
||||
tracked = self.filtered('tracking')
|
||||
if tracked:
|
||||
tracking_values = self.env['mail.tracking.value'].search(
|
||||
[('field_id', 'in', tracked.ids)]
|
||||
)
|
||||
field_to_trackings = groupby(tracking_values, lambda track: track.field_id)
|
||||
for field, trackings in field_to_trackings:
|
||||
if field.model_id.model not in self.env:
|
||||
# Model is already deleted
|
||||
continue
|
||||
self.env['mail.tracking.value'].concat(*trackings).write({
|
||||
'field_info': {
|
||||
'desc': field.field_description,
|
||||
'name': field.name,
|
||||
'sequence': self.env[field.model_id.model]._mail_track_get_field_sequence(field.name),
|
||||
'type': field.ttype,
|
||||
}
|
||||
})
|
||||
return super().unlink()
|
||||
|
|
|
|||
|
|
@ -1,16 +1,55 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from odoo import models
|
||||
|
||||
|
||||
class IrQweb(models.AbstractModel):
|
||||
""" Add ``raise_on_code`` option for qweb. When this option is activated
|
||||
then all directives are prohibited.
|
||||
"""Add ``raise_on_forbidden_code_for_model`` option for qweb.
|
||||
|
||||
When this option is activated, only a whitelist of expressions
|
||||
is allowed for the given model.
|
||||
"""
|
||||
_inherit = 'ir.qweb'
|
||||
|
||||
_inherit = "ir.qweb"
|
||||
|
||||
allowed_directives = (
|
||||
"out",
|
||||
"inner-content",
|
||||
"att",
|
||||
"tag-open",
|
||||
"tag-close",
|
||||
)
|
||||
|
||||
def _get_template_cache_keys(self):
|
||||
return super()._get_template_cache_keys() + ['raise_on_code']
|
||||
return super()._get_template_cache_keys() + ["raise_on_forbidden_code_for_model"]
|
||||
|
||||
def _compile_directives(self, el, compile_context, indent):
|
||||
if compile_context.get('raise_on_code'):
|
||||
raise PermissionError("This rendering mode prohibits the use of directives.")
|
||||
return super()._compile_directives(el, compile_context, indent)
|
||||
def _compile_directive(self, el, compile_context, directive, level):
|
||||
if (
|
||||
"raise_on_forbidden_code_for_model" in compile_context
|
||||
and directive not in self.allowed_directives
|
||||
):
|
||||
raise PermissionError("This directive is not allowed for this rendering mode.")
|
||||
return super()._compile_directive(el, compile_context, directive, level)
|
||||
|
||||
def _compile_directive_att(self, el, compile_context, level):
|
||||
if "raise_on_forbidden_code_for_model" in compile_context:
|
||||
if set(el.attrib) - {"t-out", "t-tag-open", "t-tag-close", "t-inner-content"}:
|
||||
raise PermissionError("This directive is not allowed for this rendering mode.")
|
||||
return super()._compile_directive_att(el, compile_context, level)
|
||||
|
||||
def _compile_expr(self, expr, raise_on_missing=False):
|
||||
model = self.env.context.get("raise_on_forbidden_code_for_model")
|
||||
if model is not None and not self._is_expression_allowed(expr, model):
|
||||
raise PermissionError("This directive is not allowed for this rendering mode.")
|
||||
return super()._compile_expr(expr, raise_on_missing)
|
||||
|
||||
def _compile_directive_out(self, el, compile_context, level):
|
||||
if "raise_on_forbidden_code_for_model" in compile_context:
|
||||
if len(el) != 0:
|
||||
raise PermissionError("No child allowed for t-out.")
|
||||
if set(el.attrib) - {'t-out', 't-tag-open', 't-tag-close'}:
|
||||
raise PermissionError("No other attribute allowed for t-out.")
|
||||
return super()._compile_directive_out(el, compile_context, level)
|
||||
|
||||
def _is_expression_allowed(self, expression, model):
|
||||
return model and expression.strip() in self.env[model].mail_allowed_qweb_expressions()
|
||||
|
|
|
|||
63
odoo-bringout-oca-ocb-mail/mail/models/ir_ui_menu.py
Normal file
63
odoo-bringout-oca-ocb-mail/mail/models/ir_ui_menu.py
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import contextlib
|
||||
|
||||
from odoo import api, models
|
||||
from odoo.exceptions import AccessError
|
||||
|
||||
|
||||
class IrUiMenu(models.Model):
|
||||
_inherit = 'ir.ui.menu'
|
||||
|
||||
@api.model
|
||||
def _get_best_backend_root_menu_id_for_model(self, res_model):
|
||||
"""Get the best menu root id for the given res_model and the access
|
||||
rights of the user.
|
||||
|
||||
When a link to a model was sent to a user it was targeting a page without
|
||||
menu, so it was hard for the user to act on it.
|
||||
The goal of this method is to find the best suited menu to display on a
|
||||
page of a given model.
|
||||
|
||||
Technically, the method tries to find a menu root which has a sub menu
|
||||
visible to the user that has an action linked to the given model.
|
||||
If there is more than one possibility, it chooses the preferred one based
|
||||
on the following preference function that determine the sub-menu from which
|
||||
the root menu is extracted:
|
||||
- favor the sub-menu linked to an action having a path as it probably indicates
|
||||
a "major" action
|
||||
- then favor the sub-menu with the smallest menu id as it probably indicates
|
||||
that it belongs to the main module of the model and not a sub-one.
|
||||
|
||||
:param str res_model: the model name for which we want to find the best
|
||||
menu root id
|
||||
:return: the best menu root id or None if not found
|
||||
:rtype: int
|
||||
"""
|
||||
with contextlib.suppress(AccessError): # if no access to the menu, return None
|
||||
visible_menu_ids = self._visible_menu_ids()
|
||||
# Try first to get a menu root from the model implementation (take the less specialized i.e. the first one)
|
||||
menu_root_candidates = self.env[res_model]._get_backend_root_menu_ids()
|
||||
menu_root_id = next((m_id for m_id in menu_root_candidates if m_id in visible_menu_ids), None)
|
||||
if menu_root_id:
|
||||
return menu_root_id
|
||||
|
||||
# No menu root could be found by interrogating the model so fall back to a simple heuristic
|
||||
# Prefetch menu fields and all menu's actions of type act_window
|
||||
menus = self.env['ir.ui.menu'].browse(visible_menu_ids)
|
||||
self.env['ir.actions.act_window'].sudo().browse([
|
||||
int(menu['action'].split(',')[1])
|
||||
for menu in menus.read(['action', 'parent_path'])
|
||||
if menu['action'] and menu['action'].startswith('ir.actions.act_window,')
|
||||
]).filtered('res_model')
|
||||
|
||||
def _menu_sort_key(menu_action):
|
||||
menu, action = menu_action
|
||||
return 1 if action.path else 0, -menu.id
|
||||
|
||||
menu_sudo = max((
|
||||
(menu, action) for menu in menus.sudo() for action in (menu.action,)
|
||||
if action and action.type == 'ir.actions.act_window' and action.res_model == res_model
|
||||
and all(int(menu_id) in visible_menu_ids for menu_id in menu.parent_path.split('/') if menu_id)
|
||||
), key=_menu_sort_key, default=(None, None))[0]
|
||||
return int(menu_sudo.parent_path[:menu_sudo.parent_path.index('/')]) if menu_sudo else None
|
||||
|
|
@ -2,15 +2,13 @@
|
|||
from odoo import fields, models
|
||||
|
||||
|
||||
class View(models.Model):
|
||||
class IrUiView(models.Model):
|
||||
_inherit = 'ir.ui.view'
|
||||
|
||||
type = fields.Selection(selection_add=[('activity', 'Activity')])
|
||||
|
||||
def _postprocess_tag_field(self, node, name_manager, node_info):
|
||||
if node.xpath("ancestor::div[hasclass('oe_chatter')]"):
|
||||
# Pass the postprocessing of the mail thread fields
|
||||
# The web client makes it completely custom, and this is therefore pointless.
|
||||
name_manager.has_field(node, node.get('name'), {})
|
||||
return
|
||||
return super()._postprocess_tag_field(node, name_manager, node_info)
|
||||
def _is_qweb_based_view(self, view_type):
|
||||
return view_type == "activity" or super()._is_qweb_based_view(view_type)
|
||||
|
||||
def _get_view_info(self):
|
||||
return {'activity': {'icon': 'fa fa-clock-o'}} | super()._get_view_info()
|
||||
|
|
|
|||
|
|
@ -1,49 +1,119 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import logging
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from odoo import models
|
||||
from odoo.http import request
|
||||
from odoo.addons.bus.websocket import wsrequest
|
||||
from odoo.fields import Domain
|
||||
from odoo.addons.mail.tools.discuss import add_guest_to_context
|
||||
from odoo.tools.misc import verify_limited_field_access_token
|
||||
|
||||
PRESENCE_CHANNEL_PREFIX = "odoo-presence-"
|
||||
PRESENCE_CHANNEL_REGEX = re.compile(
|
||||
rf"{PRESENCE_CHANNEL_PREFIX}"
|
||||
r"(?P<model>res\.partner|mail\.guest)_(?P<record_id>\d+)"
|
||||
r"(?:-(?P<token>[a-f0-9]{64}o0x[a-f0-9]+))?$"
|
||||
)
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IrWebsocket(models.AbstractModel):
|
||||
_inherit = 'ir.websocket'
|
||||
"""Override to handle mail specific features (presence in particular)."""
|
||||
|
||||
def _get_im_status(self, data):
|
||||
im_status = super()._get_im_status(data)
|
||||
if 'mail.guest' in data:
|
||||
im_status['guests'] = self.env['mail.guest'].sudo().with_context(active_test=False).search_read(
|
||||
[('id', 'in', data['mail.guest'])],
|
||||
['im_status']
|
||||
_inherit = "ir.websocket"
|
||||
|
||||
def _serve_ir_websocket(self, event_name, data):
|
||||
"""Override to process update_presence."""
|
||||
super()._serve_ir_websocket(event_name, data)
|
||||
if event_name == "update_presence":
|
||||
self._update_mail_presence(**data)
|
||||
|
||||
@add_guest_to_context
|
||||
def _subscribe(self, og_data):
|
||||
super()._subscribe(og_data)
|
||||
|
||||
@add_guest_to_context
|
||||
def _update_mail_presence(self, inactivity_period):
|
||||
partner, guest = self.env["res.partner"]._get_current_persona()
|
||||
if not partner and not guest:
|
||||
return
|
||||
self.env["mail.presence"]._try_update_presence(
|
||||
self.env.user if partner else guest, inactivity_period
|
||||
)
|
||||
|
||||
def _prepare_subscribe_data(self, channels, last):
|
||||
data = super()._prepare_subscribe_data(channels, last)
|
||||
model_ids_to_token = defaultdict(dict)
|
||||
for channel in channels:
|
||||
if not isinstance(channel, str) or not channel.startswith(PRESENCE_CHANNEL_PREFIX):
|
||||
continue
|
||||
data["channels"].discard(channel)
|
||||
if not (match := re.match(PRESENCE_CHANNEL_REGEX, channel)):
|
||||
_logger.warning("Malformed presence channel: %s", channel)
|
||||
continue
|
||||
model, record_id, token = match.groups()
|
||||
model_ids_to_token[model][int(record_id)] = token or ""
|
||||
# sudo - res.partner, mail.guest: can access presence targets to decide whether
|
||||
# the current user is allowed to read it or not.
|
||||
partner_ids = model_ids_to_token["res.partner"].keys()
|
||||
partners = (
|
||||
self.env["res.partner"]
|
||||
.with_context(active_test=False)
|
||||
.sudo()
|
||||
.search([("id", "in", partner_ids)])
|
||||
.sudo(False)
|
||||
)
|
||||
partner, guest = self.env["res.partner"]._get_current_persona()
|
||||
allowed_partners = (
|
||||
partners.filtered(
|
||||
lambda p: verify_limited_field_access_token(
|
||||
p, "im_status", model_ids_to_token["res.partner"][p.id], scope="mail.presence"
|
||||
)
|
||||
or p.has_access("read")
|
||||
)
|
||||
return im_status
|
||||
| partner
|
||||
)
|
||||
guest_ids = model_ids_to_token["mail.guest"].keys()
|
||||
guests = self.env["mail.guest"].sudo().search([("id", "in", guest_ids)]).sudo(False)
|
||||
allowed_guests = (
|
||||
guests.filtered(
|
||||
lambda g: verify_limited_field_access_token(
|
||||
g, "im_status", model_ids_to_token["mail.guest"][g.id], scope="mail.presence"
|
||||
)
|
||||
or g.has_access("read")
|
||||
)
|
||||
| guest
|
||||
)
|
||||
data["channels"].update((partner, "presence") for partner in allowed_partners)
|
||||
data["channels"].update((guest, "presence") for guest in allowed_guests)
|
||||
# There is a gap between a subscription client side (which is debounced)
|
||||
# and the actual subcription thus presences can be missed. Send a
|
||||
# notification to avoid missing presences during a subscription.
|
||||
presence_domain = Domain("last_poll", ">", datetime.now() - timedelta(seconds=2)) & (
|
||||
Domain(
|
||||
"user_id",
|
||||
"in",
|
||||
allowed_partners.with_context(active_test=False).sudo().user_ids.ids,
|
||||
)
|
||||
| Domain("guest_id", "in", allowed_guests.ids)
|
||||
)
|
||||
# sudo: mail.presence: access to presence was validated with access token.
|
||||
data["missed_presences"] = self.env["mail.presence"].sudo().search(presence_domain)
|
||||
return data
|
||||
|
||||
def _build_bus_channel_list(self, channels):
|
||||
# This method can either be called due to an http or a
|
||||
# websocket request. The request itself is necessary to
|
||||
# retrieve the current guest. Let's retrieve the proper
|
||||
# request.
|
||||
req = request or wsrequest
|
||||
channels = list(channels) # do not alter original list
|
||||
guest_sudo = self.env['mail.guest']._get_guest_from_request(req).sudo()
|
||||
mail_channels = self.env['mail.channel']
|
||||
if req.session.uid:
|
||||
partner = self.env.user.partner_id
|
||||
mail_channels = partner.channel_ids
|
||||
channels.append(partner)
|
||||
elif guest_sudo:
|
||||
mail_channels = guest_sudo.channel_ids
|
||||
channels.append(guest_sudo)
|
||||
for mail_channel in mail_channels:
|
||||
channels.append(mail_channel)
|
||||
return super()._build_bus_channel_list(channels)
|
||||
def _after_subscribe_data(self, data):
|
||||
current_partner, current_guest = self.env["res.partner"]._get_current_persona()
|
||||
if current_partner or current_guest:
|
||||
data["missed_presences"]._send_presence(bus_target=current_partner or current_guest)
|
||||
|
||||
def _update_bus_presence(self, inactivity_period, im_status_ids_by_model):
|
||||
super()._update_bus_presence(inactivity_period, im_status_ids_by_model)
|
||||
if not self.env.user or self.env.user._is_public():
|
||||
# This method can either be called due to an http or a
|
||||
# websocket request. The request itself is necessary to
|
||||
# retrieve the current guest. Let's retrieve the proper
|
||||
# request.
|
||||
req = request or wsrequest
|
||||
guest_sudo = self.env['mail.guest']._get_guest_from_request(req).sudo()
|
||||
if not guest_sudo:
|
||||
return
|
||||
guest_sudo.env['bus.presence'].update(inactivity_period, identity_field='guest_id', identity_value=guest_sudo.id)
|
||||
def _on_websocket_closed(self, cookies):
|
||||
super()._on_websocket_closed(cookies)
|
||||
if self.env.user and not self.env.user._is_public():
|
||||
# sudo: mail.presence - user can update their own presence
|
||||
self.env.user.sudo().presence_ids.status = "offline"
|
||||
token = cookies.get(self.env["mail.guest"]._cookie_name, "")
|
||||
if guest := self.env["mail.guest"]._get_guest_from_token(token):
|
||||
# sudo: mail.presence - guest can update their own presence
|
||||
guest.sudo().presence_ids.status = "offline"
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from datetime import datetime
|
||||
|
|
@ -7,7 +6,8 @@ import logging
|
|||
import pytz
|
||||
|
||||
from odoo import api, fields, models
|
||||
from odoo.osv import expression
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import partition, SQL
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -22,7 +22,6 @@ class MailActivityMixin(models.AbstractModel):
|
|||
Activities come with a new JS widget for the form view. It is integrated in the
|
||||
Chatter widget although it is a separate widget. It displays activities linked
|
||||
to the current record and allow to schedule, edit and mark done activities.
|
||||
Just include field activity_ids in the div.oe-chatter to use it.
|
||||
|
||||
There is also a kanban widget defined. It defines a small widget to integrate
|
||||
in kanban vignettes. It allow to manage activities directly from the kanban
|
||||
|
|
@ -49,7 +48,7 @@ class MailActivityMixin(models.AbstractModel):
|
|||
|
||||
activity_ids = fields.One2many(
|
||||
'mail.activity', 'res_id', 'Activities',
|
||||
auto_join=True,
|
||||
bypass_search_access=True,
|
||||
groups="base.group_user",)
|
||||
activity_state = fields.Selection([
|
||||
('overdue', 'Overdue'),
|
||||
|
|
@ -62,7 +61,7 @@ class MailActivityMixin(models.AbstractModel):
|
|||
'Today: Activity date is today\nPlanned: Future activities.')
|
||||
activity_user_id = fields.Many2one(
|
||||
'res.users', 'Responsible User',
|
||||
related='activity_ids.user_id', readonly=False,
|
||||
compute='_compute_activity_user_id', readonly=True,
|
||||
search='_search_activity_user_id',
|
||||
groups="base.group_user")
|
||||
activity_type_id = fields.Many2one(
|
||||
|
|
@ -111,6 +110,11 @@ class MailActivityMixin(models.AbstractModel):
|
|||
record.activity_exception_decoration = exception_activity_type_id and exception_activity_type_id.decoration_type
|
||||
record.activity_exception_icon = exception_activity_type_id and exception_activity_type_id.icon
|
||||
|
||||
@api.depends('activity_ids.user_id')
|
||||
def _compute_activity_user_id(self):
|
||||
for record in self:
|
||||
record.activity_user_id = record.activity_ids[0].user_id if record.activity_ids else False
|
||||
|
||||
def _search_activity_exception_decoration(self, operator, operand):
|
||||
return [('activity_ids.activity_type_id.decoration_type', operator, operand)]
|
||||
|
||||
|
|
@ -129,14 +133,12 @@ class MailActivityMixin(models.AbstractModel):
|
|||
|
||||
def _search_activity_state(self, operator, value):
|
||||
all_states = {'overdue', 'today', 'planned', False}
|
||||
if operator == '=':
|
||||
search_states = {value}
|
||||
elif operator == '!=':
|
||||
search_states = all_states - {value}
|
||||
elif operator == 'in':
|
||||
if operator == 'in':
|
||||
search_states = set(value)
|
||||
elif operator == 'not in':
|
||||
search_states = all_states - set(value)
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
reverse_search = False
|
||||
if False in search_states:
|
||||
|
|
@ -157,61 +159,77 @@ class MailActivityMixin(models.AbstractModel):
|
|||
|
||||
search_states_int = {integer_state_value.get(s or False) for s in search_states}
|
||||
|
||||
query = """
|
||||
SELECT res_id
|
||||
FROM (
|
||||
SELECT res_id,
|
||||
-- Global activity state
|
||||
MIN(
|
||||
-- Compute the state of each individual activities
|
||||
-- -1: overdue
|
||||
-- 0: today
|
||||
-- 1: planned
|
||||
SIGN(EXTRACT(day from (
|
||||
mail_activity.date_deadline - DATE_TRUNC('day', %(today_utc)s AT TIME ZONE res_partner.tz)
|
||||
)))
|
||||
)::INT AS activity_state
|
||||
FROM mail_activity
|
||||
LEFT JOIN res_users
|
||||
ON res_users.id = mail_activity.user_id
|
||||
LEFT JOIN res_partner
|
||||
ON res_partner.id = res_users.partner_id
|
||||
WHERE mail_activity.res_model = %(res_model_table)s
|
||||
GROUP BY res_id
|
||||
) AS res_record
|
||||
WHERE %(search_states_int)s @> ARRAY[activity_state]
|
||||
"""
|
||||
|
||||
self._cr.execute(
|
||||
query,
|
||||
{
|
||||
'today_utc': pytz.utc.localize(datetime.utcnow()),
|
||||
'res_model_table': self._name,
|
||||
'search_states_int': list(search_states_int)
|
||||
},
|
||||
self.env['mail.activity'].flush_model(['active', 'date_deadline', 'res_model', 'user_id', 'user_tz'])
|
||||
query = SQL(
|
||||
"""(
|
||||
SELECT res_id
|
||||
FROM (
|
||||
SELECT res_id,
|
||||
-- Global activity state
|
||||
MIN(
|
||||
-- Compute the state of each individual activities
|
||||
-- -1: overdue
|
||||
-- 0: today
|
||||
-- 1: planned
|
||||
SIGN(EXTRACT(day from (
|
||||
mail_activity.date_deadline - DATE_TRUNC('day', %(today_utc)s AT TIME ZONE COALESCE(mail_activity.user_tz, 'utc'))
|
||||
)))
|
||||
)::INT AS activity_state
|
||||
FROM mail_activity
|
||||
WHERE mail_activity.res_model = %(res_model_table)s AND mail_activity.active = true
|
||||
GROUP BY res_id
|
||||
) AS res_record
|
||||
WHERE %(search_states_int)s @> ARRAY[activity_state]
|
||||
)""",
|
||||
today_utc=pytz.utc.localize(datetime.utcnow()),
|
||||
res_model_table=self._name,
|
||||
search_states_int=list(search_states_int)
|
||||
)
|
||||
return [('id', 'not in' if reverse_search else 'in', [r[0] for r in self._cr.fetchall()])]
|
||||
|
||||
return [('id', 'not in' if reverse_search else 'in', query)]
|
||||
|
||||
@api.depends('activity_ids.date_deadline')
|
||||
def _compute_activity_date_deadline(self):
|
||||
for record in self:
|
||||
record.activity_date_deadline = fields.first(record.activity_ids).date_deadline
|
||||
activities = record.activity_ids
|
||||
record.activity_date_deadline = next(iter(activities), activities).date_deadline
|
||||
|
||||
def _search_activity_date_deadline(self, operator, operand):
|
||||
if operator == '=' and not operand:
|
||||
return [('activity_ids', '=', False)]
|
||||
return [('activity_ids.date_deadline', operator, operand)]
|
||||
if operator == 'in' and False in operand:
|
||||
return Domain('activity_ids', '=', False) | Domain(self._search_activity_date_deadline('in', operand - {False}))
|
||||
if operator in Domain.NEGATIVE_OPERATORS:
|
||||
return NotImplemented
|
||||
return Domain('activity_ids.date_deadline', operator, operand)
|
||||
|
||||
@api.model
|
||||
def _search_activity_user_id(self, operator, operand):
|
||||
return [('activity_ids.user_id', operator, operand)]
|
||||
# field supports comparison with any boolean
|
||||
domain = Domain.FALSE
|
||||
if operator in Domain.NEGATIVE_OPERATORS:
|
||||
return NotImplemented
|
||||
if operator == 'in':
|
||||
bools, values = partition(lambda v: isinstance(v, bool), operand)
|
||||
if bools:
|
||||
if True in bools:
|
||||
domain |= Domain('activity_ids', '!=', False)
|
||||
if False in bools:
|
||||
domain |= Domain('activity_ids', '=', False)
|
||||
if not values:
|
||||
return domain
|
||||
operand = values
|
||||
# basic case
|
||||
return domain | Domain('activity_ids', 'any', [('active', 'in', [True, False]), ('user_id', operator, operand)])
|
||||
|
||||
@api.model
|
||||
def _search_activity_type_id(self, operator, operand):
|
||||
if operator in Domain.NEGATIVE_OPERATORS:
|
||||
return NotImplemented
|
||||
return [('activity_ids.activity_type_id', operator, operand)]
|
||||
|
||||
@api.model
|
||||
def _search_activity_summary(self, operator, operand):
|
||||
if operator in Domain.NEGATIVE_OPERATORS:
|
||||
return NotImplemented
|
||||
return [('activity_ids.summary', operator, operand)]
|
||||
|
||||
@api.depends('activity_ids.date_deadline', 'activity_ids.user_id')
|
||||
|
|
@ -225,116 +243,71 @@ class MailActivityMixin(models.AbstractModel):
|
|||
), False)
|
||||
|
||||
def _search_my_activity_date_deadline(self, operator, operand):
|
||||
activity_ids = self.env['mail.activity']._search([
|
||||
if operator in Domain.NEGATIVE_OPERATORS:
|
||||
return NotImplemented
|
||||
return [('activity_ids', 'any', [
|
||||
('active', '=', True), # never overdue if "done"
|
||||
('date_deadline', operator, operand),
|
||||
('res_model', '=', self._name),
|
||||
('user_id', '=', self.env.user.id)
|
||||
])
|
||||
return [('activity_ids', 'in', activity_ids)]
|
||||
])]
|
||||
|
||||
def write(self, vals):
|
||||
# Delete activities of archived record.
|
||||
if 'active' in vals and vals['active'] is False:
|
||||
self.env['mail.activity'].sudo().search(
|
||||
[('res_model', '=', self._name), ('res_id', 'in', self.ids)]
|
||||
).unlink()
|
||||
return super(MailActivityMixin, self).write(vals)
|
||||
def _read_group_groupby(self, alias, groupby_spec, query):
|
||||
if groupby_spec != 'activity_state':
|
||||
return super()._read_group_groupby(alias, groupby_spec, query)
|
||||
self._check_field_access(self._fields['activity_state'], 'read')
|
||||
|
||||
def unlink(self):
|
||||
""" Override unlink to delete records activities through (res_model, res_id). """
|
||||
record_ids = self.ids
|
||||
result = super(MailActivityMixin, self).unlink()
|
||||
self.env['mail.activity'].sudo().search(
|
||||
[('res_model', '=', self._name), ('res_id', 'in', record_ids)]
|
||||
).unlink()
|
||||
return result
|
||||
# if already grouped by activity_state, do not add the join again
|
||||
alias = query.make_alias(self._table, 'last_activity_state')
|
||||
if alias in query._joins:
|
||||
return SQL.identifier(alias, 'activity_state')
|
||||
|
||||
def _read_progress_bar(self, domain, group_by, progress_bar):
|
||||
group_by_fname = group_by.partition(':')[0]
|
||||
if not (progress_bar['field'] == 'activity_state' and self._fields[group_by_fname].store):
|
||||
return super()._read_progress_bar(domain, group_by, progress_bar)
|
||||
|
||||
# optimization for 'activity_state'
|
||||
|
||||
# explicitly check access rights, since we bypass the ORM
|
||||
self.check_access_rights('read')
|
||||
self._flush_search(domain, fields=[group_by_fname], order='id')
|
||||
self.env['mail.activity'].flush_model(['res_model', 'res_id', 'user_id', 'date_deadline'])
|
||||
self.env['res.users'].flush_model(['partner_id'])
|
||||
self.env['res.partner'].flush_model(['tz'])
|
||||
|
||||
query = self._where_calc(domain)
|
||||
self._apply_ir_rules(query, 'read')
|
||||
gb = group_by.partition(':')[0]
|
||||
annotated_groupbys = [
|
||||
self._read_group_process_groupby(gb, query)
|
||||
for gb in [group_by, 'activity_state']
|
||||
]
|
||||
groupby_dict = {gb['groupby']: gb for gb in annotated_groupbys}
|
||||
for gb in annotated_groupbys:
|
||||
if gb['field'] == 'activity_state':
|
||||
gb['qualified_field'] = '"_last_activity_state"."activity_state"'
|
||||
groupby_terms, _orderby_terms = self._read_group_prepare('activity_state', [], annotated_groupbys, query)
|
||||
select_terms = [
|
||||
'%s as "%s"' % (gb['qualified_field'], gb['groupby'])
|
||||
for gb in annotated_groupbys
|
||||
]
|
||||
from_clause, where_clause, where_params = query.get_sql()
|
||||
tz = self._context.get('tz') or self.env.user.tz or 'UTC'
|
||||
select_query = """
|
||||
SELECT 1 AS id, count(*) AS "__count", {fields}
|
||||
FROM {from_clause}
|
||||
JOIN (
|
||||
SELECT res_id,
|
||||
tz = 'UTC'
|
||||
if self.env.context.get('tz') in pytz.all_timezones_set:
|
||||
tz = self.env.context['tz']
|
||||
|
||||
sql_join = SQL(
|
||||
"""
|
||||
(SELECT res_id,
|
||||
CASE
|
||||
WHEN min(date_deadline - (now() AT TIME ZONE COALESCE(res_partner.tz, %s))::date) > 0 THEN 'planned'
|
||||
WHEN min(date_deadline - (now() AT TIME ZONE COALESCE(res_partner.tz, %s))::date) < 0 THEN 'overdue'
|
||||
WHEN min(date_deadline - (now() AT TIME ZONE COALESCE(res_partner.tz, %s))::date) = 0 THEN 'today'
|
||||
WHEN min(EXTRACT(day from (mail_activity.date_deadline - DATE_TRUNC('day', %(today_utc)s AT TIME ZONE COALESCE(mail_activity.user_tz, %(tz)s))))) > 0 THEN 'planned'
|
||||
WHEN min(EXTRACT(day from (mail_activity.date_deadline - DATE_TRUNC('day', %(today_utc)s AT TIME ZONE COALESCE(mail_activity.user_tz, %(tz)s))))) < 0 THEN 'overdue'
|
||||
WHEN min(EXTRACT(day from (mail_activity.date_deadline - DATE_TRUNC('day', %(today_utc)s AT TIME ZONE COALESCE(mail_activity.user_tz, %(tz)s))))) = 0 THEN 'today'
|
||||
ELSE null
|
||||
END AS activity_state
|
||||
FROM mail_activity
|
||||
JOIN res_users ON (res_users.id = mail_activity.user_id)
|
||||
JOIN res_partner ON (res_partner.id = res_users.partner_id)
|
||||
WHERE res_model = '{model}'
|
||||
GROUP BY res_id
|
||||
) AS "_last_activity_state" ON ("{table}".id = "_last_activity_state".res_id)
|
||||
WHERE {where_clause}
|
||||
GROUP BY {group_by}
|
||||
""".format(
|
||||
fields=', '.join(select_terms),
|
||||
from_clause=from_clause,
|
||||
model=self._name,
|
||||
table=self._table,
|
||||
where_clause=where_clause or '1=1',
|
||||
group_by=', '.join(groupby_terms),
|
||||
FROM mail_activity
|
||||
WHERE res_model = %(res_model)s AND mail_activity.active = true
|
||||
GROUP BY res_id)
|
||||
""",
|
||||
res_model=self._name,
|
||||
today_utc=pytz.utc.localize(datetime.utcnow()),
|
||||
tz=tz,
|
||||
)
|
||||
num_from_params = from_clause.count('%s')
|
||||
where_params[num_from_params:num_from_params] = [tz] * 3 # timezone after from parameters
|
||||
self.env.cr.execute(select_query, where_params)
|
||||
fetched_data = self.env.cr.dictfetchall()
|
||||
self._read_group_resolve_many2x_fields(fetched_data, annotated_groupbys)
|
||||
data = [
|
||||
{key: self._read_group_prepare_data(key, val, groupby_dict)
|
||||
for key, val in row.items()}
|
||||
for row in fetched_data
|
||||
]
|
||||
return [
|
||||
self._read_group_format_result(vals, annotated_groupbys, [group_by], domain)
|
||||
for vals in data
|
||||
]
|
||||
alias = query.left_join(self._table, "id", sql_join, "res_id", "last_activity_state")
|
||||
|
||||
def toggle_active(self):
|
||||
""" Before archiving the record we should also remove its ongoing
|
||||
activities. Otherwise they stay in the systray and concerning archived
|
||||
records it makes no sense. """
|
||||
record_to_deactivate = self.filtered(lambda rec: rec[rec._active_name])
|
||||
if record_to_deactivate:
|
||||
# use a sudo to bypass every access rights; all activities should be removed
|
||||
self.env['mail.activity'].sudo().search([
|
||||
('res_model', '=', self._name),
|
||||
('res_id', 'in', record_to_deactivate.ids)
|
||||
]).unlink()
|
||||
return super(MailActivityMixin, self).toggle_active()
|
||||
return SQL.identifier(alias, 'activity_state')
|
||||
|
||||
# Reschedules next my activity to Today
|
||||
def action_reschedule_my_next_today(self):
|
||||
self.ensure_one()
|
||||
my_next_activity = self.activity_ids.filtered(lambda activity: activity.user_id == self.env.user)[:1]
|
||||
my_next_activity.action_reschedule_today()
|
||||
|
||||
# Reschedules next my activity to Tomorrow
|
||||
def action_reschedule_my_next_tomorrow(self):
|
||||
self.ensure_one()
|
||||
my_next_activity = self.activity_ids.filtered(lambda activity: activity.user_id == self.env.user)[:1]
|
||||
my_next_activity.action_reschedule_tomorrow()
|
||||
|
||||
# Reschedules next my activity to Next Monday
|
||||
def action_reschedule_my_next_nextweek(self):
|
||||
self.ensure_one()
|
||||
my_next_activity = self.activity_ids.filtered(lambda activity: activity.user_id == self.env.user)[:1]
|
||||
my_next_activity.action_reschedule_nextweek()
|
||||
|
||||
def activity_send_mail(self, template_id):
|
||||
""" Automatically send an email based on the given mail.template, given
|
||||
|
|
@ -342,14 +315,13 @@ class MailActivityMixin(models.AbstractModel):
|
|||
template = self.env['mail.template'].browse(template_id).exists()
|
||||
if not template:
|
||||
return False
|
||||
for record in self:
|
||||
record.message_post_with_template(
|
||||
template_id,
|
||||
composition_mode='comment'
|
||||
)
|
||||
self.message_post_with_source(
|
||||
template,
|
||||
subtype_xmlid='mail.mt_comment',
|
||||
)
|
||||
return True
|
||||
|
||||
def activity_search(self, act_type_xmlids='', user_id=None, additional_domain=None):
|
||||
def activity_search(self, act_type_xmlids='', user_id=None, additional_domain=None, only_automated=True):
|
||||
""" Search automated activities on current record set, given a list of activity
|
||||
types xml IDs. It is useful when dealing with specific types involved in automatic
|
||||
activities management.
|
||||
|
|
@ -357,6 +329,7 @@ class MailActivityMixin(models.AbstractModel):
|
|||
:param act_type_xmlids: list of activity types xml IDs
|
||||
:param user_id: if set, restrict to activities of that user_id;
|
||||
:param additional_domain: if set, filter on that domain;
|
||||
:param only_automated: if unset, search for all activities, not only automated ones;
|
||||
"""
|
||||
if self.env.context.get('mail_activity_automation_skip'):
|
||||
return self.env['mail.activity']
|
||||
|
|
@ -366,18 +339,18 @@ class MailActivityMixin(models.AbstractModel):
|
|||
if not any(activity_types_ids):
|
||||
return self.env['mail.activity']
|
||||
|
||||
domain = [
|
||||
'&', '&', '&',
|
||||
domain = Domain([
|
||||
('res_model', '=', self._name),
|
||||
('res_id', 'in', self.ids),
|
||||
('automated', '=', True),
|
||||
('activity_type_id', 'in', activity_types_ids)
|
||||
]
|
||||
])
|
||||
|
||||
if only_automated:
|
||||
domain &= Domain('automated', '=', True)
|
||||
if user_id:
|
||||
domain = expression.AND([domain, [('user_id', '=', user_id)]])
|
||||
domain &= Domain('user_id', '=', user_id)
|
||||
if additional_domain:
|
||||
domain = expression.AND([domain, additional_domain])
|
||||
domain &= Domain(additional_domain)
|
||||
|
||||
return self.env['mail.activity'].search(domain)
|
||||
|
||||
|
|
@ -388,6 +361,9 @@ class MailActivityMixin(models.AbstractModel):
|
|||
It is useful to avoid having various "env.ref" in the code and allow
|
||||
to let the mixin handle access rights.
|
||||
|
||||
Note that unless specified otherwise in act_values, the activities created
|
||||
will have their "automated" field set to True.
|
||||
|
||||
:param date_deadline: the day the activity must be scheduled on
|
||||
the timezone of the user must be considered to set the correct deadline
|
||||
"""
|
||||
|
|
@ -400,13 +376,19 @@ class MailActivityMixin(models.AbstractModel):
|
|||
_logger.warning("Scheduled deadline should be a date (got %s)", date_deadline)
|
||||
if act_type_xmlid:
|
||||
activity_type_id = self.env['ir.model.data']._xmlid_to_res_id(act_type_xmlid, raise_if_not_found=False)
|
||||
if activity_type_id:
|
||||
activity_type = self.env['mail.activity.type'].browse(activity_type_id)
|
||||
else:
|
||||
activity_type = self._default_activity_type()
|
||||
else:
|
||||
activity_type_id = act_values.get('activity_type_id', False)
|
||||
activity_type = self.env['mail.activity.type'].browse(activity_type_id) if activity_type_id else self.env['mail.activity.type']
|
||||
activity_type = self.env['mail.activity.type'].browse(activity_type_id)
|
||||
invalid_model = activity_type.res_model and activity_type.res_model != self._name
|
||||
if not activity_type or invalid_model:
|
||||
if invalid_model:
|
||||
_logger.warning(
|
||||
'Invalid activity type model %s used on %s (tried with xml id %s)',
|
||||
activity_type.res_model, self._name, act_type_xmlid or '',
|
||||
)
|
||||
# TODO master: reset invalid model to default type, keep it for stable as not harmful
|
||||
if not activity_type:
|
||||
activity_type = self._default_activity_type()
|
||||
|
||||
model_id = self.env['ir.model']._get(self._name).id
|
||||
create_vals_list = []
|
||||
|
|
@ -421,8 +403,8 @@ class MailActivityMixin(models.AbstractModel):
|
|||
'res_id': record.id,
|
||||
}
|
||||
create_vals.update(act_values)
|
||||
if not create_vals.get('user_id'):
|
||||
create_vals['user_id'] = activity_type.default_user_id.id or self.env.uid
|
||||
if not create_vals.get('user_id') and activity_type.default_user_id:
|
||||
create_vals['user_id'] = activity_type.default_user_id.id
|
||||
create_vals_list.append(create_vals)
|
||||
return self.env['mail.activity'].create(create_vals_list)
|
||||
|
||||
|
|
@ -448,7 +430,7 @@ class MailActivityMixin(models.AbstractModel):
|
|||
activities += record.activity_schedule(act_type_xmlid=act_type_xmlid, date_deadline=date_deadline, summary=summary, note=note, **act_values)
|
||||
return activities
|
||||
|
||||
def activity_reschedule(self, act_type_xmlids, user_id=None, date_deadline=None, new_user_id=None):
|
||||
def activity_reschedule(self, act_type_xmlids, user_id=None, date_deadline=None, new_user_id=None, only_automated=True):
|
||||
""" Reschedule some automated activities. Activities to reschedule are
|
||||
selected based on type xml ids and optionally by user. Purpose is to be
|
||||
able to
|
||||
|
|
@ -464,7 +446,7 @@ class MailActivityMixin(models.AbstractModel):
|
|||
activity_types_ids = [act_type_id for act_type_id in activity_types_ids if act_type_id]
|
||||
if not any(activity_types_ids):
|
||||
return False
|
||||
activities = self.activity_search(act_type_xmlids, user_id=user_id)
|
||||
activities = self.activity_search(act_type_xmlids, user_id=user_id, only_automated=only_automated)
|
||||
if activities:
|
||||
write_vals = {}
|
||||
if date_deadline:
|
||||
|
|
@ -474,7 +456,7 @@ class MailActivityMixin(models.AbstractModel):
|
|||
activities.write(write_vals)
|
||||
return activities
|
||||
|
||||
def activity_feedback(self, act_type_xmlids, user_id=None, feedback=None, attachment_ids=None):
|
||||
def activity_feedback(self, act_type_xmlids, user_id=None, feedback=None, attachment_ids=None, only_automated=True):
|
||||
""" Set activities as done, limiting to some activity types and
|
||||
optionally to a given user. """
|
||||
if self.env.context.get('mail_activity_automation_skip'):
|
||||
|
|
@ -485,14 +467,14 @@ class MailActivityMixin(models.AbstractModel):
|
|||
activity_types_ids = [act_type_id for act_type_id in activity_types_ids if act_type_id]
|
||||
if not any(activity_types_ids):
|
||||
return False
|
||||
activities = self.activity_search(act_type_xmlids, user_id=user_id)
|
||||
activities = self.activity_search(act_type_xmlids, user_id=user_id, only_automated=only_automated)
|
||||
if activities:
|
||||
activities.action_feedback(feedback=feedback, attachment_ids=attachment_ids)
|
||||
return True
|
||||
|
||||
def activity_unlink(self, act_type_xmlids, user_id=None):
|
||||
def activity_unlink(self, act_type_xmlids, user_id=None, only_automated=True):
|
||||
""" Unlink activities, limiting to some activity types and optionally
|
||||
to a given user. """
|
||||
to a given user. """
|
||||
if self.env.context.get('mail_activity_automation_skip'):
|
||||
return False
|
||||
|
||||
|
|
@ -501,5 +483,5 @@ class MailActivityMixin(models.AbstractModel):
|
|||
activity_types_ids = [act_type_id for act_type_id in activity_types_ids if act_type_id]
|
||||
if not any(activity_types_ids):
|
||||
return False
|
||||
self.activity_search(act_type_xmlids, user_id=user_id).unlink()
|
||||
self.activity_search(act_type_xmlids, user_id=user_id, only_automated=only_automated).unlink()
|
||||
return True
|
||||
|
|
|
|||
68
odoo-bringout-oca-ocb-mail/mail/models/mail_activity_plan.py
Normal file
68
odoo-bringout-oca-ocb-mail/mail/models/mail_activity_plan.py
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
|
||||
|
||||
class MailActivityPlan(models.Model):
|
||||
_name = 'mail.activity.plan'
|
||||
_description = 'Activity Plan'
|
||||
_order = 'id DESC'
|
||||
|
||||
def _get_model_selection(self):
|
||||
return [
|
||||
(model.model, model.name)
|
||||
for model in self.env['ir.model'].sudo().search(
|
||||
['&', ('is_mail_activity', '=', True), ('transient', '=', False)])
|
||||
]
|
||||
|
||||
name = fields.Char('Name', required=True)
|
||||
company_id = fields.Many2one(
|
||||
'res.company', default=lambda self: self.env.company)
|
||||
template_ids = fields.One2many(
|
||||
'mail.activity.plan.template', 'plan_id', string='Activities',
|
||||
copy=True)
|
||||
active = fields.Boolean(default=True)
|
||||
res_model_id = fields.Many2one(
|
||||
'ir.model', string='Applies to',
|
||||
compute="_compute_res_model_id", compute_sudo=True,
|
||||
ondelete="cascade", precompute=True, readonly=False, required=True, store=True)
|
||||
res_model = fields.Selection(
|
||||
selection=_get_model_selection, string="Model", required=True,
|
||||
help='Specify a model if the activity should be specific to a model'
|
||||
' and not available when managing activities for other models.')
|
||||
steps_count = fields.Integer(compute='_compute_steps_count')
|
||||
has_user_on_demand = fields.Boolean('Has on demand responsible', compute='_compute_has_user_on_demand')
|
||||
|
||||
@api.depends('res_model')
|
||||
def _compute_res_model_id(self):
|
||||
for plan in self:
|
||||
if plan.res_model:
|
||||
# New records may not have the required "res_model" field set yet
|
||||
# (in onchange)
|
||||
plan.res_model_id = self.env['ir.model']._get_id(plan.res_model)
|
||||
else:
|
||||
plan.res_model_id = False
|
||||
|
||||
@api.constrains('res_model')
|
||||
def _check_res_model_compatibility_with_templates(self):
|
||||
self.template_ids._check_activity_type_res_model()
|
||||
|
||||
@api.depends('template_ids')
|
||||
def _compute_steps_count(self):
|
||||
for plan in self:
|
||||
plan.steps_count = len(plan.template_ids)
|
||||
|
||||
@api.depends('template_ids.responsible_type')
|
||||
def _compute_has_user_on_demand(self):
|
||||
self.has_user_on_demand = False
|
||||
for plan in self.filtered('template_ids'):
|
||||
plan.has_user_on_demand = any(template.responsible_type == 'on_demand' for template in plan.template_ids)
|
||||
|
||||
def copy_data(self, default=None):
|
||||
default = dict(default or {})
|
||||
vals_list = super().copy_data(default=default)
|
||||
if 'name' not in default:
|
||||
for plan, vals in zip(self, vals_list):
|
||||
vals['name'] = _("%s (copy)", plan.name)
|
||||
return vals_list
|
||||
|
|
@ -0,0 +1,168 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
|
||||
class MailActivityPlanTemplate(models.Model):
|
||||
_name = 'mail.activity.plan.template'
|
||||
_description = 'Activity plan template'
|
||||
_order = 'sequence, id'
|
||||
_rec_name = 'summary'
|
||||
|
||||
plan_id = fields.Many2one(
|
||||
'mail.activity.plan', string="Plan",
|
||||
ondelete='cascade', required=True, index=True)
|
||||
res_model = fields.Selection(related="plan_id.res_model")
|
||||
company_id = fields.Many2one(related='plan_id.company_id')
|
||||
sequence = fields.Integer(default=10)
|
||||
activity_type_id = fields.Many2one(
|
||||
'mail.activity.type', 'Activity Type',
|
||||
default=lambda self: self.env.ref('mail.mail_activity_data_todo'),
|
||||
domain="['|', ('res_model', '=', False), '&', ('res_model', '!=', False), ('res_model', '=', parent.res_model)]",
|
||||
ondelete='restrict', required=True
|
||||
)
|
||||
# Activity type delay fields are ignored in favor of these
|
||||
delay_count = fields.Integer(
|
||||
'Interval', default=0,
|
||||
help='Number of days/week/month before executing the action after or before the scheduled plan date.')
|
||||
delay_unit = fields.Selection([
|
||||
('days', 'days'),
|
||||
('weeks', 'weeks'),
|
||||
('months', 'months')],
|
||||
string="Delay units", help="Unit of delay", required=True, default='days')
|
||||
delay_from = fields.Selection([
|
||||
('before_plan_date', 'Before Plan Date'),
|
||||
('after_plan_date', 'After Plan Date'),
|
||||
],
|
||||
string='Trigger', default="before_plan_date", required=True)
|
||||
icon = fields.Char('Icon', related='activity_type_id.icon', readonly=True)
|
||||
summary = fields.Char('Summary', compute="_compute_summary", store=True, readonly=False)
|
||||
responsible_type = fields.Selection([
|
||||
('on_demand', 'Ask at launch'),
|
||||
('other', 'Default user'),
|
||||
], default='on_demand', string='Assignment', required=True,
|
||||
compute="_compute_responsible_type", store=True, readonly=False)
|
||||
responsible_id = fields.Many2one(
|
||||
'res.users',
|
||||
'Assigned to',
|
||||
check_company=True, compute="_compute_responsible_id", store=True, readonly=False)
|
||||
note = fields.Html('Note', compute="_compute_note", store=True, readonly=False)
|
||||
next_activity_ids = fields.Many2many(
|
||||
'mail.activity.type', string='Next Activities',
|
||||
compute='_compute_next_activity_ids', readonly=False, store=True)
|
||||
|
||||
@api.constrains('activity_type_id', 'plan_id')
|
||||
def _check_activity_type_res_model(self):
|
||||
""" Check that the plan models are compatible with the template activity
|
||||
type model. Note that it depends also on "activity_type_id.res_model" and
|
||||
"plan_id.res_model". That's why this method is called by those models
|
||||
when the mentioned fields are updated.
|
||||
"""
|
||||
for template in self.filtered(lambda tpl: tpl.activity_type_id.res_model):
|
||||
if template.activity_type_id.res_model != template.plan_id.res_model:
|
||||
raise ValidationError(
|
||||
_('The activity type "%(activity_type_name)s" is not compatible with the plan "%(plan_name)s"'
|
||||
' because it is limited to the model "%(activity_type_model)s".',
|
||||
activity_type_name=template.activity_type_id.name,
|
||||
activity_type_model=template.activity_type_id.res_model,
|
||||
plan_name=template.plan_id.name,
|
||||
)
|
||||
)
|
||||
|
||||
@api.constrains('responsible_id', 'responsible_type')
|
||||
def _check_responsible(self):
|
||||
""" Ensure that responsible_id is set when responsible is set to "other". """
|
||||
for template in self:
|
||||
if template.responsible_type == 'other' and not template.responsible_id:
|
||||
raise ValidationError(_('When selecting "Default user" assignment, you must specify a responsible.'))
|
||||
|
||||
@api.depends('activity_type_id')
|
||||
def _compute_next_activity_ids(self):
|
||||
""" Update next activities only when changing activity type on template.
|
||||
Any change on type configuration should not be propagated. """
|
||||
for template in self:
|
||||
activity_type = template.activity_type_id
|
||||
if activity_type.triggered_next_type_id:
|
||||
template.next_activity_ids = activity_type.triggered_next_type_id.ids
|
||||
elif activity_type.suggested_next_type_ids:
|
||||
template.next_activity_ids = activity_type.suggested_next_type_ids.ids
|
||||
else:
|
||||
template.next_activity_ids = False
|
||||
|
||||
@api.depends('activity_type_id')
|
||||
def _compute_note(self):
|
||||
for template in self:
|
||||
template.note = template.activity_type_id.default_note
|
||||
|
||||
@api.depends('activity_type_id', 'responsible_type')
|
||||
def _compute_responsible_id(self):
|
||||
for template in self:
|
||||
template.responsible_id = template.activity_type_id.default_user_id
|
||||
if template.responsible_type != 'other' and template.responsible_id:
|
||||
template.responsible_id = False
|
||||
|
||||
@api.depends('activity_type_id')
|
||||
def _compute_responsible_type(self):
|
||||
for template in self:
|
||||
if template.activity_type_id.default_user_id:
|
||||
template.responsible_type = 'other'
|
||||
else:
|
||||
template.responsible_type = 'on_demand'
|
||||
|
||||
@api.depends('activity_type_id')
|
||||
def _compute_summary(self):
|
||||
for template in self:
|
||||
template.summary = template.activity_type_id.summary
|
||||
|
||||
def _get_date_deadline(self, base_date=False):
|
||||
""" Return the deadline of the activity to be created given the base date. """
|
||||
self.ensure_one()
|
||||
base_date = base_date or fields.Date.context_today(self)
|
||||
delta = relativedelta(**{self.delay_unit: self.delay_count})
|
||||
if self.delay_from == 'after_plan_date':
|
||||
return base_date + delta
|
||||
return base_date - delta
|
||||
|
||||
def _determine_responsible(self, on_demand_responsible, applied_on_record):
|
||||
""" Determine the responsible for the activity based on the template
|
||||
for the given record and on demand responsible.
|
||||
|
||||
Based on the responsible_type, this method will determine the responsible
|
||||
to set on the activity for the given record (applied_on_record).
|
||||
Following the responsible_type:
|
||||
- on_demand: on_demand_responsible is used as responsible (allow to set it
|
||||
when using the template)
|
||||
- other: the responsible field is used (preset user at the template level)
|
||||
|
||||
Other module can extend it and base the responsible on the record on which
|
||||
the activity will be set. Ex.: 'coach' on employee record will assign the
|
||||
coach user of the employee.
|
||||
|
||||
:param <res.user> on_demand_responsible: on demand responsible
|
||||
:param recordset applied_on_record: the record on which the activity
|
||||
will be created
|
||||
:returns: {'responsible': <res.user>, error: str|False}
|
||||
:rtype: dict
|
||||
"""
|
||||
self.ensure_one()
|
||||
error = False
|
||||
warning = False
|
||||
if self.responsible_type == 'other':
|
||||
responsible = self.responsible_id
|
||||
elif self.responsible_type == 'on_demand':
|
||||
responsible = on_demand_responsible
|
||||
if not responsible:
|
||||
error = _('No responsible specified for %(activity_type_name)s: %(activity_summary)s.',
|
||||
activity_type_name=self.activity_type_id.name,
|
||||
activity_summary=self.summary or '-')
|
||||
else:
|
||||
raise ValueError(f'Invalid responsible value {self.responsible_type}.')
|
||||
return {
|
||||
'responsible': responsible,
|
||||
'error': error,
|
||||
'warning': warning,
|
||||
}
|
||||
|
|
@ -1,7 +1,10 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import api, exceptions, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
|
||||
class MailActivityType(models.Model):
|
||||
|
|
@ -11,8 +14,8 @@ class MailActivityType(models.Model):
|
|||
case res_model field should be used. """
|
||||
_name = 'mail.activity.type'
|
||||
_description = 'Activity Type'
|
||||
_rec_name = 'name'
|
||||
_order = 'sequence, id'
|
||||
_rec_name = 'name'
|
||||
|
||||
def _get_model_selection(self):
|
||||
return [
|
||||
|
|
@ -35,7 +38,7 @@ class MailActivityType(models.Model):
|
|||
('months', 'months')], string="Delay units", help="Unit of delay", required=True, default='days')
|
||||
delay_label = fields.Char(compute='_compute_delay_label')
|
||||
delay_from = fields.Selection([
|
||||
('current_date', 'after completion date'),
|
||||
('current_date', 'after previous activity completion date'),
|
||||
('previous_activity', 'after previous activity deadline')], string="Delay Type", help="Type of delay", required=True, default='previous_activity')
|
||||
icon = fields.Char('Icon', help="Font awesome icon e.g. fa-tasks")
|
||||
decoration_type = fields.Selection([
|
||||
|
|
@ -77,6 +80,11 @@ class MailActivityType(models.Model):
|
|||
help='Technical field to keep track of the model at the start of editing to support UX related behaviour')
|
||||
res_model_change = fields.Boolean(string="Model has change", default=False, store=False)
|
||||
|
||||
@api.constrains('res_model')
|
||||
def _check_activity_type_res_model(self):
|
||||
self.env['mail.activity.plan.template'].search(
|
||||
[('activity_type_id', 'in', self.ids)])._check_activity_type_res_model()
|
||||
|
||||
@api.onchange('res_model')
|
||||
def _onchange_res_model(self):
|
||||
self.mail_template_ids = self.sudo().mail_template_ids.filtered(lambda template: template.model_id.model == self.res_model)
|
||||
|
|
@ -119,3 +127,75 @@ class MailActivityType(models.Model):
|
|||
activity_type.chaining_type = 'trigger'
|
||||
else:
|
||||
activity_type.chaining_type = 'suggest'
|
||||
|
||||
def write(self, vals):
|
||||
# Protect some master types against model change when they are used
|
||||
# as default in apps, in business flows, plans, ...
|
||||
if 'res_model' in vals:
|
||||
xmlid_to_model = {
|
||||
xmlid: info['res_model']
|
||||
for xmlid, info in self._get_model_info_by_xmlid().items()
|
||||
}
|
||||
modified = self.browse()
|
||||
for xml_id, model in xmlid_to_model.items():
|
||||
activity_type = self.env.ref(xml_id, raise_if_not_found=False)
|
||||
# beware '' and False for void res_model
|
||||
if activity_type and (vals['res_model'] or False) != (model or False) and activity_type in self:
|
||||
modified += activity_type
|
||||
if modified:
|
||||
raise exceptions.UserError(
|
||||
_('You cannot modify %(activities_names)s target model as they are are required in various apps.',
|
||||
activities_names=', '.join(act.name for act in modified),
|
||||
))
|
||||
return super().write(vals)
|
||||
|
||||
@api.ondelete(at_uninstall=False)
|
||||
def _unlink_except_todo(self):
|
||||
master_data = self.browse()
|
||||
for xml_id in [xmlid for xmlid, info in self._get_model_info_by_xmlid().items() if info['unlink'] is False]:
|
||||
activity_type = self.env.ref(xml_id, raise_if_not_found=False)
|
||||
if activity_type and activity_type in self:
|
||||
master_data += activity_type
|
||||
if master_data:
|
||||
raise exceptions.UserError(
|
||||
_('You cannot delete %(activity_names)s as it is required in various apps.',
|
||||
activity_names=', '.join(act.name for act in master_data),
|
||||
))
|
||||
|
||||
def action_archive(self):
|
||||
if self.env.ref('mail.mail_activity_data_todo') in self:
|
||||
raise UserError(_("The 'To-Do' activity type is used to create reminders from the top bar menu and the command palette. Consequently, it cannot be archived or deleted."))
|
||||
return super().action_archive()
|
||||
|
||||
def unlink(self):
|
||||
""" When removing an activity type, put activities into a Todo. """
|
||||
todo_type = self.env.ref('mail.mail_activity_data_todo')
|
||||
self.env['mail.activity'].search([('activity_type_id', 'in', self.ids)]).write({
|
||||
'activity_type_id': todo_type.id,
|
||||
})
|
||||
return super().unlink()
|
||||
|
||||
def _get_date_deadline(self):
|
||||
""" Return the activity deadline computed from today or from activity_previous_deadline context variable. """
|
||||
self.ensure_one()
|
||||
if self.delay_from == 'previous_activity' and self.env.context.get('activity_previous_deadline'):
|
||||
base = fields.Date.from_string(self.env.context.get('activity_previous_deadline'))
|
||||
else:
|
||||
base = fields.Date.context_today(self)
|
||||
return base + relativedelta(**{self.delay_unit: self.delay_count})
|
||||
|
||||
@api.model
|
||||
def _get_model_info_by_xmlid(self):
|
||||
""" Get model info based on xml ids. """
|
||||
return {
|
||||
# generic call, used notably in VOIP, ... no unlink, necessary for VOIP
|
||||
'mail.mail_activity_data_call': {'res_model': False, 'unlink': False},
|
||||
# generic meeting, used in calendar, hr, ... no unlink, necessary for appointment, appraisals
|
||||
'mail.mail_activity_data_meeting': {'res_model': False, 'unlink': False},
|
||||
# generic todo, used in plans, ... no unlink, basic generic fallback data
|
||||
'mail.mail_activity_data_todo': {'res_model': False, 'unlink': False},
|
||||
# generic upload, used in documents, accounting, ...
|
||||
'mail.mail_activity_data_upload_document': {'res_model': False, 'unlink': True},
|
||||
# generic warning, used in plans, business flows, ...
|
||||
'mail.mail_activity_data_warning': {'res_model': False, 'unlink': True},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,11 +3,12 @@
|
|||
|
||||
import ast
|
||||
import re
|
||||
|
||||
from collections import defaultdict
|
||||
from markupsafe import Markup
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.exceptions import ValidationError, UserError
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import is_html_empty, remove_accents
|
||||
|
||||
# see rfc5322 section 3.2.3
|
||||
|
|
@ -15,11 +16,11 @@ atext = r"[a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]"
|
|||
dot_atom_text = re.compile(r"^%s+(\.%s+)*$" % (atext, atext))
|
||||
|
||||
|
||||
class Alias(models.Model):
|
||||
class MailAlias(models.Model):
|
||||
"""A Mail Alias is a mapping of an email address with a given Odoo Document
|
||||
model. It is used by Odoo's mail gateway when processing incoming emails
|
||||
sent to the system. If the recipient address (To) of the message matches
|
||||
a Mail Alias, the message will be either processed following the rules
|
||||
a Mail MailAlias, the message will be either processed following the rules
|
||||
of that alias. If the message is a reply it will be attached to the
|
||||
existing discussion on the corresponding record, otherwise a new
|
||||
record of the corresponding model will be created.
|
||||
|
|
@ -30,10 +31,20 @@ class Alias(models.Model):
|
|||
"""
|
||||
_name = 'mail.alias'
|
||||
_description = "Email Aliases"
|
||||
_rec_name = 'alias_name'
|
||||
_order = 'alias_model_id, alias_name'
|
||||
_rec_name = 'alias_name'
|
||||
_rec_names_search = ['alias_name', 'alias_domain']
|
||||
|
||||
alias_name = fields.Char('Alias Name', copy=False, help="The name of the email alias, e.g. 'jobs' if you want to catch emails for <jobs@example.odoo.com>")
|
||||
# email definition
|
||||
alias_name = fields.Char(
|
||||
'Alias Name', copy=False,
|
||||
help="The name of the email alias, e.g. 'jobs' if you want to catch emails for <jobs@example.odoo.com>")
|
||||
alias_full_name = fields.Char('Alias Email', compute='_compute_alias_full_name', store=True, index='btree_not_null')
|
||||
alias_domain_id = fields.Many2one(
|
||||
'mail.alias.domain', string='Alias Domain', ondelete='restrict',
|
||||
default=lambda self: self.env.company.alias_domain_id)
|
||||
alias_domain = fields.Char('Alias domain name', related='alias_domain_id.name')
|
||||
# target: create / update
|
||||
alias_model_id = fields.Many2one('ir.model', 'Aliased Model', required=True, ondelete="cascade",
|
||||
help="The model (Odoo Document Kind) to which this alias "
|
||||
"corresponds. Any incoming email that does not reply to an "
|
||||
|
|
@ -42,11 +53,6 @@ class Alias(models.Model):
|
|||
# hack to only allow selecting mail_thread models (we might
|
||||
# (have a few false positives, though)
|
||||
domain="[('field_id.name', '=', 'message_ids')]")
|
||||
alias_user_id = fields.Many2one('res.users', 'Owner', default=lambda self: self.env.user,
|
||||
help="The owner of records created upon receiving emails on this alias. "
|
||||
"If this field is not set the system will attempt to find the right owner "
|
||||
"based on the sender (From) address, or will use the Administrator account "
|
||||
"if no system user is found for that address.")
|
||||
alias_defaults = fields.Text('Default Values', required=True, default='{}',
|
||||
help="A Python dictionary that will be evaluated to provide "
|
||||
"default values when creating new records for this alias.")
|
||||
|
|
@ -54,165 +60,353 @@ class Alias(models.Model):
|
|||
'Record Thread ID',
|
||||
help="Optional ID of a thread (record) to which all incoming messages will be attached, even "
|
||||
"if they did not reply to it. If set, this will disable the creation of new records completely.")
|
||||
alias_domain = fields.Char('Alias domain', compute='_compute_alias_domain')
|
||||
# owner
|
||||
alias_parent_model_id = fields.Many2one(
|
||||
'ir.model', 'Parent Model',
|
||||
help="Parent model holding the alias. The model holding the alias reference "
|
||||
"is not necessarily the model given by alias_model_id "
|
||||
"(example: project (parent_model) and task (model))")
|
||||
alias_parent_thread_id = fields.Integer('Parent Record Thread ID', help="ID of the parent record holding the alias (example: project holding the task creation alias)")
|
||||
alias_contact = fields.Selection([
|
||||
('everyone', 'Everyone'),
|
||||
('partners', 'Authenticated Partners'),
|
||||
('followers', 'Followers only')], default='everyone',
|
||||
alias_parent_thread_id = fields.Integer(
|
||||
'Parent Record Thread ID',
|
||||
help="ID of the parent record holding the alias (example: project holding the task creation alias)")
|
||||
# incoming configuration (mailgateway)
|
||||
alias_contact = fields.Selection(
|
||||
[
|
||||
('everyone', 'Everyone'),
|
||||
('partners', 'Authenticated Partners'),
|
||||
('followers', 'Followers only')
|
||||
], default='everyone',
|
||||
string='Alias Contact Security', required=True,
|
||||
help="Policy to post a message on the document using the mailgateway.\n"
|
||||
"- everyone: everyone can post\n"
|
||||
"- partners: only authenticated partners\n"
|
||||
"- followers: only followers of the related document or members of following channels\n")
|
||||
alias_incoming_local = fields.Boolean('Local-part based incoming detection', default=False)
|
||||
alias_bounced_content = fields.Html(
|
||||
"Custom Bounced Message", translate=True,
|
||||
help="If set, this content will automatically be sent out to unauthorized users instead of the default message.")
|
||||
alias_status = fields.Selection(
|
||||
[
|
||||
('not_tested', 'Not Tested'),
|
||||
('valid', 'Valid'),
|
||||
('invalid', 'Invalid'),
|
||||
], compute='_compute_alias_status', store=True,
|
||||
help='Alias status assessed on the last message received.')
|
||||
|
||||
_sql_constraints = [
|
||||
('alias_unique', 'UNIQUE(alias_name)', 'Unfortunately this email alias is already used, please choose a unique one')
|
||||
]
|
||||
_name_domain_unique = models.UniqueIndex('(alias_name, COALESCE(alias_domain_id, 0))')
|
||||
|
||||
@api.constrains('alias_domain_id', 'alias_force_thread_id', 'alias_parent_model_id',
|
||||
'alias_parent_thread_id', 'alias_model_id')
|
||||
def _check_alias_domain_id_mc(self):
|
||||
""" Check for invalid alias domains based on company configuration.
|
||||
When having a parent record and/or updating an existing record alias
|
||||
domain should match the one used on the related record. """
|
||||
|
||||
# in sudo, to be able to read alias_parent_model_id (ir.model)
|
||||
tocheck = self.sudo().filtered(lambda alias: alias.alias_domain_id.company_ids)
|
||||
# transient check, mainly for tests / install
|
||||
tocheck = tocheck.filtered(lambda alias:
|
||||
(not alias.alias_model_id.model or alias.alias_model_id.model in self.env) and
|
||||
(not alias.alias_parent_model_id.model or alias.alias_parent_model_id.model in self.env)
|
||||
)
|
||||
if not tocheck:
|
||||
return
|
||||
|
||||
# helpers to find owner / target models
|
||||
def _owner_model(alias):
|
||||
return alias.alias_parent_model_id.model
|
||||
def _owner_env(alias):
|
||||
return self.env[_owner_model(alias)]
|
||||
def _target_model(alias):
|
||||
return alias.alias_model_id.model
|
||||
def _target_env(alias):
|
||||
return self.env[_target_model(alias)]
|
||||
|
||||
# fetch impacted records, classify by model
|
||||
recs_by_model = defaultdict(list)
|
||||
for alias in tocheck:
|
||||
# owner record (like 'project.project' for aliases creating new 'project.task')
|
||||
if alias.alias_parent_model_id and alias.alias_parent_thread_id:
|
||||
if _owner_env(alias)._mail_get_company_field():
|
||||
recs_by_model[_owner_model(alias)].append(alias.alias_parent_thread_id)
|
||||
# target record (like 'mail.group' updating a given group)
|
||||
if alias.alias_model_id and alias.alias_force_thread_id:
|
||||
if _target_env(alias)._mail_get_company_field():
|
||||
recs_by_model[_target_model(alias)].append(alias.alias_force_thread_id)
|
||||
|
||||
# helpers to fetch owner / target with prefetching
|
||||
def _fetch_owner(alias):
|
||||
if alias.alias_parent_thread_id in recs_by_model[alias.alias_parent_model_id.model]:
|
||||
return _owner_env(alias).with_prefetch(
|
||||
recs_by_model[_owner_model(alias)]
|
||||
).browse(alias.alias_parent_thread_id)
|
||||
return None
|
||||
def _fetch_target(alias):
|
||||
if alias.alias_force_thread_id in recs_by_model[alias.alias_model_id.model]:
|
||||
return _target_env(alias).with_prefetch(
|
||||
recs_by_model[_target_model(alias)]
|
||||
).browse(alias.alias_force_thread_id)
|
||||
return None
|
||||
|
||||
# check company domains are compatible
|
||||
for alias in tocheck:
|
||||
if owner := _fetch_owner(alias):
|
||||
company = owner[owner._mail_get_company_field()]
|
||||
if company and company.alias_domain_id != alias.alias_domain_id and alias.alias_domain_id.company_ids:
|
||||
raise ValidationError(_(
|
||||
"We could not create alias %(alias_name)s because domain "
|
||||
"%(alias_domain_name)s belongs to company %(alias_company_names)s "
|
||||
"while the owner document belongs to company %(company_name)s.",
|
||||
alias_company_names=','.join(alias.alias_domain_id.company_ids.mapped('name')),
|
||||
alias_domain_name=alias.alias_domain_id.name,
|
||||
alias_name=alias.display_name,
|
||||
company_name=company.name,
|
||||
))
|
||||
if target := _fetch_target(alias):
|
||||
company = target[target._mail_get_company_field()]
|
||||
if company and company.alias_domain_id != alias.alias_domain_id and alias.alias_domain_id.company_ids:
|
||||
raise ValidationError(_(
|
||||
"We could not create alias %(alias_name)s because domain "
|
||||
"%(alias_domain_name)s belongs to company %(alias_company_names)s "
|
||||
"while the target document belongs to company %(company_name)s.",
|
||||
alias_company_names=','.join(alias.alias_domain_id.company_ids.mapped('name')),
|
||||
alias_domain_name=alias.alias_domain_id.name,
|
||||
alias_name=alias.display_name,
|
||||
company_name=company.name,
|
||||
))
|
||||
|
||||
@api.constrains('alias_name')
|
||||
def _alias_is_ascii(self):
|
||||
def _check_alias_is_ascii(self):
|
||||
""" The local-part ("display-name" <local-part@domain>) of an
|
||||
address only contains limited range of ascii characters.
|
||||
We DO NOT allow anything else than ASCII dot-atom formed
|
||||
local-part. Quoted-string and internationnal characters are
|
||||
to be rejected. See rfc5322 sections 3.4.1 and 3.2.3
|
||||
"""
|
||||
for alias in self:
|
||||
if alias.alias_name and not dot_atom_text.match(alias.alias_name):
|
||||
raise ValidationError(_(
|
||||
"You cannot use anything else than unaccented latin characters in the alias address (%s).",
|
||||
alias.alias_name,
|
||||
))
|
||||
|
||||
@api.depends('alias_name')
|
||||
def _compute_alias_domain(self):
|
||||
self.alias_domain = self.env["ir.config_parameter"].sudo().get_param("mail.catchall.domain")
|
||||
for alias in self.filtered('alias_name'):
|
||||
if not dot_atom_text.match(alias.alias_name):
|
||||
raise ValidationError(
|
||||
_("You cannot use anything else than unaccented latin characters in the alias address %(alias_name)s.",
|
||||
alias_name=alias.alias_name)
|
||||
)
|
||||
|
||||
@api.constrains('alias_defaults')
|
||||
def _check_alias_defaults(self):
|
||||
for alias in self:
|
||||
try:
|
||||
dict(ast.literal_eval(alias.alias_defaults))
|
||||
except Exception:
|
||||
raise ValidationError(_('Invalid expression, it must be a literal python dictionary definition e.g. "{\'field\': \'value\'}"'))
|
||||
except Exception as e:
|
||||
raise ValidationError(
|
||||
_('Invalid expression, it must be a literal python dictionary definition e.g. "{\'field\': \'value\'}"')
|
||||
) from e
|
||||
|
||||
@api.constrains('alias_name', 'alias_domain_id')
|
||||
def _check_alias_domain_clash(self):
|
||||
""" Within a given alias domain, aliases should not conflict with bounce
|
||||
or catchall email addresses, as emails should be unique for the gateway. """
|
||||
failing = self.filtered(lambda alias: alias.alias_name and alias.alias_name in [
|
||||
alias.alias_domain_id.bounce_alias, alias.alias_domain_id.catchall_alias
|
||||
])
|
||||
if failing:
|
||||
raise ValidationError(
|
||||
_('Aliases %(alias_names)s is already used as bounce or catchall address. Please choose another alias.',
|
||||
alias_names=', '.join(failing.mapped('display_name')))
|
||||
)
|
||||
|
||||
@api.depends('alias_domain_id.name', 'alias_name')
|
||||
def _compute_alias_full_name(self):
|
||||
""" A bit like display_name, but without the 'inactive alias' UI display.
|
||||
Moreover it is stored, allowing to search on it. """
|
||||
for record in self:
|
||||
if record.alias_domain_id and record.alias_name:
|
||||
record.alias_full_name = f"{record.alias_name}@{record.alias_domain_id.name}"
|
||||
elif record.alias_name:
|
||||
record.alias_full_name = record.alias_name
|
||||
else:
|
||||
record.alias_full_name = False
|
||||
|
||||
@api.depends('alias_domain', 'alias_name')
|
||||
def _compute_display_name(self):
|
||||
""" Return the mail alias display alias_name, including the catchall
|
||||
domain if found otherwise "Inactive Alias". e.g.`jobs@mail.odoo.com`
|
||||
or `jobs` or 'Inactive Alias' """
|
||||
for record in self:
|
||||
if record.alias_name and record.alias_domain:
|
||||
record.display_name = f"{record.alias_name}@{record.alias_domain}"
|
||||
elif record.alias_name:
|
||||
record.display_name = record.alias_name
|
||||
else:
|
||||
record.display_name = _("Inactive Alias")
|
||||
|
||||
@api.depends('alias_contact', 'alias_defaults', 'alias_model_id')
|
||||
def _compute_alias_status(self):
|
||||
"""Reset alias_status to "not_tested" when fields, that can be the source of an error, are modified."""
|
||||
self.alias_status = 'not_tested'
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
""" Creates email.alias records according to the values provided in
|
||||
``vals`` with 1 alteration:
|
||||
""" Creates mail.alias records according to the values provided in
|
||||
``vals`` but sanitize 'alias_name' by replacing certain unsafe
|
||||
characters; set default alias domain if not given.
|
||||
|
||||
* ``alias_name`` value may be cleaned by replacing certain unsafe
|
||||
characters;
|
||||
|
||||
:raise UserError: if given alias_name is already assigned or there are
|
||||
duplicates in given vals_list;
|
||||
:raise UserError: if given (alias_name, alias_domain_id) already exists
|
||||
or if there are duplicates in given vals_list;
|
||||
"""
|
||||
alias_names = [vals['alias_name'] for vals in vals_list if vals.get('alias_name')]
|
||||
if alias_names:
|
||||
sanitized_names = self._clean_and_check_unique(alias_names)
|
||||
for vals in vals_list:
|
||||
if vals.get('alias_name'):
|
||||
vals['alias_name'] = sanitized_names[alias_names.index(vals['alias_name'])]
|
||||
return super(Alias, self).create(vals_list)
|
||||
alias_names, alias_domains = [], []
|
||||
for vals in vals_list:
|
||||
vals['alias_name'] = self._sanitize_alias_name(vals.get('alias_name'))
|
||||
alias_names.append(vals['alias_name'])
|
||||
vals['alias_domain_id'] = vals.get('alias_domain_id', self.env.company.alias_domain_id.id)
|
||||
alias_domains.append(self.env['mail.alias.domain'].browse(vals['alias_domain_id']))
|
||||
|
||||
self._check_unique(alias_names, alias_domains)
|
||||
return super().create(vals_list)
|
||||
|
||||
def write(self, vals):
|
||||
""""Raises UserError if given alias name is already assigned"""
|
||||
if vals.get('alias_name') and self.ids:
|
||||
if len(self) > 1:
|
||||
raise UserError(_(
|
||||
'Email alias %(alias_name)s cannot be used on %(count)d records at the same time. Please update records one by one.',
|
||||
alias_name=vals['alias_name'], count=len(self)
|
||||
))
|
||||
vals['alias_name'] = self._clean_and_check_unique([vals.get('alias_name')])[0]
|
||||
return super(Alias, self).write(vals)
|
||||
|
||||
def name_get(self):
|
||||
"""Return the mail alias display alias_name, including the implicit
|
||||
mail catchall domain if exists from config otherwise "New Alias".
|
||||
e.g. `jobs@mail.odoo.com` or `jobs` or 'New Alias'
|
||||
""" Raise UserError with a meaningful message instead of letting the
|
||||
uniqueness constraint raise an SQL error. To check uniqueness we have
|
||||
to rebuild pairs of names / domains to validate, taking into account
|
||||
that a void alias_domain_id is acceptable (but also raises for
|
||||
uniqueness).
|
||||
"""
|
||||
res = []
|
||||
for record in self:
|
||||
if record.alias_name and record.alias_domain:
|
||||
res.append((record['id'], "%s@%s" % (record.alias_name, record.alias_domain)))
|
||||
elif record.alias_name:
|
||||
res.append((record['id'], "%s" % (record.alias_name)))
|
||||
alias_names, alias_domains = [], []
|
||||
if 'alias_name' in vals:
|
||||
vals['alias_name'] = self._sanitize_alias_name(vals['alias_name'])
|
||||
if vals.get('alias_name') and self.ids:
|
||||
alias_names = [vals['alias_name']] * len(self)
|
||||
elif 'alias_name' not in vals and 'alias_domain_id' in vals:
|
||||
# avoid checking when writing the same value
|
||||
if [vals['alias_domain_id']] != self.alias_domain_id.ids:
|
||||
alias_names = self.filtered('alias_name').mapped('alias_name')
|
||||
|
||||
if alias_names:
|
||||
tocheck_records = self if vals.get('alias_name') else self.filtered('alias_name')
|
||||
if 'alias_domain_id' in vals:
|
||||
alias_domains = [self.env['mail.alias.domain'].browse(vals['alias_domain_id'])] * len(tocheck_records)
|
||||
else:
|
||||
res.append((record['id'], _("Inactive Alias")))
|
||||
return res
|
||||
alias_domains = [record.alias_domain_id for record in tocheck_records]
|
||||
self._check_unique(alias_names, alias_domains)
|
||||
|
||||
def _clean_and_check_mail_catchall_allowed_domains(self, value):
|
||||
""" The purpose of this system parameter is to avoid the creation
|
||||
of records from incoming emails with a domain != alias_domain
|
||||
but that have a pattern matching an internal mail.alias . """
|
||||
value = [domain.strip().lower() for domain in value.split(',') if domain.strip()]
|
||||
if not value:
|
||||
raise ValidationError(_("Value for `mail.catchall.domain.allowed` cannot be validated.\n"
|
||||
"It should be a comma separated list of domains e.g. example.com,example.org."))
|
||||
return ",".join(value)
|
||||
return super().write(vals)
|
||||
|
||||
def _clean_and_check_unique(self, names):
|
||||
"""When an alias name appears to already be an email, we keep the local
|
||||
part only. A sanitizing / cleaning is also performed on the name. If
|
||||
name already exists an UserError is raised. """
|
||||
def _check_unique(self, alias_names, alias_domains):
|
||||
""" Check unicity constraint won't be raised, otherwise raise a UserError
|
||||
with a complete error message. Also check unicity against alias config
|
||||
parameters.
|
||||
|
||||
def _sanitize_alias_name(name):
|
||||
""" Cleans and sanitizes the alias name """
|
||||
sanitized_name = remove_accents(name).lower().split('@')[0]
|
||||
sanitized_name = re.sub(r'[^\w+.]+', '-', sanitized_name)
|
||||
sanitized_name = re.sub(r'^\.+|\.+$|\.+(?=\.)', '', sanitized_name)
|
||||
sanitized_name = sanitized_name.encode('ascii', errors='replace').decode()
|
||||
return sanitized_name
|
||||
:param list alias_names: a list of names (considered as sanitized
|
||||
and ready to be sent to DB);
|
||||
:param list alias_domains: list of alias_domain records under which
|
||||
the check is performed, as uniqueness is performed for given pair
|
||||
(name, alias_domain);
|
||||
"""
|
||||
if len(alias_names) != len(alias_domains):
|
||||
msg = (f"Invalid call to '_check_unique': names and domains should make coherent lists, "
|
||||
f"received {', '.join(alias_names)} and {', '.join(alias_domains.mapped('name'))}")
|
||||
raise ValueError(msg)
|
||||
|
||||
sanitized_names = [_sanitize_alias_name(name) for name in names]
|
||||
|
||||
catchall_alias = self.env['ir.config_parameter'].sudo().get_param('mail.catchall.alias')
|
||||
bounce_alias = self.env['ir.config_parameter'].sudo().get_param('mail.bounce.alias')
|
||||
alias_domain = self.env["ir.config_parameter"].sudo().get_param("mail.catchall.domain")
|
||||
|
||||
# matches catchall or bounce alias
|
||||
for sanitized_name in sanitized_names:
|
||||
if sanitized_name in [catchall_alias, bounce_alias]:
|
||||
matching_alias_name = '%s@%s' % (sanitized_name, alias_domain) if alias_domain else sanitized_name
|
||||
# reorder per alias domain, keep only not void alias names (void domain also checks uniqueness)
|
||||
domain_to_names = defaultdict(list)
|
||||
for alias_name, alias_domain in zip(alias_names, alias_domains):
|
||||
if alias_name and alias_name in domain_to_names[alias_domain]:
|
||||
raise UserError(
|
||||
_('The e-mail alias %(matching_alias_name)s is already used as %(alias_duplicate)s alias. Please choose another alias.',
|
||||
matching_alias_name=matching_alias_name,
|
||||
alias_duplicate=_('catchall') if sanitized_name == catchall_alias else _('bounce'))
|
||||
_('Email aliases %(alias_name)s cannot be used on several records at the same time. Please update records one by one.',
|
||||
alias_name=alias_name)
|
||||
)
|
||||
if alias_name:
|
||||
domain_to_names[alias_domain].append(alias_name)
|
||||
|
||||
# matches existing alias
|
||||
domain = [('alias_name', 'in', sanitized_names)]
|
||||
if self:
|
||||
domain += [('id', 'not in', self.ids)]
|
||||
matching_alias = self.search(domain, limit=1)
|
||||
if not matching_alias:
|
||||
return sanitized_names
|
||||
|
||||
sanitized_alias_name = _sanitize_alias_name(matching_alias.alias_name)
|
||||
matching_alias_name = '%s@%s' % (sanitized_alias_name, alias_domain) if alias_domain else sanitized_alias_name
|
||||
if matching_alias.alias_parent_model_id and matching_alias.alias_parent_thread_id:
|
||||
# If parent model and parent thread ID both are set, display document name also in the warning
|
||||
document_name = self.env[matching_alias.alias_parent_model_id.model].sudo().browse(matching_alias.alias_parent_thread_id).display_name
|
||||
raise UserError(
|
||||
_('The e-mail alias %(matching_alias_name)s is already used by the %(document_name)s %(model_name)s. Choose another alias or change it on the other document.',
|
||||
matching_alias_name=matching_alias_name,
|
||||
document_name=document_name,
|
||||
model_name=matching_alias.alias_parent_model_id.name)
|
||||
)
|
||||
raise UserError(
|
||||
_('The e-mail alias %(matching_alias_name)s is already linked with %(alias_model_name)s. Choose another alias or change it on the linked model.',
|
||||
matching_alias_name=matching_alias_name,
|
||||
alias_model_name=matching_alias.alias_model_id.name)
|
||||
domain = Domain.OR(
|
||||
Domain('alias_name', 'in', alias_names) & Domain('alias_domain_id', '=', alias_domain.id)
|
||||
for alias_domain, alias_names in domain_to_names.items()
|
||||
)
|
||||
if domain and self:
|
||||
domain &= Domain('id', 'not in', self.ids)
|
||||
existing = self.search(domain, limit=1) if domain else self.env['mail.alias']
|
||||
if not existing:
|
||||
return
|
||||
if existing.alias_parent_model_id and existing.alias_parent_thread_id:
|
||||
parent_name = self.env[existing.alias_parent_model_id.model].sudo().browse(existing.alias_parent_thread_id).display_name
|
||||
msg_begin = _(
|
||||
'Alias %(matching_name)s (%(current_id)s) is already linked with %(alias_model_name)s (%(matching_id)s) and used by the %(parent_name)s %(parent_model_name)s.',
|
||||
alias_model_name=existing.alias_model_id.name,
|
||||
current_id=self.ids if self else _('your alias'),
|
||||
matching_id=existing.id,
|
||||
matching_name=existing.display_name,
|
||||
parent_name=parent_name,
|
||||
parent_model_name=existing.alias_parent_model_id.name
|
||||
)
|
||||
else:
|
||||
msg_begin = _(
|
||||
'Alias %(matching_name)s (%(current_id)s) is already linked with %(alias_model_name)s (%(matching_id)s).',
|
||||
alias_model_name=existing.alias_model_id.name,
|
||||
current_id=self.ids if self else _('new'),
|
||||
matching_id=existing.id,
|
||||
matching_name=existing.display_name,
|
||||
)
|
||||
msg_end = _('Choose another value or change it on the other document.')
|
||||
raise UserError(f'{msg_begin} {msg_end}') # pylint: disable=missing-gettext
|
||||
|
||||
@api.model
|
||||
def _sanitize_allowed_domains(self, allowed_domains):
|
||||
""" When having aliases checked on email left-part only we may define
|
||||
an allowed list for right-part filtering, allowing more fine-grain than
|
||||
either alias domain, either everything. This method sanitized its value. """
|
||||
value = [domain.strip().lower() for domain in allowed_domains.split(',') if domain.strip()]
|
||||
if not value:
|
||||
raise ValidationError(_(
|
||||
"Value %(allowed_domains)s for `mail.catchall.domain.allowed` cannot be validated.\n"
|
||||
"It should be a comma separated list of domains e.g. example.com,example.org.",
|
||||
allowed_domains=allowed_domains
|
||||
))
|
||||
return ",".join(value)
|
||||
|
||||
@api.model
|
||||
def _sanitize_alias_name(self, name, is_email=False):
|
||||
""" Cleans and sanitizes the alias name. In some cases we want the alias
|
||||
to be a complete email instead of just a left-part (when sanitizing
|
||||
default.from for example). In that case we extract the right part and
|
||||
put it back after sanitizing the left part.
|
||||
|
||||
:param str name: the alias name to sanitize;
|
||||
:param bool is_email: whether to keep a right part, otherwise only
|
||||
left part is kept;
|
||||
|
||||
:returns: sanitized alias name
|
||||
:rtype: str
|
||||
"""
|
||||
sanitized_name = name.strip() if name else ''
|
||||
if is_email:
|
||||
right_part = sanitized_name.lower().partition('@')[2]
|
||||
else:
|
||||
right_part = False
|
||||
if sanitized_name:
|
||||
sanitized_name = remove_accents(sanitized_name).lower().split('@')[0]
|
||||
# cannot start and end with dot
|
||||
sanitized_name = re.sub(r'^\.+|\.+$|\.+(?=\.)', '', sanitized_name)
|
||||
# subset of allowed characters
|
||||
sanitized_name = re.sub(r'[^\w!#$%&\'*+\-/=?^_`{|}~.]+', '-', sanitized_name)
|
||||
sanitized_name = sanitized_name.encode('ascii', errors='replace').decode()
|
||||
if not sanitized_name.strip():
|
||||
return False
|
||||
return f'{sanitized_name}@{right_part}' if is_email and right_part else sanitized_name
|
||||
|
||||
@api.model
|
||||
def _is_encodable(self, alias_name, charset='ascii'):
|
||||
""" Check if alias_name is encodable. Standard charset is ascii, as
|
||||
UTF-8 requires a specific extension. Not recommended for outgoing
|
||||
aliases. 'remove_accents' is performed as sanitization process of
|
||||
the name will do it anyway. """
|
||||
try:
|
||||
remove_accents(alias_name).encode(charset)
|
||||
except UnicodeEncodeError:
|
||||
return False
|
||||
return True
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# ACTIONS
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def open_document(self):
|
||||
if not self.alias_model_id or not self.alias_force_thread_id:
|
||||
|
|
@ -234,36 +428,23 @@ class Alias(models.Model):
|
|||
'type': 'ir.actions.act_window',
|
||||
}
|
||||
|
||||
def _get_alias_bounced_body_fallback(self, message_dict):
|
||||
contact_description = self._get_alias_contact_description()
|
||||
default_email = self.env.company.partner_id.email_formatted if self.env.company.partner_id.email else self.env.company.name
|
||||
return Markup(
|
||||
_("""<p>Dear Sender,<br /><br />
|
||||
The message below could not be accepted by the address %(alias_display_name)s.
|
||||
Only %(contact_description)s are allowed to contact it.<br /><br />
|
||||
Please make sure you are using the correct address or contact us at %(default_email)s instead.<br /><br />
|
||||
Kind Regards,</p>"""
|
||||
)) % {
|
||||
'alias_display_name': self.display_name,
|
||||
'contact_description': contact_description,
|
||||
'default_email': default_email,
|
||||
}
|
||||
|
||||
def _get_alias_contact_description(self):
|
||||
if self.alias_contact == 'partners':
|
||||
return _('addresses linked to registered partners')
|
||||
return _('some specific addresses')
|
||||
# ------------------------------------------------------------
|
||||
# MAIL GATEWAY
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def _get_alias_bounced_body(self, message_dict):
|
||||
"""Get the body of the email return in case of bounced email.
|
||||
"""Get the body of the email return in case of bounced email when the
|
||||
alias does not accept incoming email e.g. contact is not allowed.
|
||||
|
||||
:param message_dict: dictionary of mail values
|
||||
:param dict message_dict: dictionary holding parsed message variables
|
||||
|
||||
:return: HTML to use as email body
|
||||
"""
|
||||
lang_author = False
|
||||
if message_dict.get('author_id'):
|
||||
try:
|
||||
lang_author = self.env['res.partner'].browse(message_dict['author_id']).lang
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if lang_author:
|
||||
|
|
@ -277,3 +458,77 @@ Kind Regards,</p>"""
|
|||
'body': body,
|
||||
'message': message_dict
|
||||
}, minimal_qcontext=True)
|
||||
|
||||
def _get_alias_bounced_body_fallback(self, message_dict):
|
||||
""" Default body of bounced emails. See '_get_alias_bounced_body' """
|
||||
contact_description = self._get_alias_contact_description()
|
||||
default_email = self.env.company.partner_id.email_formatted if self.env.company.partner_id.email else self.env.company.name
|
||||
content = Markup(
|
||||
_("""The message below could not be accepted by the address %(alias_display_name)s.
|
||||
Only %(contact_description)s are allowed to contact it.<br /><br />
|
||||
Please make sure you are using the correct address or contact us at %(default_email)s instead."""
|
||||
)
|
||||
) % {
|
||||
'alias_display_name': self.display_name,
|
||||
'contact_description': contact_description,
|
||||
'default_email': default_email,
|
||||
}
|
||||
return Markup('<p>%(header)s,<br /><br />%(content)s<br /><br />%(regards)s</p>') % {
|
||||
'content': content,
|
||||
'header': _('Dear Sender'),
|
||||
'regards': _('Kind Regards'),
|
||||
}
|
||||
|
||||
def _get_alias_contact_description(self):
|
||||
if self.alias_contact == 'partners':
|
||||
return _('addresses linked to registered partners')
|
||||
return _('some specific addresses')
|
||||
|
||||
def _get_alias_invalid_body(self, message_dict):
|
||||
"""Get the body of the bounced email returned when the alias is incorrectly
|
||||
configured e.g. error in alias_defaults.
|
||||
|
||||
:param dict message_dict: dictionary holding parsed message variables
|
||||
|
||||
:return: HTML to use as email body
|
||||
"""
|
||||
content = Markup(
|
||||
_("""The message below could not be accepted by the address %(alias_display_name)s.
|
||||
Please try again later or contact %(company_name)s instead."""
|
||||
)
|
||||
) % {
|
||||
'alias_display_name': self.display_name,
|
||||
'company_name': self.env.company.name,
|
||||
}
|
||||
return self.env['ir.qweb']._render('mail.mail_bounce_alias_security', {
|
||||
'body': Markup('<p>%(header)s,<br /><br />%(content)s<br /><br />%(regards)s</p>') % {
|
||||
'content': content,
|
||||
'header': _('Dear Sender'),
|
||||
'regards': _('Kind Regards'),
|
||||
},
|
||||
'message': message_dict
|
||||
}, minimal_qcontext=True)
|
||||
|
||||
def _alias_bounce_incoming_email(self, message, message_dict, set_invalid=True):
|
||||
"""Set alias status to invalid and create bounce message to the sender.
|
||||
|
||||
This method must be called when a message received on the alias has
|
||||
caused an error due to the mis-configuration of the alias.
|
||||
|
||||
:param EmailMessage message: email message that is invalid and is about
|
||||
to bounce;
|
||||
:param dict message_dict: dictionary holding parsed message variables
|
||||
:param bool set_invalid: set alias as invalid, to be done notably if
|
||||
bounce is considered as coming from a configuration error instead of
|
||||
being rejected due to alias rules;
|
||||
"""
|
||||
self.ensure_one()
|
||||
if set_invalid:
|
||||
self.alias_status = 'invalid'
|
||||
body = self._get_alias_invalid_body(message_dict)
|
||||
else:
|
||||
body = self._get_alias_bounced_body(message_dict)
|
||||
self.env['mail.thread']._routing_create_bounce_email(
|
||||
message_dict['email_from'], body, message,
|
||||
references=message_dict['message_id'],
|
||||
)
|
||||
|
|
|
|||
258
odoo-bringout-oca-ocb-mail/mail/models/mail_alias_domain.py
Normal file
258
odoo-bringout-oca-ocb-mail/mail/models/mail_alias_domain.py
Normal file
|
|
@ -0,0 +1,258 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, exceptions, fields, models, _
|
||||
from odoo.addons.mail.models.mail_alias import dot_atom_text
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
|
||||
class MailAliasDomain(models.Model):
|
||||
""" Model alias domains, now company-specific. Alias domains are email
|
||||
domains used to receive emails through catchall and bounce aliases, as
|
||||
well as using mail.alias records to redirect email replies.
|
||||
|
||||
This replaces ``mail.alias.domain`` configuration parameter use until v16.
|
||||
"""
|
||||
_name = 'mail.alias.domain'
|
||||
_description = "Email Domain"
|
||||
_order = 'sequence ASC, id ASC'
|
||||
|
||||
name = fields.Char(
|
||||
'Name', required=True,
|
||||
help="Email domain e.g. 'example.com' in 'odoo@example.com'")
|
||||
company_ids = fields.One2many(
|
||||
'res.company', 'alias_domain_id', string='Companies',
|
||||
help="Companies using this domain as default for sending mails")
|
||||
sequence = fields.Integer(default=10)
|
||||
bounce_alias = fields.Char(
|
||||
'Bounce Alias', default='bounce', required=True,
|
||||
help="Local-part of email used for Return-Path used when emails bounce e.g. "
|
||||
"'bounce' in 'bounce@example.com'")
|
||||
bounce_email = fields.Char('Bounce Email', compute='_compute_bounce_email')
|
||||
catchall_alias = fields.Char(
|
||||
'Catchall Alias', default='catchall', required=True,
|
||||
help="Local-part of email used for Reply-To to catch answers e.g. "
|
||||
"'catchall' in 'catchall@example.com'")
|
||||
catchall_email = fields.Char('Catchall Email', compute='_compute_catchall_email')
|
||||
default_from = fields.Char(
|
||||
'Default From Alias', default='notifications',
|
||||
help="Default from when it does not match outgoing server filters. Can be either "
|
||||
"a local-part e.g. 'notifications' either a complete email address e.g. "
|
||||
"'notifications@example.com' to override all outgoing emails.")
|
||||
default_from_email = fields.Char('Default From', compute='_compute_default_from_email')
|
||||
|
||||
_bounce_email_uniques = models.Constraint(
|
||||
'UNIQUE(bounce_alias, name)',
|
||||
'Bounce emails should be unique',
|
||||
)
|
||||
_catchall_email_uniques = models.Constraint(
|
||||
'UNIQUE(catchall_alias, name)',
|
||||
'Catchall emails should be unique',
|
||||
)
|
||||
|
||||
@api.depends('bounce_alias', 'name')
|
||||
def _compute_bounce_email(self):
|
||||
self.bounce_email = ''
|
||||
for domain in self.filtered('bounce_alias'):
|
||||
domain.bounce_email = f'{domain.bounce_alias}@{domain.name}'
|
||||
|
||||
@api.depends('catchall_alias', 'name')
|
||||
def _compute_catchall_email(self):
|
||||
self.catchall_email = ''
|
||||
for domain in self.filtered('catchall_alias'):
|
||||
domain.catchall_email = f'{domain.catchall_alias}@{domain.name}'
|
||||
|
||||
@api.depends('default_from', 'name')
|
||||
def _compute_default_from_email(self):
|
||||
""" Default from may be a valid complete email and not only a left-part
|
||||
like bounce or catchall aliases. Adding domain name should therefore
|
||||
be done only if necessary. """
|
||||
self.default_from_email = ''
|
||||
for domain in self.filtered('default_from'):
|
||||
if "@" in domain.default_from:
|
||||
domain.default_from_email = domain.default_from
|
||||
else:
|
||||
domain.default_from_email = f'{domain.default_from}@{domain.name}'
|
||||
|
||||
@api.constrains('bounce_alias', 'catchall_alias')
|
||||
def _check_bounce_catchall_uniqueness(self):
|
||||
names = self.filtered('bounce_alias').mapped('bounce_alias') + self.filtered('catchall_alias').mapped('catchall_alias')
|
||||
if not names:
|
||||
return
|
||||
|
||||
similar_domains = self.env['mail.alias.domain'].search([('name', 'in', self.mapped('name'))])
|
||||
for tocheck in self:
|
||||
if any(similar.bounce_alias == tocheck.bounce_alias
|
||||
for similar in similar_domains if similar != tocheck and similar.name == tocheck.name):
|
||||
raise exceptions.ValidationError(
|
||||
_('Bounce alias %(bounce)s is already used for another domain with same name. '
|
||||
'Use another bounce or simply use the other alias domain.',
|
||||
bounce=tocheck.bounce_email)
|
||||
)
|
||||
if any(similar.catchall_alias == tocheck.catchall_alias
|
||||
for similar in similar_domains if similar != tocheck and similar.name == tocheck.name):
|
||||
raise exceptions.ValidationError(
|
||||
_('Catchall alias %(catchall)s is already used for another domain with same name. '
|
||||
'Use another catchall or simply use the other alias domain.',
|
||||
catchall=tocheck.catchall_email)
|
||||
)
|
||||
|
||||
# search on left-part only to speedup, then filter on right part
|
||||
potential_aliases = self.env['mail.alias'].search([
|
||||
('alias_name', 'in', list(set(names))),
|
||||
('alias_domain_id', '!=', False)
|
||||
])
|
||||
existing = next(
|
||||
(alias for alias in potential_aliases
|
||||
if alias.display_name in (self.mapped('bounce_email') + self.mapped('catchall_email'))),
|
||||
self.env['mail.alias']
|
||||
)
|
||||
if existing:
|
||||
document_name = False
|
||||
# If owner or target: display document name also in the warning
|
||||
if existing.alias_parent_model_id and existing.alias_parent_thread_id:
|
||||
document_name = self.env[existing.alias_parent_model_id.model].sudo().browse(existing.alias_parent_thread_id).display_name
|
||||
elif existing.alias_model_id and existing.alias_force_thread_id:
|
||||
document_name = self.env[existing.alias_model_id.model].sudo().browse(existing.alias_force_thread_id).display_name
|
||||
if document_name:
|
||||
raise exceptions.ValidationError(
|
||||
_("Bounce/Catchall '%(matching_alias_name)s' is already used by %(document_name)s. Choose another alias or change it on the other document.",
|
||||
matching_alias_name=existing.display_name,
|
||||
document_name=document_name)
|
||||
)
|
||||
raise exceptions.ValidationError(
|
||||
_("Bounce/Catchall '%(matching_alias_name)s' is already used. Choose another alias or change it on the linked model.",
|
||||
matching_alias_name=existing.display_name)
|
||||
)
|
||||
|
||||
@api.constrains('name')
|
||||
def _check_name(self):
|
||||
""" Should match a sanitized version of itself, otherwise raise to warn
|
||||
user (do not dynamically change it, would be confusing). """
|
||||
for domain in self:
|
||||
if not dot_atom_text.match(domain.name):
|
||||
raise exceptions.ValidationError(
|
||||
_("You cannot use anything else than unaccented latin characters in the domain name %(domain_name)s.",
|
||||
domain_name=domain.name)
|
||||
)
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
""" Sanitize bounce_alias / catchall_alias / default_from """
|
||||
for vals in vals_list:
|
||||
self._sanitize_configuration(vals)
|
||||
|
||||
alias_domains = super().create(vals_list)
|
||||
alias_domains._check_default_from_not_used_by_users()
|
||||
|
||||
# alias domain init: populate companies and aliases at first creation
|
||||
if alias_domains and self.search_count([]) == len(alias_domains):
|
||||
# during first init we assume that we want to attribute this
|
||||
# alias domain to all companies, irrespective of the fact
|
||||
# that they are archived or not. So we run active_test=False
|
||||
# on the just created alias domain
|
||||
|
||||
self.env['res.company'].with_context(active_test=False).search(
|
||||
[('alias_domain_id', '=', False)]
|
||||
).alias_domain_id = alias_domains[0].id
|
||||
self.env['mail.alias'].sudo().search(
|
||||
[('alias_domain_id', '=', False)]
|
||||
).alias_domain_id = alias_domains[0].id
|
||||
|
||||
return alias_domains
|
||||
|
||||
def write(self, vals):
|
||||
""" Sanitize bounce_alias / catchall_alias / default_from """
|
||||
self._sanitize_configuration(vals)
|
||||
ret = super().write(vals)
|
||||
self._check_default_from_not_used_by_users()
|
||||
return ret
|
||||
|
||||
def _check_default_from_not_used_by_users(self):
|
||||
"""Check that the default from is not used by a personal mail servers."""
|
||||
match_from_filter = self.env["ir.mail_server"]._match_from_filter
|
||||
personal_mail_servers = self.env["ir.mail_server"].sudo().search([("owner_user_id", "!=", False)])
|
||||
if any(
|
||||
match_from_filter(e, server.from_filter)
|
||||
for e in self.mapped("default_from_email")
|
||||
for server in personal_mail_servers
|
||||
):
|
||||
raise UserError(_("A personal mail server is using that address, you can not use it."))
|
||||
|
||||
@api.model
|
||||
def _sanitize_configuration(self, config_values):
|
||||
""" Tool sanitizing configuration values for domains """
|
||||
if config_values.get('bounce_alias'):
|
||||
config_values['bounce_alias'] = self.env['mail.alias']._sanitize_alias_name(config_values['bounce_alias'])
|
||||
if config_values.get('catchall_alias'):
|
||||
config_values['catchall_alias'] = self.env['mail.alias']._sanitize_alias_name(config_values['catchall_alias'])
|
||||
if config_values.get('default_from'):
|
||||
config_values['default_from'] = self.env['mail.alias']._sanitize_alias_name(
|
||||
config_values['default_from'], is_email=True
|
||||
)
|
||||
return config_values
|
||||
|
||||
@api.model
|
||||
def _find_aliases(self, email_list):
|
||||
""" Utility method to find both alias domains aliases (bounce, catchall
|
||||
or default from) and mail aliases from an email list.
|
||||
|
||||
:param email_list: list of normalized emails; normalization / removing
|
||||
wrong emails is considered as being caller's job
|
||||
"""
|
||||
filtered_emails = [e for e in email_list if e and '@' in e]
|
||||
if not filtered_emails:
|
||||
return filtered_emails
|
||||
all_domains = self.search([])
|
||||
aliases = all_domains.mapped('bounce_email') + all_domains.mapped('catchall_email') + all_domains.mapped('default_from_email')
|
||||
|
||||
catchall_domains_allowed = list(filter(None, (self.env["ir.config_parameter"].sudo().get_param(
|
||||
"mail.catchall.domain.allowed") or '').split(',')))
|
||||
if catchall_domains_allowed:
|
||||
catchall_domains_allowed += all_domains.mapped('name')
|
||||
email_localparts_tocheck = [
|
||||
email.partition('@')[0] for email in filtered_emails if (
|
||||
email.partition('@')[2] in catchall_domains_allowed
|
||||
)]
|
||||
else:
|
||||
email_localparts_tocheck = [email.partition('@')[0] for email in filtered_emails if email]
|
||||
|
||||
# search on aliases using the proposed list, as we could have a lot of aliases
|
||||
# better than returning 'all alias emails'
|
||||
potential_aliases = self.env['mail.alias'].search([
|
||||
'|',
|
||||
('alias_full_name', 'in', filtered_emails),
|
||||
'&', ('alias_name', 'in', email_localparts_tocheck), ('alias_incoming_local', '=', True),
|
||||
])
|
||||
# global alias: email match
|
||||
aliases += potential_aliases.filtered(lambda x: not x.alias_incoming_local).mapped('alias_full_name')
|
||||
# compat-mode alias: left-part only (filter on allowed domains already done)
|
||||
local_alias_names = potential_aliases.filtered(lambda x: x.alias_incoming_local).mapped('alias_name')
|
||||
return [
|
||||
email for email in filtered_emails if (
|
||||
email in aliases or
|
||||
email.partition('@')[0] in local_alias_names
|
||||
)]
|
||||
|
||||
@api.model
|
||||
def _migrate_icp_to_domain(self):
|
||||
""" Compatibility layer helping going from pre-v17 ICP to alias
|
||||
domains. Mainly used when base mail configuration is done with 'base'
|
||||
module only and 'mail' is installed afterwards: configuration should
|
||||
not be lost (odoo.sh use case). """
|
||||
Icp = self.env['ir.config_parameter'].sudo()
|
||||
alias_domain = Icp.get_param('mail.catchall.domain')
|
||||
if alias_domain:
|
||||
existing = self.search([('name', '=', alias_domain)])
|
||||
if existing:
|
||||
return existing
|
||||
bounce_alias = Icp.get_param('mail.bounce.alias')
|
||||
catchall_alias = Icp.get_param('mail.catchall.alias')
|
||||
default_from = Icp.get_param('mail.default.from')
|
||||
return self.create({
|
||||
'bounce_alias': bounce_alias or 'bounce',
|
||||
'catchall_alias': catchall_alias or 'catchall',
|
||||
'default_from': default_from or 'notifications',
|
||||
'name': alias_domain,
|
||||
})
|
||||
return self.browse()
|
||||
|
|
@ -3,93 +3,34 @@
|
|||
|
||||
import logging
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
from odoo import fields, models
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AliasMixin(models.AbstractModel):
|
||||
""" A mixin for models that inherits mail.alias. This mixin initializes the
|
||||
alias_id column in database, and manages the expected one-to-one
|
||||
relation between your model and mail aliases.
|
||||
"""
|
||||
class MailAliasMixin(models.AbstractModel):
|
||||
""" A mixin for models that inherits mail.alias to have a one-to-one relation
|
||||
between the model and its alias. """
|
||||
_name = 'mail.alias.mixin'
|
||||
_inherit = ['mail.alias.mixin.optional']
|
||||
_inherits = {'mail.alias': 'alias_id'}
|
||||
_description = 'Email Aliases Mixin'
|
||||
ALIAS_WRITEABLE_FIELDS = ['alias_name', 'alias_contact', 'alias_defaults', 'alias_bounced_content']
|
||||
|
||||
alias_id = fields.Many2one('mail.alias', string='Alias', ondelete="restrict", required=True)
|
||||
alias_id = fields.Many2one(required=True)
|
||||
alias_name = fields.Char(inherited=True)
|
||||
alias_defaults = fields.Text(inherited=True)
|
||||
|
||||
# --------------------------------------------------
|
||||
# CRUD
|
||||
# --------------------------------------------------
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
""" Create a record with each ``vals`` or ``vals_list`` and create a corresponding alias. """
|
||||
# prepare all alias values
|
||||
alias_vals_list, record_vals_list = [], []
|
||||
for vals in vals_list:
|
||||
new_alias = not vals.get('alias_id')
|
||||
if new_alias:
|
||||
alias_vals, record_vals = self._alias_filter_fields(vals)
|
||||
alias_vals.update(self._alias_get_creation_values())
|
||||
alias_vals_list.append(alias_vals)
|
||||
record_vals_list.append(record_vals)
|
||||
|
||||
# create all aliases
|
||||
alias_ids = []
|
||||
if alias_vals_list:
|
||||
alias_ids = iter(self.env['mail.alias'].sudo().create(alias_vals_list).ids)
|
||||
|
||||
# update alias values in create vals directly
|
||||
valid_vals_list = []
|
||||
record_vals_iter = iter(record_vals_list)
|
||||
for vals in vals_list:
|
||||
new_alias = not vals.get('alias_id')
|
||||
if new_alias:
|
||||
record_vals = next(record_vals_iter)
|
||||
record_vals['alias_id'] = next(alias_ids)
|
||||
valid_vals_list.append(record_vals)
|
||||
else:
|
||||
valid_vals_list.append(vals)
|
||||
|
||||
records = super(AliasMixin, self).create(valid_vals_list)
|
||||
|
||||
for record in records:
|
||||
record.alias_id.sudo().write(record._alias_get_creation_values())
|
||||
|
||||
return records
|
||||
|
||||
def write(self, vals):
|
||||
""" Split writable fields of mail.alias and other fields alias fields will
|
||||
write with sudo and the other normally """
|
||||
alias_vals, record_vals = self._alias_filter_fields(vals, filters=self.ALIAS_WRITEABLE_FIELDS)
|
||||
if record_vals:
|
||||
super(AliasMixin, self).write(record_vals)
|
||||
if alias_vals and (record_vals or self.check_access_rights('write', raise_exception=False)):
|
||||
self.mapped('alias_id').sudo().write(alias_vals)
|
||||
|
||||
return True
|
||||
|
||||
def unlink(self):
|
||||
""" Delete the given records, and cascade-delete their corresponding alias. """
|
||||
aliases = self.mapped('alias_id')
|
||||
res = super(AliasMixin, self).unlink()
|
||||
aliases.sudo().unlink()
|
||||
return res
|
||||
|
||||
@api.returns(None, lambda value: value[0])
|
||||
def copy_data(self, default=None):
|
||||
data = super(AliasMixin, self).copy_data(default)[0]
|
||||
for fields_not_writable in set(self.env['mail.alias']._fields.keys()) - set(self.ALIAS_WRITEABLE_FIELDS):
|
||||
if fields_not_writable in data:
|
||||
del data[fields_not_writable]
|
||||
return [data]
|
||||
def _require_new_alias(self, record_vals):
|
||||
""" alias_id field is always required, due to inherits """
|
||||
return not record_vals.get('alias_id')
|
||||
|
||||
def _init_column(self, name):
|
||||
""" Create aliases for existing rows. """
|
||||
super(AliasMixin, self)._init_column(name)
|
||||
super()._init_column(name)
|
||||
if name == 'alias_id':
|
||||
# as 'mail.alias' records refer to 'ir.model' records, create
|
||||
# aliases after the reflection of models
|
||||
|
|
@ -104,34 +45,10 @@ class AliasMixin(models.AbstractModel):
|
|||
child_model = self.sudo().with_context(child_ctx)
|
||||
|
||||
for record in child_model.search([('alias_id', '=', False)]):
|
||||
# create the alias, and link it to the current record
|
||||
alias = self.env['mail.alias'].sudo().create(record._alias_get_creation_values())
|
||||
# create the alias associated with its company if one exists,
|
||||
# and link it to the current record
|
||||
record_company = record._mail_get_companies()[record.id]
|
||||
alias = self.env['mail.alias'].sudo().with_company(record_company).create(record._alias_get_creation_values())
|
||||
record.with_context(mail_notrack=True).alias_id = alias
|
||||
_logger.info('Mail alias created for %s %s (id %s)',
|
||||
record._name, record.display_name, record.id)
|
||||
|
||||
# --------------------------------------------------
|
||||
# MIXIN TOOL OVERRIDE METHODS
|
||||
# --------------------------------------------------
|
||||
|
||||
def _alias_get_creation_values(self):
|
||||
""" Return values to create an alias, or to write on the alias after its
|
||||
creation.
|
||||
"""
|
||||
return {
|
||||
'alias_parent_thread_id': self.id if self.id else False,
|
||||
'alias_parent_model_id': self.env['ir.model']._get(self._name).id,
|
||||
}
|
||||
|
||||
def _alias_filter_fields(self, values, filters=False):
|
||||
""" Split the vals dict into two dictionnary of vals, one for alias
|
||||
field and the other for other fields """
|
||||
if not filters:
|
||||
filters = self.env['mail.alias']._fields.keys()
|
||||
alias_values, record_values = {}, {}
|
||||
for fname in values.keys():
|
||||
if fname in filters:
|
||||
alias_values[fname] = values.get(fname)
|
||||
else:
|
||||
record_values[fname] = values.get(fname)
|
||||
return alias_values, record_values
|
||||
|
|
|
|||
|
|
@ -0,0 +1,204 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MailAliasMixinOptional(models.AbstractModel):
|
||||
""" A mixin for models that handles underlying 'mail.alias' records to use
|
||||
the mail gateway. Field is not mandatory and its creation is done dynamically
|
||||
based on given 'alias_name', allowing to gradually populate the alias table
|
||||
without having void aliases as when used with an inherits-like implementation.
|
||||
"""
|
||||
_name = 'mail.alias.mixin.optional'
|
||||
_description = 'Email Aliases Mixin (light)'
|
||||
ALIAS_WRITEABLE_FIELDS = ['alias_domain_id', 'alias_name', 'alias_contact', 'alias_defaults', 'alias_bounced_content']
|
||||
|
||||
alias_id = fields.Many2one('mail.alias', string='Alias', ondelete="restrict", required=False, copy=False)
|
||||
alias_name = fields.Char(related='alias_id.alias_name', readonly=False)
|
||||
alias_domain_id = fields.Many2one(
|
||||
'mail.alias.domain', string='Alias Domain',
|
||||
related='alias_id.alias_domain_id', readonly=False)
|
||||
alias_domain = fields.Char('Alias Domain Name', related='alias_id.alias_domain')
|
||||
alias_defaults = fields.Text(related='alias_id.alias_defaults')
|
||||
alias_email = fields.Char('Email Alias', compute='_compute_alias_email', search='_search_alias_email')
|
||||
|
||||
@api.depends('alias_domain', 'alias_name')
|
||||
def _compute_alias_email(self):
|
||||
""" Alias email can be used in views, as it is Falsy when having no domain
|
||||
or no name. Alias display name itself contains more info and cannot be
|
||||
used as it is in views. """
|
||||
self.alias_email = False
|
||||
for record in self.filtered(lambda rec: rec.alias_name and rec.alias_domain):
|
||||
record.alias_email = f"{record.alias_name}@{record.alias_domain}"
|
||||
|
||||
def _search_alias_email(self, operator, operand):
|
||||
return [('alias_id.alias_full_name', operator, operand)]
|
||||
|
||||
# --------------------------------------------------
|
||||
# CRUD
|
||||
# --------------------------------------------------
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
""" Create aliases using sudo if an alias is required, notably if its
|
||||
name is given. """
|
||||
# prefetch company information, used for alias domain
|
||||
company_fname = self._mail_get_company_field()
|
||||
if company_fname:
|
||||
company_id_default = self.default_get([company_fname]).get(company_fname) or self.env.company.id
|
||||
company_prefetch_ids = {vals[company_fname] for vals in vals_list if vals.get(company_fname)}
|
||||
company_prefetch_ids.add(company_id_default)
|
||||
else:
|
||||
company_id_default = self.env.company.id
|
||||
company_prefetch_ids = {company_id_default}
|
||||
|
||||
# prepare all alias values
|
||||
alias_vals_list, record_vals_list = [], []
|
||||
for vals in vals_list:
|
||||
if vals.get('alias_name'):
|
||||
vals['alias_name'] = self.env['mail.alias']._sanitize_alias_name(vals['alias_name'])
|
||||
if self._require_new_alias(vals):
|
||||
company_id = vals.get(company_fname) or company_id_default
|
||||
company = self.env['res.company'].with_prefetch(company_prefetch_ids).browse(company_id)
|
||||
alias_vals, record_vals = self._alias_filter_fields(vals)
|
||||
# generate record-agnostic base alias values
|
||||
alias_vals.update(self.env[self._name].with_context(
|
||||
default_alias_domain_id=alias_vals.get('alias_domain_id', company.alias_domain_id.id),
|
||||
)._alias_get_creation_values())
|
||||
alias_vals_list.append(alias_vals)
|
||||
record_vals_list.append(record_vals)
|
||||
|
||||
# create all aliases
|
||||
alias_ids = []
|
||||
if alias_vals_list:
|
||||
alias_ids = iter(self.env['mail.alias'].sudo().create(alias_vals_list).ids)
|
||||
|
||||
# update alias values in create vals directly
|
||||
valid_vals_list = []
|
||||
record_vals_iter = iter(record_vals_list)
|
||||
for vals in vals_list:
|
||||
if self._require_new_alias(vals):
|
||||
record_vals = next(record_vals_iter)
|
||||
record_vals['alias_id'] = next(alias_ids)
|
||||
valid_vals_list.append(record_vals)
|
||||
else:
|
||||
valid_vals_list.append(vals)
|
||||
|
||||
records = super().create(valid_vals_list)
|
||||
|
||||
# update alias values with values coming from record, post-create to have
|
||||
# access to all its values (notably its ID)
|
||||
records_walias = records.filtered('alias_id')
|
||||
for record in records_walias:
|
||||
alias_values = record._alias_get_creation_values()
|
||||
record.alias_id.sudo().write(alias_values)
|
||||
|
||||
return records
|
||||
|
||||
def write(self, vals):
|
||||
""" Split writable fields of mail.alias and other fields alias fields will
|
||||
write with sudo and the other normally. Also handle alias_domain_id
|
||||
update. If alias does not exist and we try to set a name, create the
|
||||
alias automatically. """
|
||||
# create missing aliases
|
||||
if vals.get('alias_name'):
|
||||
alias_create_values = [
|
||||
dict(
|
||||
record._alias_get_creation_values(),
|
||||
alias_name=self.env['mail.alias']._sanitize_alias_name(vals['alias_name']),
|
||||
)
|
||||
for record in self.filtered(lambda rec: not rec.alias_id)
|
||||
]
|
||||
if alias_create_values:
|
||||
aliases = self.env['mail.alias'].sudo().create(alias_create_values)
|
||||
for record, alias in zip(self.filtered(lambda rec: not rec.alias_id), aliases):
|
||||
record.alias_id = alias.id
|
||||
|
||||
alias_vals, record_vals = self._alias_filter_fields(vals, filters=self.ALIAS_WRITEABLE_FIELDS)
|
||||
if record_vals:
|
||||
super().write(record_vals)
|
||||
|
||||
# synchronize alias domain if company environment changed
|
||||
company_fname = self._mail_get_company_field()
|
||||
if company_fname in vals:
|
||||
alias_domain_values = self.filtered('alias_id')._alias_get_alias_domain_id()
|
||||
for record, alias_domain_id in alias_domain_values.items():
|
||||
record.sudo().alias_domain_id = alias_domain_id.id
|
||||
|
||||
if alias_vals and (record_vals or self.browse().has_access('write')):
|
||||
self.mapped('alias_id').sudo().write(alias_vals)
|
||||
|
||||
return True
|
||||
|
||||
def unlink(self):
|
||||
""" Delete the given records, and cascade-delete their corresponding alias. """
|
||||
aliases = self.mapped('alias_id')
|
||||
res = super().unlink()
|
||||
aliases.sudo().unlink()
|
||||
return res
|
||||
|
||||
def copy_data(self, default=None):
|
||||
vals_list = super().copy_data(default=default)
|
||||
not_writable_fields = set(self.env['mail.alias']._fields.keys()) - set(self.ALIAS_WRITEABLE_FIELDS)
|
||||
for vals in vals_list:
|
||||
for not_writable_field in not_writable_fields:
|
||||
if not_writable_field in vals:
|
||||
del vals[not_writable_field]
|
||||
return vals_list
|
||||
|
||||
@api.model
|
||||
def _require_new_alias(self, record_vals):
|
||||
""" Create only if no existing alias, and if a name is given, to avoid
|
||||
creating inactive aliases (falsy name). """
|
||||
return not record_vals.get('alias_id') and record_vals.get('alias_name')
|
||||
|
||||
# --------------------------------------------------
|
||||
# MIXIN TOOL OVERRIDE METHODS
|
||||
# --------------------------------------------------
|
||||
|
||||
def _alias_get_alias_domain_id(self):
|
||||
""" Return alias domain value to synchronize with owner's company.
|
||||
Implementing it with a compute is complicated, as its 'alias_domain_id'
|
||||
is a field on 'mail.alias' model, coming from 'alias_id' field and due
|
||||
to current implementation of the mixin, notably the create / write
|
||||
overrides, compute is not called in all cases. We therefore use a tool
|
||||
method to call in the mixin. """
|
||||
alias_domain_values = {}
|
||||
record_companies = self._mail_get_companies()
|
||||
for record in self:
|
||||
record_company = record_companies[record.id]
|
||||
alias_domain_values[record] = (
|
||||
record_company.alias_domain_id
|
||||
or record.alias_domain_id or self.env.company.alias_domain_id
|
||||
)
|
||||
return alias_domain_values
|
||||
|
||||
def _alias_get_creation_values(self):
|
||||
""" Return values to create an alias, or to write on the alias after its
|
||||
creation.
|
||||
"""
|
||||
values = {
|
||||
'alias_parent_thread_id': self.id if self.id else False,
|
||||
'alias_parent_model_id': self.env['ir.model']._get_id(self._name),
|
||||
}
|
||||
if 'default_alias_domain_id' in self.env.context:
|
||||
values['alias_domain_id'] = self.env.context['default_alias_domain_id']
|
||||
return values
|
||||
|
||||
def _alias_filter_fields(self, values, filters=False):
|
||||
""" Split the vals dict into two dictionnary of vals, one for alias
|
||||
field and the other for other fields """
|
||||
if not filters:
|
||||
filters = self.env['mail.alias']._fields.keys()
|
||||
alias_values, record_values = {}, {}
|
||||
for fname in values.keys():
|
||||
if fname in filters:
|
||||
alias_values[fname] = values.get(fname)
|
||||
else:
|
||||
record_values[fname] = values.get(fname)
|
||||
return alias_values, record_values
|
||||
|
|
@ -1,11 +1,11 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.fields import Domain
|
||||
|
||||
|
||||
class MailBlackList(models.Model):
|
||||
class MailBlacklist(models.Model):
|
||||
""" Model of blacklisted email addresses to stop sending emails."""
|
||||
_name = 'mail.blacklist'
|
||||
_inherit = ['mail.thread']
|
||||
|
|
@ -13,22 +13,23 @@ class MailBlackList(models.Model):
|
|||
_rec_name = 'email'
|
||||
|
||||
email = fields.Char(string='Email Address', required=True, index='trigram', help='This field is case insensitive.',
|
||||
tracking=True)
|
||||
active = fields.Boolean(default=True, tracking=True)
|
||||
tracking=1)
|
||||
active = fields.Boolean(default=True, tracking=2)
|
||||
|
||||
_sql_constraints = [
|
||||
('unique_email', 'unique (email)', 'Email address already exists!')
|
||||
]
|
||||
_unique_email = models.Constraint(
|
||||
'unique (email)',
|
||||
'Email address already exists!',
|
||||
)
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, values):
|
||||
def create(self, vals_list):
|
||||
# First of all, extract values to ensure emails are really unique (and don't modify values in place)
|
||||
new_values = []
|
||||
all_emails = []
|
||||
for value in values:
|
||||
for value in vals_list:
|
||||
email = tools.email_normalize(value.get('email'))
|
||||
if not email:
|
||||
raise UserError(_('Invalid email address %r', value['email']))
|
||||
raise UserError(_('Invalid email address “%s”', value['email']))
|
||||
if email in all_emails:
|
||||
continue
|
||||
all_emails.append(email)
|
||||
|
|
@ -36,71 +37,76 @@ class MailBlackList(models.Model):
|
|||
new_values.append(new_value)
|
||||
|
||||
""" To avoid crash during import due to unique email, return the existing records if any """
|
||||
sql = '''SELECT email, id FROM mail_blacklist WHERE email = ANY(%s)'''
|
||||
emails = [v['email'] for v in new_values]
|
||||
self._cr.execute(sql, (emails,))
|
||||
bl_entries = dict(self._cr.fetchall())
|
||||
to_create = [v for v in new_values if v['email'] not in bl_entries]
|
||||
to_create = []
|
||||
bl_entries = {}
|
||||
if new_values:
|
||||
sql = '''SELECT email, id FROM mail_blacklist WHERE email = ANY(%s)'''
|
||||
emails = [v['email'] for v in new_values]
|
||||
self.env.cr.execute(sql, (emails,))
|
||||
bl_entries = dict(self.env.cr.fetchall())
|
||||
to_create = [v for v in new_values if v['email'] not in bl_entries]
|
||||
|
||||
# TODO DBE Fixme : reorder ids according to incoming ids.
|
||||
results = super(MailBlackList, self).create(to_create)
|
||||
results = super().create(to_create)
|
||||
return self.env['mail.blacklist'].browse(bl_entries.values()) | results
|
||||
|
||||
def write(self, values):
|
||||
if 'email' in values:
|
||||
values['email'] = tools.email_normalize(values['email'])
|
||||
return super(MailBlackList, self).write(values)
|
||||
def write(self, vals):
|
||||
if 'email' in vals:
|
||||
vals['email'] = tools.email_normalize(vals['email'])
|
||||
return super().write(vals)
|
||||
|
||||
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
|
||||
def _search(self, domain, *args, **kwargs):
|
||||
""" Override _search in order to grep search on email field and make it
|
||||
lower-case and sanitized """
|
||||
if args:
|
||||
new_args = []
|
||||
for arg in args:
|
||||
if isinstance(arg, (list, tuple)) and arg[0] == 'email' and isinstance(arg[2], str):
|
||||
normalized = tools.email_normalize(arg[2])
|
||||
if normalized:
|
||||
new_args.append([arg[0], arg[1], normalized])
|
||||
else:
|
||||
new_args.append(arg)
|
||||
else:
|
||||
new_args.append(arg)
|
||||
else:
|
||||
new_args = args
|
||||
return super(MailBlackList, self)._search(new_args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid)
|
||||
domain = Domain(domain).map_conditions(
|
||||
lambda cond: Domain(cond.field_expr, cond.operator, norm_value)
|
||||
if cond.field_expr == 'email'
|
||||
and isinstance(cond.value, str)
|
||||
and (norm_value := tools.email_normalize(cond.value))
|
||||
else cond
|
||||
)
|
||||
return super()._search(domain, *args, **kwargs)
|
||||
|
||||
def _add(self, email):
|
||||
def _add(self, email, message=None):
|
||||
normalized = tools.email_normalize(email)
|
||||
record = self.env["mail.blacklist"].with_context(active_test=False).search([('email', '=', normalized)])
|
||||
if len(record) > 0:
|
||||
if message:
|
||||
record._track_set_log_message(message)
|
||||
record.action_unarchive()
|
||||
else:
|
||||
record = self.create({'email': email})
|
||||
if message:
|
||||
record.with_context(mail_post_autofollow_author_skip=True).message_post(
|
||||
body=message,
|
||||
subtype_xmlid='mail.mt_note',
|
||||
)
|
||||
return record
|
||||
|
||||
def action_remove_with_reason(self, email, reason=None):
|
||||
record = self._remove(email)
|
||||
if reason:
|
||||
record.message_post(body=_("Unblacklisting Reason: %s", reason))
|
||||
|
||||
return record
|
||||
|
||||
def _remove(self, email):
|
||||
def _remove(self, email, message=None):
|
||||
normalized = tools.email_normalize(email)
|
||||
record = self.env["mail.blacklist"].with_context(active_test=False).search([('email', '=', normalized)])
|
||||
if len(record) > 0:
|
||||
if message:
|
||||
record._track_set_log_message(message)
|
||||
record.action_archive()
|
||||
else:
|
||||
record = record.create({'email': email, 'active': False})
|
||||
if message:
|
||||
record.with_context(mail_post_autofollow_author_skip=True).message_post(
|
||||
body=message,
|
||||
subtype_xmlid='mail.mt_note',
|
||||
)
|
||||
return record
|
||||
|
||||
def mail_action_blacklist_remove(self):
|
||||
return {
|
||||
'name': _('Are you sure you want to unblacklist this Email Address?'),
|
||||
'name': _('Are you sure you want to unblacklist this email address?'),
|
||||
'type': 'ir.actions.act_window',
|
||||
'view_mode': 'form',
|
||||
'res_model': 'mail.blacklist.remove',
|
||||
'target': 'new',
|
||||
'context': {'dialog_size': 'medium'},
|
||||
}
|
||||
|
||||
def action_add(self):
|
||||
|
|
|
|||
|
|
@ -0,0 +1,85 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models, api
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class MailCannedResponse(models.Model):
|
||||
""" Canned Response: content that automatically replaces shortcuts of your
|
||||
choosing. This content can still be adapted before sending your message. """
|
||||
_name = 'mail.canned.response'
|
||||
_description = "Canned Response"
|
||||
_order = "id desc"
|
||||
_rec_name = "source"
|
||||
|
||||
source = fields.Char(
|
||||
"Shortcut", required=True, index="trigram",
|
||||
help="Canned response that will automatically be substituted with longer content in your messages."
|
||||
" Type '::' followed by the name of your shortcut (e.g. ::hello) to use in your messages.",
|
||||
)
|
||||
substitution = fields.Text(
|
||||
"Substitution",
|
||||
required=True,
|
||||
help="Content that will automatically replace the shortcut of your choosing. This content can still be adapted before sending your message.",
|
||||
)
|
||||
last_used = fields.Datetime("Last Used", help="Last time this canned_response was used")
|
||||
group_ids = fields.Many2many(
|
||||
"res.groups",
|
||||
string="Authorized Groups",
|
||||
domain=lambda self: [("id", "in", self.env.user.all_group_ids.ids)],
|
||||
)
|
||||
is_shared = fields.Boolean(
|
||||
string="Determines if the canned_response is currently shared with other users",
|
||||
compute="_compute_is_shared",
|
||||
store=True,
|
||||
)
|
||||
is_editable = fields.Boolean(
|
||||
string="Determines if the canned response can be edited by the current user",
|
||||
compute="_compute_is_editable"
|
||||
)
|
||||
|
||||
@api.depends("group_ids")
|
||||
def _compute_is_shared(self):
|
||||
for canned_response in self:
|
||||
canned_response.is_shared = bool(canned_response.group_ids)
|
||||
|
||||
@api.depends_context('uid')
|
||||
@api.depends("create_uid")
|
||||
def _compute_is_editable(self):
|
||||
creating = self.filtered(lambda c: not c.id)
|
||||
updating = self - creating
|
||||
editable = creating._filtered_access("create") + updating._filtered_access("write")
|
||||
editable.is_editable = True
|
||||
(self - editable).is_editable = False
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
res = super().create(vals_list)
|
||||
res._broadcast()
|
||||
return res
|
||||
|
||||
def write(self, vals):
|
||||
res = super().write(vals)
|
||||
self._broadcast()
|
||||
return res
|
||||
|
||||
def unlink(self):
|
||||
self._broadcast(delete=True)
|
||||
return super().unlink()
|
||||
|
||||
def _broadcast(self, /, *, delete=False):
|
||||
for canned_response in self:
|
||||
stores = [Store(bus_channel=group) for group in canned_response.group_ids]
|
||||
for user in self.env.user | canned_response.create_uid:
|
||||
if not user.all_group_ids & canned_response.group_ids:
|
||||
stores.append(Store(bus_channel=user))
|
||||
for store in stores:
|
||||
if delete:
|
||||
store.delete(canned_response)
|
||||
else:
|
||||
store.add(canned_response)
|
||||
for store in stores:
|
||||
store.bus_send()
|
||||
|
||||
def _to_store_defaults(self, target):
|
||||
return ["source", "substitution"]
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,242 +0,0 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import AccessError
|
||||
from odoo.osv import expression
|
||||
|
||||
|
||||
class ChannelMember(models.Model):
|
||||
_name = 'mail.channel.member'
|
||||
_description = 'Listeners of a Channel'
|
||||
_table = 'mail_channel_member'
|
||||
_rec_names_search = ['partner_id', 'guest_id']
|
||||
_bypass_create_check = {}
|
||||
|
||||
# identity
|
||||
partner_id = fields.Many2one('res.partner', string='Recipient', ondelete='cascade', index=True)
|
||||
guest_id = fields.Many2one(string="Guest", comodel_name='mail.guest', ondelete='cascade', readonly=True, index=True)
|
||||
partner_email = fields.Char('Email', related='partner_id.email', related_sudo=False)
|
||||
# channel
|
||||
channel_id = fields.Many2one('mail.channel', string='Channel', ondelete='cascade', readonly=True, required=True)
|
||||
# state
|
||||
custom_channel_name = fields.Char('Custom channel name')
|
||||
fetched_message_id = fields.Many2one('mail.message', string='Last Fetched', index='btree_not_null')
|
||||
seen_message_id = fields.Many2one('mail.message', string='Last Seen', index='btree_not_null')
|
||||
message_unread_counter = fields.Integer('Unread Messages Counter', compute='_compute_message_unread', compute_sudo=True)
|
||||
fold_state = fields.Selection([('open', 'Open'), ('folded', 'Folded'), ('closed', 'Closed')], string='Conversation Fold State', default='open')
|
||||
is_minimized = fields.Boolean("Conversation is minimized")
|
||||
is_pinned = fields.Boolean("Is pinned on the interface", default=True)
|
||||
last_interest_dt = fields.Datetime("Last Interest", default=fields.Datetime.now, help="Contains the date and time of the last interesting event that happened in this channel for this partner. This includes: creating, joining, pinning, and new message posted.")
|
||||
last_seen_dt = fields.Datetime("Last seen date")
|
||||
# RTC
|
||||
rtc_session_ids = fields.One2many(string="RTC Sessions", comodel_name='mail.channel.rtc.session', inverse_name='channel_member_id')
|
||||
rtc_inviting_session_id = fields.Many2one('mail.channel.rtc.session', string='Ringing session')
|
||||
|
||||
@api.depends('channel_id.message_ids', 'seen_message_id')
|
||||
def _compute_message_unread(self):
|
||||
if self.ids:
|
||||
self.env['mail.message'].flush_model()
|
||||
self.flush_recordset(['channel_id', 'seen_message_id'])
|
||||
self.env.cr.execute("""
|
||||
SELECT count(mail_message.id) AS count,
|
||||
mail_channel_member.id
|
||||
FROM mail_message
|
||||
INNER JOIN mail_channel_member
|
||||
ON mail_channel_member.channel_id = mail_message.res_id
|
||||
WHERE mail_message.model = 'mail.channel'
|
||||
AND mail_message.message_type NOT IN ('notification', 'user_notification')
|
||||
AND (
|
||||
mail_message.id > mail_channel_member.seen_message_id
|
||||
OR mail_channel_member.seen_message_id IS NULL
|
||||
)
|
||||
AND mail_channel_member.id IN %(ids)s
|
||||
GROUP BY mail_channel_member.id
|
||||
""", {'ids': tuple(self.ids)})
|
||||
unread_counter_by_member = {res['id']: res['count'] for res in self.env.cr.dictfetchall()}
|
||||
for member in self:
|
||||
member.message_unread_counter = unread_counter_by_member.get(member.id)
|
||||
else:
|
||||
self.message_unread_counter = 0
|
||||
|
||||
def name_get(self):
|
||||
return [(record.id, record.partner_id.name or record.guest_id.name) for record in self]
|
||||
|
||||
def init(self):
|
||||
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS mail_channel_member_partner_unique ON %s (channel_id, partner_id) WHERE partner_id IS NOT NULL" % self._table)
|
||||
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS mail_channel_member_guest_unique ON %s (channel_id, guest_id) WHERE guest_id IS NOT NULL" % self._table)
|
||||
|
||||
_sql_constraints = [
|
||||
("partner_or_guest_exists", "CHECK((partner_id IS NOT NULL AND guest_id IS NULL) OR (partner_id IS NULL AND guest_id IS NOT NULL))", "A channel member must be a partner or a guest."),
|
||||
]
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
"""Similar access rule as the access rule of the mail channel.
|
||||
|
||||
It can not be implemented in XML, because when the record will be created, the
|
||||
partner will be added in the channel and the security rule will always authorize
|
||||
the creation.
|
||||
"""
|
||||
if not self.env.is_admin() and not self.env.context.get('mail_create_bypass_create_check') is self._bypass_create_check:
|
||||
for vals in vals_list:
|
||||
if 'channel_id' in vals:
|
||||
channel_id = self.env['mail.channel'].browse(vals['channel_id'])
|
||||
if not channel_id._can_invite(vals.get('partner_id')):
|
||||
raise AccessError(_('This user can not be added in this channel'))
|
||||
return super().create(vals_list)
|
||||
|
||||
def write(self, vals):
|
||||
for channel_member in self:
|
||||
for field_name in {'channel_id', 'partner_id', 'guest_id'}:
|
||||
if field_name in vals and vals[field_name] != channel_member[field_name].id:
|
||||
raise AccessError(_('You can not write on %(field_name)s.', field_name=field_name))
|
||||
return super().write(vals)
|
||||
|
||||
def unlink(self):
|
||||
self.sudo().rtc_session_ids.unlink()
|
||||
return super().unlink()
|
||||
|
||||
@api.model
|
||||
def _get_as_sudo_from_request_or_raise(self, request, channel_id):
|
||||
channel_member = self._get_as_sudo_from_request(request=request, channel_id=channel_id)
|
||||
if not channel_member:
|
||||
raise NotFound()
|
||||
return channel_member
|
||||
|
||||
@api.model
|
||||
def _get_as_sudo_from_request(self, request, channel_id):
|
||||
""" Seeks a channel member matching the provided `channel_id` and the
|
||||
current user or guest.
|
||||
|
||||
:param channel_id: The id of the channel of which the user/guest is
|
||||
expected to be member.
|
||||
:type channel_id: int
|
||||
:return: A record set containing the channel member if found, or an
|
||||
empty record set otherwise. In case of guest, the record is returned
|
||||
with the 'guest' record in the context.
|
||||
:rtype: mail.channel.member
|
||||
"""
|
||||
if request.session.uid:
|
||||
return self.env['mail.channel.member'].sudo().search([('channel_id', '=', channel_id), ('partner_id', '=', self.env.user.partner_id.id)], limit=1)
|
||||
guest = self.env['mail.guest']._get_guest_from_request(request)
|
||||
if guest:
|
||||
return guest.env['mail.channel.member'].sudo().search([('channel_id', '=', channel_id), ('guest_id', '=', guest.id)], limit=1)
|
||||
return self.env['mail.channel.member'].sudo()
|
||||
|
||||
def _notify_typing(self, is_typing):
|
||||
""" Broadcast the typing notification to channel members
|
||||
:param is_typing: (boolean) tells whether the members are typing or not
|
||||
"""
|
||||
notifications = []
|
||||
for member in self:
|
||||
formatted_member = member._mail_channel_member_format().get(member)
|
||||
formatted_member['isTyping'] = is_typing
|
||||
notifications.append([member.channel_id, 'mail.channel.member/typing_status', formatted_member])
|
||||
notifications.append([member.channel_id.uuid, 'mail.channel.member/typing_status', formatted_member]) # notify livechat users
|
||||
self.env['bus.bus']._sendmany(notifications)
|
||||
|
||||
def _mail_channel_member_format(self, fields=None):
|
||||
if not fields:
|
||||
fields = {'id': True, 'channel': {}, 'persona': {}}
|
||||
members_formatted_data = {}
|
||||
for member in self:
|
||||
data = {}
|
||||
if 'id' in fields:
|
||||
data['id'] = member.id
|
||||
if 'channel' in fields:
|
||||
data['channel'] = member.channel_id._channel_format(fields=fields.get('channel')).get(member.channel_id)
|
||||
if 'persona' in fields:
|
||||
if member.partner_id:
|
||||
persona = {'partner': member._get_partner_data(fields=fields.get('persona', {}).get('partner'))}
|
||||
if member.guest_id:
|
||||
persona = {'guest': member.guest_id.sudo()._guest_format(fields=fields.get('persona', {}).get('guest')).get(member.guest_id)}
|
||||
data['persona'] = persona
|
||||
members_formatted_data[member] = data
|
||||
return members_formatted_data
|
||||
|
||||
def _get_partner_data(self, fields=None):
|
||||
self.ensure_one()
|
||||
return self.partner_id.mail_partner_format(fields=fields).get(self.partner_id)
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# RTC (voice/video)
|
||||
# --------------------------------------------------------------------------
|
||||
|
||||
def _rtc_join_call(self, check_rtc_session_ids=None):
|
||||
self.ensure_one()
|
||||
check_rtc_session_ids = (check_rtc_session_ids or []) + self.rtc_session_ids.ids
|
||||
self.channel_id._rtc_cancel_invitations(member_ids=self.ids)
|
||||
self.rtc_session_ids.unlink()
|
||||
rtc_session = self.env['mail.channel.rtc.session'].create({'channel_member_id': self.id})
|
||||
current_rtc_sessions, outdated_rtc_sessions = self._rtc_sync_sessions(check_rtc_session_ids=check_rtc_session_ids)
|
||||
res = {
|
||||
'iceServers': self.env['mail.ice.server']._get_ice_servers() or False,
|
||||
'rtcSessions': [
|
||||
('insert', [rtc_session_sudo._mail_rtc_session_format() for rtc_session_sudo in current_rtc_sessions]),
|
||||
('insert-and-unlink', [{'id': missing_rtc_session_sudo.id} for missing_rtc_session_sudo in outdated_rtc_sessions]),
|
||||
],
|
||||
'sessionId': rtc_session.id,
|
||||
}
|
||||
if len(self.channel_id.rtc_session_ids) == 1 and self.channel_id.channel_type in {'chat', 'group'}:
|
||||
self.channel_id.message_post(body=_("%s started a live conference", self.partner_id.name or self.guest_id.name), message_type='notification')
|
||||
invited_members = self._rtc_invite_members()
|
||||
if invited_members:
|
||||
res['invitedMembers'] = [('insert', list(invited_members._mail_channel_member_format(fields={'id': True, 'channel': {}, 'persona': {'partner': {'id', 'name', 'im_status'}, 'guest': {'id', 'name', 'im_status'}}}).values()))]
|
||||
return res
|
||||
|
||||
def _rtc_leave_call(self):
|
||||
self.ensure_one()
|
||||
if self.rtc_session_ids:
|
||||
self.rtc_session_ids.unlink()
|
||||
else:
|
||||
return self.channel_id._rtc_cancel_invitations(member_ids=self.ids)
|
||||
|
||||
def _rtc_sync_sessions(self, check_rtc_session_ids=None):
|
||||
"""Synchronize the RTC sessions for self channel member.
|
||||
- Inactive sessions of the channel are deleted.
|
||||
- Current sessions are returned.
|
||||
- Sessions given in check_rtc_session_ids that no longer exists
|
||||
are returned as non-existing.
|
||||
:param list check_rtc_session_ids: list of the ids of the sessions to check
|
||||
:returns tuple: (current_rtc_sessions, outdated_rtc_sessions)
|
||||
"""
|
||||
self.ensure_one()
|
||||
self.channel_id.rtc_session_ids._delete_inactive_rtc_sessions()
|
||||
check_rtc_sessions = self.env['mail.channel.rtc.session'].browse([int(check_rtc_session_id) for check_rtc_session_id in (check_rtc_session_ids or [])])
|
||||
return self.channel_id.rtc_session_ids, check_rtc_sessions - self.channel_id.rtc_session_ids
|
||||
|
||||
def _rtc_invite_members(self, member_ids=None):
|
||||
""" Sends invitations to join the RTC call to all connected members of the thread who are not already invited,
|
||||
if member_ids is set, only the specified ids will be invited.
|
||||
|
||||
:param list member_ids: list of the partner ids to invite
|
||||
"""
|
||||
self.ensure_one()
|
||||
channel_member_domain = [
|
||||
('channel_id', '=', self.channel_id.id),
|
||||
('rtc_inviting_session_id', '=', False),
|
||||
('rtc_session_ids', '=', False),
|
||||
]
|
||||
if member_ids:
|
||||
channel_member_domain = expression.AND([channel_member_domain, [('id', 'in', member_ids)]])
|
||||
invitation_notifications = []
|
||||
members = self.env['mail.channel.member'].search(channel_member_domain)
|
||||
for member in members:
|
||||
member.rtc_inviting_session_id = self.rtc_session_ids.id
|
||||
if member.partner_id:
|
||||
target = member.partner_id
|
||||
else:
|
||||
target = member.guest_id
|
||||
invitation_notifications.append((target, 'mail.thread/insert', {
|
||||
'id': self.channel_id.id,
|
||||
'model': 'mail.channel',
|
||||
'rtcInvitingSession': self.rtc_session_ids._mail_rtc_session_format(),
|
||||
}))
|
||||
self.env['bus.bus']._sendmany(invitation_notifications)
|
||||
if members:
|
||||
channel_data = {'id': self.channel_id.id, 'model': 'mail.channel'}
|
||||
channel_data['invitedMembers'] = [('insert', list(members._mail_channel_member_format(fields={'id': True, 'channel': {}, 'persona': {'partner': {'id', 'name', 'im_status'}, 'guest': {'id', 'name', 'im_status'}}}).values()))]
|
||||
self.env['bus.bus']._sendone(self.channel_id, 'mail.thread/insert', channel_data)
|
||||
return members
|
||||
|
|
@ -1,125 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from collections import defaultdict
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import api, fields, models
|
||||
|
||||
|
||||
class MailRtcSession(models.Model):
|
||||
_name = 'mail.channel.rtc.session'
|
||||
_description = 'Mail RTC session'
|
||||
|
||||
channel_member_id = fields.Many2one('mail.channel.member', required=True, ondelete='cascade')
|
||||
channel_id = fields.Many2one('mail.channel', related='channel_member_id.channel_id', store=True, readonly=True)
|
||||
partner_id = fields.Many2one('res.partner', related='channel_member_id.partner_id', string="Partner")
|
||||
guest_id = fields.Many2one('mail.guest', related='channel_member_id.guest_id')
|
||||
|
||||
write_date = fields.Datetime("Last Updated On", index=True)
|
||||
|
||||
is_screen_sharing_on = fields.Boolean(string="Is sharing the screen")
|
||||
is_camera_on = fields.Boolean(string="Is sending user video")
|
||||
is_muted = fields.Boolean(string="Is microphone muted")
|
||||
is_deaf = fields.Boolean(string="Has disabled incoming sound")
|
||||
|
||||
_sql_constraints = [
|
||||
('channel_member_unique', 'UNIQUE(channel_member_id)',
|
||||
'There can only be one rtc session per channel member')
|
||||
]
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
rtc_sessions = super().create(vals_list)
|
||||
self.env['bus.bus']._sendmany([(channel, 'mail.channel/rtc_sessions_update', {
|
||||
'id': channel.id,
|
||||
'rtcSessions': [('insert', sessions_data)],
|
||||
}) for channel, sessions_data in rtc_sessions._mail_rtc_session_format_by_channel().items()])
|
||||
return rtc_sessions
|
||||
|
||||
def unlink(self):
|
||||
channels = self.channel_id
|
||||
for channel in channels:
|
||||
if channel.rtc_session_ids and len(channel.rtc_session_ids - self) == 0:
|
||||
# If there is no member left in the RTC call, all invitations are cancelled.
|
||||
# Note: invitation depends on field `rtc_inviting_session_id` so the cancel must be
|
||||
# done before the delete to be able to know who was invited.
|
||||
channel._rtc_cancel_invitations()
|
||||
notifications = [(channel, 'mail.channel/rtc_sessions_update', {
|
||||
'id': channel.id,
|
||||
'rtcSessions': [('insert-and-unlink', [{'id': session_data['id']} for session_data in sessions_data])],
|
||||
}) for channel, sessions_data in self._mail_rtc_session_format_by_channel().items()]
|
||||
for rtc_session in self:
|
||||
target = rtc_session.guest_id or rtc_session.partner_id
|
||||
notifications.append((target, 'mail.channel.rtc.session/ended', {'sessionId': rtc_session.id}))
|
||||
self.env['bus.bus']._sendmany(notifications)
|
||||
return super().unlink()
|
||||
|
||||
def _update_and_broadcast(self, values):
|
||||
""" Updates the session and notifies all members of the channel
|
||||
of the change.
|
||||
"""
|
||||
valid_values = {'is_screen_sharing_on', 'is_camera_on', 'is_muted', 'is_deaf'}
|
||||
self.write({key: values[key] for key in valid_values if key in valid_values})
|
||||
session_data = self._mail_rtc_session_format()
|
||||
self.env['bus.bus']._sendone(self.channel_id, 'mail.channel.rtc.session/insert', session_data)
|
||||
|
||||
@api.autovacuum
|
||||
def _gc_inactive_sessions(self):
|
||||
""" Garbage collect sessions that aren't active anymore,
|
||||
this can happen when the server or the user's browser crash
|
||||
or when the user's odoo session ends.
|
||||
"""
|
||||
self.search(self._inactive_rtc_session_domain()).unlink()
|
||||
|
||||
def action_disconnect(self):
|
||||
self.unlink()
|
||||
|
||||
def _delete_inactive_rtc_sessions(self):
|
||||
"""Deletes the inactive sessions from self."""
|
||||
self.filtered_domain(self._inactive_rtc_session_domain()).unlink()
|
||||
|
||||
def _notify_peers(self, notifications):
|
||||
""" Used for peer-to-peer communication,
|
||||
guarantees that the sender is the current guest or partner.
|
||||
|
||||
:param notifications: list of tuple with the following elements:
|
||||
- target_session_ids: a list of mail.channel.rtc.session ids
|
||||
- content: a string with the content to be sent to the targets
|
||||
"""
|
||||
self.ensure_one()
|
||||
payload_by_target = defaultdict(lambda: {'sender': self.id, 'notifications': []})
|
||||
for target_session_ids, content in notifications:
|
||||
for target_session in self.env['mail.channel.rtc.session'].browse(target_session_ids).exists():
|
||||
target = target_session.guest_id or target_session.partner_id
|
||||
payload_by_target[target]['notifications'].append(content)
|
||||
return self.env['bus.bus']._sendmany([(target, 'mail.channel.rtc.session/peer_notification', payload) for target, payload in payload_by_target.items()])
|
||||
|
||||
def _mail_rtc_session_format(self, fields=None):
|
||||
self.ensure_one()
|
||||
if not fields:
|
||||
fields = {'id': True, 'channelMember': {'id': True, 'channel': {}, 'persona': {'partner': {'id', 'name', 'im_status'}, 'guest': {'id', 'name', 'im_status'}}}, 'isCameraOn': True, 'isDeaf': True, 'isSelfMuted': True, 'isScreenSharingOn': True}
|
||||
vals = {}
|
||||
if 'id' in fields:
|
||||
vals['id'] = self.id
|
||||
if 'channelMember' in fields:
|
||||
vals['channelMember'] = self.channel_member_id._mail_channel_member_format(fields=fields.get('channelMember')).get(self.channel_member_id)
|
||||
if 'isCameraOn' in fields:
|
||||
vals['isCameraOn'] = self.is_camera_on
|
||||
if 'isDeaf' in fields:
|
||||
vals['isDeaf'] = self.is_deaf
|
||||
if 'isSelfMuted' in fields:
|
||||
vals['isSelfMuted'] = self.is_muted
|
||||
if 'isScreenSharingOn' in fields:
|
||||
vals['isScreenSharingOn'] = self.is_screen_sharing_on
|
||||
return vals
|
||||
|
||||
def _mail_rtc_session_format_by_channel(self):
|
||||
data = {}
|
||||
for rtc_session in self:
|
||||
data.setdefault(rtc_session.channel_id, []).append(rtc_session._mail_rtc_session_format())
|
||||
return data
|
||||
|
||||
@api.model
|
||||
def _inactive_rtc_session_domain(self):
|
||||
return [('write_date', '<', fields.Datetime.now() - relativedelta(minutes=1))]
|
||||
|
|
@ -17,33 +17,84 @@ class MailComposerMixin(models.AbstractModel):
|
|||
grain control of rendering access.
|
||||
"""
|
||||
_name = 'mail.composer.mixin'
|
||||
_inherit = 'mail.render.mixin'
|
||||
_inherit = ['mail.render.mixin']
|
||||
_description = 'Mail Composer Mixin'
|
||||
|
||||
# Content
|
||||
subject = fields.Char('Subject', compute='_compute_subject', readonly=False, store=True, compute_sudo=False)
|
||||
body = fields.Html('Contents', compute='_compute_body', render_engine='qweb', store=True, readonly=False, sanitize=False, compute_sudo=False)
|
||||
body = fields.Html(
|
||||
'Contents', compute='_compute_body', readonly=False, store=True, compute_sudo=False,
|
||||
render_engine='qweb', render_options={'post_process': True}, sanitize='email_outgoing')
|
||||
body_has_template_value = fields.Boolean(
|
||||
'Body content is the same as the template',
|
||||
compute='_compute_body_has_template_value',
|
||||
)
|
||||
template_id = fields.Many2one('mail.template', 'Mail Template', domain="[('model', '=', render_model)]")
|
||||
# Language: override mail.render.mixin field, copy template value
|
||||
lang = fields.Char(compute='_compute_lang', precompute=True, readonly=False, store=True, compute_sudo=False)
|
||||
# Access
|
||||
is_mail_template_editor = fields.Boolean('Is Editor', compute='_compute_is_mail_template_editor')
|
||||
can_edit_body = fields.Boolean('Can Edit Body', compute='_compute_can_edit_body')
|
||||
|
||||
@api.depends('template_id')
|
||||
def _compute_subject(self):
|
||||
""" Computation is coming either from template, either reset. When
|
||||
having a template with a value set, copy it. When removing the
|
||||
template, reset it. """
|
||||
for composer_mixin in self:
|
||||
if composer_mixin.template_id:
|
||||
if composer_mixin.template_id.subject:
|
||||
composer_mixin.subject = composer_mixin.template_id.subject
|
||||
elif not composer_mixin.subject:
|
||||
elif not composer_mixin.template_id:
|
||||
composer_mixin.subject = False
|
||||
|
||||
@api.depends('template_id')
|
||||
def _compute_body(self):
|
||||
""" Computation is coming either from template, either reset. When
|
||||
having a template with a value set, copy it. When removing the
|
||||
template, reset it. """
|
||||
for composer_mixin in self:
|
||||
if composer_mixin.template_id:
|
||||
if not tools.is_html_empty(composer_mixin.template_id.body_html):
|
||||
composer_mixin.body = composer_mixin.template_id.body_html
|
||||
elif not composer_mixin.body:
|
||||
elif not composer_mixin.template_id:
|
||||
composer_mixin.body = False
|
||||
|
||||
@api.depends('body', 'template_id')
|
||||
def _compute_body_has_template_value(self):
|
||||
""" Computes if the current body is the same as the one from template.
|
||||
Both real and sanitized values are considered, to avoid editor issues
|
||||
as much as possible. """
|
||||
for composer_mixin in self:
|
||||
if not tools.is_html_empty(composer_mixin.body) and composer_mixin.template_id:
|
||||
template_value = composer_mixin.template_id.body_html
|
||||
# matching email_outgoing sanitize level
|
||||
sanitize_vals = {
|
||||
'output_method': 'xml',
|
||||
'sanitize_attributes': False,
|
||||
'sanitize_conditional_comments': False,
|
||||
'sanitize_form': True,
|
||||
'sanitize_style': True,
|
||||
'sanitize_tags': False,
|
||||
'silent': True,
|
||||
'strip_classes': False,
|
||||
'strip_style': False,
|
||||
}
|
||||
sanitized_template_value = tools.html_sanitize(template_value, **sanitize_vals)
|
||||
composer_mixin.body_has_template_value = composer_mixin.body in (template_value,
|
||||
sanitized_template_value)
|
||||
else:
|
||||
composer_mixin.body_has_template_value = False
|
||||
|
||||
@api.depends('template_id')
|
||||
def _compute_lang(self):
|
||||
""" Computation is coming either from template, either reset. When
|
||||
having a template with a value set, copy it. When removing the
|
||||
template, reset it. """
|
||||
for composer_mixin in self:
|
||||
if composer_mixin.template_id.lang:
|
||||
composer_mixin.lang = composer_mixin.template_id.lang
|
||||
elif not composer_mixin.template_id:
|
||||
composer_mixin.lang = False
|
||||
|
||||
@api.depends_context('uid')
|
||||
def _compute_is_mail_template_editor(self):
|
||||
is_mail_template_editor = self.env.is_admin() or self.env.user.has_group('mail.group_mail_template_editor')
|
||||
|
|
@ -58,36 +109,97 @@ class MailComposerMixin(models.AbstractModel):
|
|||
or not record.template_id
|
||||
)
|
||||
|
||||
def _render_field(self, field, *args, **kwargs):
|
||||
"""Render the given field on the given records.
|
||||
This method bypass the rights when needed to
|
||||
be able to render the template values in mass mode.
|
||||
def _render_lang(self, res_ids, engine='inline_template'):
|
||||
""" Given some record ids, return the lang for each record based on
|
||||
lang field of template or through specific context-based key.
|
||||
This method enters sudo mode to allow qweb rendering (which
|
||||
is otherwise reserved for the 'mail template editor' group')
|
||||
if we consider it safe. Safe means content comes from the template
|
||||
which is a validated master data. As a summary the heuristic is :
|
||||
|
||||
* if no template, do not bypass the check;
|
||||
* if record lang and template lang are the same, bypass the check;
|
||||
"""
|
||||
if field not in self._fields:
|
||||
raise ValueError(_("The field %s does not exist on the model %s", field, self._name))
|
||||
|
||||
if not self.template_id:
|
||||
# Do not need to bypass the verification
|
||||
return super()._render_lang(res_ids, engine=engine)
|
||||
|
||||
composer_value = self.lang
|
||||
template_value = self.template_id.lang
|
||||
|
||||
call_sudo = False
|
||||
equality = composer_value == template_value or (not composer_value and not template_value)
|
||||
if not self.is_mail_template_editor and equality:
|
||||
call_sudo = True
|
||||
|
||||
record = self.sudo() if call_sudo else self
|
||||
return super(MailComposerMixin, record)._render_lang(res_ids, engine=engine)
|
||||
|
||||
def _render_field(self, field, res_ids, *args, **kwargs):
|
||||
""" Render the given field on the given records. This method enters
|
||||
sudo mode to allow qweb rendering (which is otherwise reserved for
|
||||
the 'mail template editor' group') if we consider it safe. Safe
|
||||
means content comes from the template which is a validated master
|
||||
data. As a summary the heuristic is :
|
||||
|
||||
* if no template, do not bypass the check;
|
||||
* if current user is a template editor, do not bypass the check;
|
||||
* if record value and template value are the same (or equals the
|
||||
sanitized value in case of an HTML field), bypass the check;
|
||||
* for body: if current user cannot edit it, force template value back
|
||||
then bypass the check;
|
||||
|
||||
Also provide support to fetch translations on the remote template.
|
||||
Indeed translations are often done on the master template, not on the
|
||||
specific composer itself. In that case we need to work on template
|
||||
value when it has not been modified in the composer. """
|
||||
if field not in self:
|
||||
raise ValueError(
|
||||
_('Rendering of %(field_name)s is not possible as not defined on template.',
|
||||
field_name=field
|
||||
)
|
||||
)
|
||||
|
||||
if not self.template_id:
|
||||
# Do not need to bypass the verification
|
||||
return super()._render_field(field, res_ids, *args, **kwargs)
|
||||
|
||||
# template-based access check + translation check
|
||||
template_field = {
|
||||
'body': 'body_html',
|
||||
}.get(field, field)
|
||||
if template_field not in self.template_id:
|
||||
raise ValueError(
|
||||
_('Rendering of %(field_name)s is not possible as no counterpart on template.',
|
||||
field_name=field
|
||||
)
|
||||
)
|
||||
|
||||
composer_value = self[field]
|
||||
|
||||
if (
|
||||
not self.template_id
|
||||
or self.is_mail_template_editor
|
||||
):
|
||||
# Do not need to bypass the verification
|
||||
return super(MailComposerMixin, self)._render_field(field, *args, **kwargs)
|
||||
|
||||
template_field = 'body_html' if field == 'body' else field
|
||||
assert template_field in self.template_id._fields
|
||||
template_value = self.template_id[template_field]
|
||||
translation_asked = kwargs.get('compute_lang') or kwargs.get('set_lang')
|
||||
equality = self.body_has_template_value if field == 'body' else composer_value == template_value
|
||||
|
||||
if field == 'body':
|
||||
sanitized_template_value = tools.html_sanitize(template_value)
|
||||
if not self.can_edit_body or composer_value in (sanitized_template_value, template_value):
|
||||
# Take the previous body which we can trust without HTML editor reformatting
|
||||
self.body = self.template_id.body_html
|
||||
return super(MailComposerMixin, self.sudo())._render_field(field, *args, **kwargs)
|
||||
call_sudo = False
|
||||
if (not self.is_mail_template_editor and field == 'body' and
|
||||
(not self.can_edit_body or self.body_has_template_value)):
|
||||
call_sudo = True
|
||||
# take the previous body which we can trust without HTML editor reformatting
|
||||
self.body = self.template_id.body_html
|
||||
if (not self.is_mail_template_editor and field != 'body' and
|
||||
composer_value == template_value):
|
||||
call_sudo = True
|
||||
|
||||
elif composer_value == template_value:
|
||||
# The value is the same as the mail template so we trust it
|
||||
return super(MailComposerMixin, self.sudo())._render_field(field, *args, **kwargs)
|
||||
if translation_asked and equality:
|
||||
# use possibly custom lang template changed on composer instead of
|
||||
# original template one
|
||||
if not kwargs.get('res_ids_lang'):
|
||||
kwargs['res_ids_lang'] = self._render_lang(res_ids)
|
||||
template = self.template_id.sudo() if call_sudo else self.template_id
|
||||
return template._render_field(
|
||||
template_field, res_ids, *args, **kwargs,
|
||||
)
|
||||
|
||||
return super(MailComposerMixin, self)._render_field(field, *args, **kwargs)
|
||||
record = self.sudo() if call_sudo else self
|
||||
return super(MailComposerMixin, record)._render_field(field, res_ids, *args, **kwargs)
|
||||
|
|
|
|||
|
|
@ -5,9 +5,10 @@ from collections import defaultdict
|
|||
import itertools
|
||||
|
||||
from odoo import api, fields, models, Command
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class Followers(models.Model):
|
||||
class MailFollowers(models.Model):
|
||||
""" mail_followers holds the data related to the follow mechanism inside
|
||||
Odoo. Partners can choose to follow documents (records) of any kind
|
||||
that inherits from mail.thread. Following documents allow to receive
|
||||
|
|
@ -17,7 +18,6 @@ class Followers(models.Model):
|
|||
:param: res_id: ID of resource (may be 0 for every objects)
|
||||
"""
|
||||
_name = 'mail.followers'
|
||||
_rec_name = 'partner_id'
|
||||
_log_access = False
|
||||
_description = 'Document Followers'
|
||||
|
||||
|
|
@ -29,7 +29,7 @@ class Followers(models.Model):
|
|||
res_id = fields.Many2oneReference(
|
||||
'Related Document ID', index=True, help='Id of the followed resource', model_field='res_model')
|
||||
partner_id = fields.Many2one(
|
||||
'res.partner', string='Related Partner', index=True, ondelete='cascade', required=True, domain=[('type', '!=', 'private')])
|
||||
'res.partner', string='Related Partner', index=True, ondelete='cascade', required=True)
|
||||
subtype_ids = fields.Many2many(
|
||||
'mail.message.subtype', string='Subtype',
|
||||
help="Message subtypes followed, meaning subtypes that will be pushed onto the user's Wall.")
|
||||
|
|
@ -50,69 +50,113 @@ class Followers(models.Model):
|
|||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
res = super(Followers, self).create(vals_list)
|
||||
res = super().create(vals_list)
|
||||
res._invalidate_documents(vals_list)
|
||||
return res
|
||||
|
||||
def write(self, vals):
|
||||
if 'res_model' in vals or 'res_id' in vals:
|
||||
self._invalidate_documents()
|
||||
res = super(Followers, self).write(vals)
|
||||
res = super().write(vals)
|
||||
if any(x in vals for x in ['res_model', 'res_id', 'partner_id']):
|
||||
self._invalidate_documents()
|
||||
return res
|
||||
|
||||
def unlink(self):
|
||||
self._invalidate_documents()
|
||||
return super(Followers, self).unlink()
|
||||
return super().unlink()
|
||||
|
||||
_sql_constraints = [
|
||||
('mail_followers_res_partner_res_model_id_uniq', 'unique(res_model,res_id,partner_id)', 'Error, a partner cannot follow twice the same object.'),
|
||||
]
|
||||
_mail_followers_res_partner_res_model_id_uniq = models.Constraint(
|
||||
'unique(res_model,res_id,partner_id)',
|
||||
'Error, a partner cannot follow twice the same object.',
|
||||
)
|
||||
|
||||
@api.depends("partner_id")
|
||||
def _compute_display_name(self):
|
||||
for follower in self:
|
||||
# sudo: res.partner - can read partners of accessible followers, in particular allows
|
||||
# by-passing multi-company ACL for portal partners
|
||||
follower.display_name = follower.partner_id.sudo().display_name
|
||||
|
||||
# --------------------------------------------------
|
||||
# Private tools methods to fetch followers data
|
||||
# --------------------------------------------------
|
||||
|
||||
@api.model
|
||||
def _get_mail_doc_to_followers(self, mail_ids):
|
||||
""" Get partner mail recipients that follows the related record of the mails.
|
||||
|
||||
:param list mail_ids: mail_mail ids
|
||||
|
||||
:return: for each (model, document_id): list of partner ids that are followers
|
||||
:rtype: dict
|
||||
"""
|
||||
if not mail_ids:
|
||||
return {}
|
||||
self.env['mail.mail'].flush_model(['mail_message_id', 'recipient_ids'])
|
||||
self.env['mail.followers'].flush_model(['partner_id', 'res_model', 'res_id'])
|
||||
self.env['mail.message'].flush_model(['model', 'res_id'])
|
||||
# mail_mail_res_partner_rel is the join table for the m2m recipient_ids field
|
||||
self.env.cr.execute("""
|
||||
SELECT message.model, message.res_id, mail_partner.res_partner_id
|
||||
FROM mail_mail mail
|
||||
JOIN mail_mail_res_partner_rel mail_partner ON mail_partner.mail_mail_id = mail.id
|
||||
JOIN mail_message message ON mail.mail_message_id = message.id
|
||||
JOIN mail_followers follower ON message.model = follower.res_model
|
||||
AND message.res_id = follower.res_id
|
||||
AND mail_partner.res_partner_id = follower.partner_id
|
||||
WHERE mail.id IN %(mail_ids)s
|
||||
""", {'mail_ids': tuple(mail_ids)})
|
||||
res = defaultdict(list)
|
||||
for model, doc_id, partner_id in self.env.cr.fetchall():
|
||||
res[(model, doc_id)].append(partner_id)
|
||||
return res
|
||||
|
||||
def _get_recipient_data(self, records, message_type, subtype_id, pids=None):
|
||||
""" Private method allowing to fetch recipients data based on a subtype.
|
||||
Purpose of this method is to fetch all data necessary to notify recipients
|
||||
in a single query. It fetches data from
|
||||
|
||||
* followers (partners and channels) of records that follow the given
|
||||
subtype if records and subtype are set;
|
||||
* followers of records that follow the given subtype if records and
|
||||
subtype are set;
|
||||
* partners if pids is given;
|
||||
|
||||
:param records: fetch data from followers of ``records`` that follow
|
||||
``subtype_id``;
|
||||
:param message_type: mail.message.message_type in order to allow custom
|
||||
:param str message_type: mail.message.message_type in order to allow custom
|
||||
behavior depending on it (SMS for example);
|
||||
:param subtype_id: mail.message.subtype to check against followers;
|
||||
:param int subtype_id: mail.message.subtype to check against followers;
|
||||
:param pids: additional set of partner IDs from which to fetch recipient
|
||||
data independently from following status;
|
||||
|
||||
:return dict: recipients data based on record.ids if given, else a generic
|
||||
:returns: recipients data based on record.ids if given, else a generic
|
||||
'0' key to keep a dict-like return format. Each item is a dict based on
|
||||
recipients partner ids formatted like
|
||||
{'active': whether partner is active;
|
||||
'id': res.partner ID;
|
||||
'is_follower': True if linked to a record and if partner is a follower;
|
||||
'lang': lang of the partner;
|
||||
'groups': groups of the partner's user. If several users exist preference
|
||||
is given to internal user, then share users. In case of multiples
|
||||
users of same kind groups are unioned;
|
||||
recipients partner ids formatted like {
|
||||
'active': partner.active;
|
||||
'email_normalized': partner.email_normalized;
|
||||
'id': res.partner ID;
|
||||
'is_follower': True if linked to a record and if partner is a follower;
|
||||
'lang': partner.lang;
|
||||
'name': partner.name;
|
||||
'groups': groups of the partner's user (see 'uid'). If several users
|
||||
of the same kind (e.g. several internal users) exist groups are
|
||||
concatenated;
|
||||
'notif': notification type ('inbox' or 'email'). Overrides may change
|
||||
this value (e.g. 'sms' in sms module);
|
||||
'share': if partner is a customer (no user or share user);
|
||||
'ushare': if partner has users, whether all are shared (public or portal);
|
||||
'type': summary of partner 'usage' (portal, customer, internal user);
|
||||
'type': summary of partner 'usage' (a string among 'portal', 'customer',
|
||||
'internal user');
|
||||
'uid': linked 'res.users' ID. If several users exist preference is
|
||||
given to internal user, then share users;
|
||||
}
|
||||
:rtype: dict
|
||||
"""
|
||||
self.env['mail.followers'].flush_model(['partner_id', 'subtype_ids'])
|
||||
self.env['mail.message.subtype'].flush_model(['internal'])
|
||||
self.env['res.users'].flush_model(['notification_type', 'active', 'partner_id', 'groups_id'])
|
||||
self.env['res.partner'].flush_model(['active', 'partner_share'])
|
||||
self.env['res.groups'].flush_model(['users'])
|
||||
self.env['res.users'].flush_model(['notification_type', 'active', 'partner_id', 'group_ids'])
|
||||
self.env['res.partner'].flush_model(['active', 'email_normalized', 'name', 'partner_share'])
|
||||
self.env['res.groups'].flush_model(['user_ids'])
|
||||
# if we have records and a subtype: we have to fetch followers, unless being
|
||||
# in user notification mode (contact only pids)
|
||||
if message_type != 'user_notification' and records and subtype_id:
|
||||
|
|
@ -148,7 +192,9 @@ class Followers(models.Model):
|
|||
)
|
||||
SELECT partner.id as pid,
|
||||
partner.active as active,
|
||||
partner.email_normalized AS email_normalized,
|
||||
partner.lang as lang,
|
||||
partner.name as name,
|
||||
partner.partner_share as pshare,
|
||||
sub_user.uid as uid,
|
||||
COALESCE(sub_user.share, FALSE) as ushare,
|
||||
|
|
@ -185,7 +231,9 @@ class Followers(models.Model):
|
|||
query = """
|
||||
SELECT partner.id as pid,
|
||||
partner.active as active,
|
||||
partner.email_normalized AS email_normalized,
|
||||
partner.lang as lang,
|
||||
partner.name as name,
|
||||
partner.partner_share as pshare,
|
||||
sub_user.uid as uid,
|
||||
COALESCE(sub_user.share, FALSE) as ushare,
|
||||
|
|
@ -237,7 +285,9 @@ class Followers(models.Model):
|
|||
query = """
|
||||
SELECT partner.id as pid,
|
||||
partner.active as active,
|
||||
partner.email_normalized AS email_normalized,
|
||||
partner.lang as lang,
|
||||
partner.name as name,
|
||||
partner.partner_share as pshare,
|
||||
sub_user.uid as uid,
|
||||
COALESCE(sub_user.share, FALSE) as ushare,
|
||||
|
|
@ -276,17 +326,26 @@ class Followers(models.Model):
|
|||
|
||||
res_ids = records.ids if records else [0]
|
||||
doc_infos = dict((res_id, {}) for res_id in res_ids)
|
||||
for (partner_id, is_active, lang, pshare, uid, ushare, notif, groups, res_id, is_follower) in res:
|
||||
for (
|
||||
partner_id, is_active, email_normalized, lang, name,
|
||||
pshare, uid, ushare, notif, groups, res_id, is_follower
|
||||
) in res:
|
||||
to_update = [res_id] if res_id else res_ids
|
||||
# add transitive closure of implied groups; note that the field
|
||||
# all_implied_ids relies on ormcache'd data, which shouldn't add
|
||||
# more queries
|
||||
groups = self.env['res.groups'].browse(set(groups or [])).all_implied_ids.ids
|
||||
for res_id_to_update in to_update:
|
||||
# avoid updating already existing information, unnecessary dict update
|
||||
if not res_id and partner_id in doc_infos[res_id_to_update]:
|
||||
continue
|
||||
follower_data = {
|
||||
'active': is_active,
|
||||
'email_normalized': email_normalized,
|
||||
'id': partner_id,
|
||||
'is_follower': is_follower,
|
||||
'lang': lang,
|
||||
'name': name,
|
||||
'groups': set(groups or []),
|
||||
'notif': notif,
|
||||
'share': pshare,
|
||||
|
|
@ -306,7 +365,7 @@ class Followers(models.Model):
|
|||
|
||||
def _get_subscription_data(self, doc_data, pids, include_pshare=False, include_active=False):
|
||||
""" Private method allowing to fetch follower data from several documents of a given model.
|
||||
Followers can be filtered given partner IDs and channel IDs.
|
||||
MailFollowers can be filtered given partner IDs and channel IDs.
|
||||
|
||||
:param doc_data: list of pair (res_model, res_ids) that are the documents from which we
|
||||
want to have subscription data;
|
||||
|
|
@ -322,8 +381,8 @@ class Followers(models.Model):
|
|||
share status of partner (returned only if include_pshare is True)
|
||||
active flag status of partner (returned only if include_active is True)
|
||||
"""
|
||||
self.env['mail.followers'].flush_model()
|
||||
self.env['res.partner'].flush_model()
|
||||
self.env['mail.followers'].flush_model(['partner_id', 'res_id', 'res_model', 'subtype_ids'])
|
||||
self.env['res.partner'].flush_model(['active', 'partner_share'])
|
||||
# base query: fetch followers of given documents
|
||||
where_clause = ' OR '.join(['fol.res_model = %s AND fol.res_id IN %s'] * len(doc_data))
|
||||
where_params = list(itertools.chain.from_iterable((rm, tuple(rids)) for rm, rids in doc_data))
|
||||
|
|
@ -433,14 +492,11 @@ GROUP BY fol.id%s%s""" % (
|
|||
:param subtypes: optional subtypes for new partner followers. This
|
||||
is a dict whose keys are partner IDs and value subtype IDs for that
|
||||
partner.
|
||||
:param channel_subtypes: optional subtypes for new channel followers. This
|
||||
is a dict whose keys are channel IDs and value subtype IDs for that
|
||||
channel.
|
||||
:param check_existing: if True, check for existing followers for given
|
||||
documents and handle them according to existing_policy parameter.
|
||||
Setting to False allows to save some computation if caller is sure
|
||||
there are no conflict for followers;
|
||||
:param existing policy: if check_existing, tells what to do with already
|
||||
:param existing_policy: if check_existing, tells what to do with already
|
||||
existing followers:
|
||||
|
||||
* skip: simply skip existing followers, do not touch them;
|
||||
|
|
@ -483,3 +539,19 @@ GROUP BY fol.id%s%s""" % (
|
|||
update[fol_id] = {'subtype_ids': update_cmd}
|
||||
|
||||
return new, update
|
||||
|
||||
# --------------------------------------------------
|
||||
# Misc discuss
|
||||
# --------------------------------------------------
|
||||
|
||||
def _to_store_defaults(self, target):
|
||||
return [
|
||||
"display_name",
|
||||
"email",
|
||||
"is_active",
|
||||
"name",
|
||||
# sudo: res.partner - can read partners of found followers, in particular allows
|
||||
# by-passing multi-company ACL for portal partners
|
||||
Store.One("partner_id", sudo=True),
|
||||
Store.One("thread", [], as_thread=True),
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from markupsafe import Markup
|
||||
from odoo import _, api, fields, models, tools
|
||||
|
||||
|
||||
|
|
@ -16,11 +17,10 @@ class MailGatewayAllowed(models.Model):
|
|||
from an automated-source. This model stores those trusted source and this restriction
|
||||
won't apply to them.
|
||||
"""
|
||||
|
||||
_description = 'Mail Gateway Allowed'
|
||||
_name = 'mail.gateway.allowed'
|
||||
_description = 'Mail Gateway Allowed'
|
||||
|
||||
email = fields.Char('Email Address')
|
||||
email = fields.Char('Email Address', required=True)
|
||||
email_normalized = fields.Char(
|
||||
string='Normalized Email', compute='_compute_email_normalized', store=True, index=True)
|
||||
|
||||
|
|
@ -35,13 +35,15 @@ class MailGatewayAllowed(models.Model):
|
|||
LOOP_MINUTES = int(get_param('mail.gateway.loop.minutes', 120))
|
||||
LOOP_THRESHOLD = int(get_param('mail.gateway.loop.threshold', 20))
|
||||
|
||||
return _('''
|
||||
return Markup(_('''
|
||||
<p class="o_view_nocontent_smiling_face">
|
||||
Add addresses to the Allowed List
|
||||
</p><p>
|
||||
To protect you from spam and reply loops, Odoo automatically blocks emails
|
||||
coming to your gateway past a threshold of <b>%i</b> emails every <b>%i</b>
|
||||
coming to your gateway past a threshold of <b>%(threshold)i</b> emails every <b>%(minutes)i</b>
|
||||
minutes. If there are some addresses from which you need to receive very frequent
|
||||
updates, you can however add them below and Odoo will let them go through.
|
||||
</p>
|
||||
''', LOOP_THRESHOLD, LOOP_MINUTES)
|
||||
</p>''')) % {
|
||||
'threshold': LOOP_THRESHOLD,
|
||||
'minutes': LOOP_MINUTES,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,140 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import pytz
|
||||
import uuid
|
||||
|
||||
from odoo.tools import consteq
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.addons.base.models.res_partner import _tz_get
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.addons.bus.models.bus_presence import AWAY_TIMER, DISCONNECTION_TIMER
|
||||
|
||||
|
||||
class MailGuest(models.Model):
|
||||
_name = 'mail.guest'
|
||||
_description = "Guest"
|
||||
_inherit = ['avatar.mixin']
|
||||
_avatar_name_field = "name"
|
||||
_cookie_name = 'dgid'
|
||||
_cookie_separator = '|'
|
||||
|
||||
@api.model
|
||||
def _lang_get(self):
|
||||
return self.env['res.lang'].get_installed()
|
||||
|
||||
name = fields.Char(string="Name", required=True)
|
||||
access_token = fields.Char(string="Access Token", default=lambda self: str(uuid.uuid4()), groups='base.group_system', required=True, readonly=True, copy=False)
|
||||
country_id = fields.Many2one(string="Country", comodel_name='res.country')
|
||||
lang = fields.Selection(string="Language", selection=_lang_get)
|
||||
timezone = fields.Selection(string="Timezone", selection=_tz_get)
|
||||
channel_ids = fields.Many2many(string="Channels", comodel_name='mail.channel', relation='mail_channel_member', column1='guest_id', column2='channel_id', copy=False)
|
||||
im_status = fields.Char('IM Status', compute='_compute_im_status')
|
||||
|
||||
def _compute_im_status(self):
|
||||
self.env.cr.execute("""
|
||||
SELECT
|
||||
guest_id as id,
|
||||
CASE WHEN age(now() AT TIME ZONE 'UTC', last_poll) > interval %s THEN 'offline'
|
||||
WHEN age(now() AT TIME ZONE 'UTC', last_presence) > interval %s THEN 'away'
|
||||
ELSE 'online'
|
||||
END as status
|
||||
FROM bus_presence
|
||||
WHERE guest_id IN %s
|
||||
""", ("%s seconds" % DISCONNECTION_TIMER, "%s seconds" % AWAY_TIMER, tuple(self.ids)))
|
||||
res = dict(((status['id'], status['status']) for status in self.env.cr.dictfetchall()))
|
||||
for guest in self:
|
||||
guest.im_status = res.get(guest.id, 'offline')
|
||||
|
||||
def _get_guest_from_context(self):
|
||||
"""Returns the current guest record from the context, if applicable."""
|
||||
guest = self.env.context.get('guest')
|
||||
if isinstance(guest, self.pool['mail.guest']):
|
||||
return guest
|
||||
return self.env['mail.guest']
|
||||
|
||||
def _get_guest_from_request(self, request):
|
||||
parts = request.httprequest.cookies.get(self._cookie_name, '').split(self._cookie_separator)
|
||||
if len(parts) != 2:
|
||||
return self.env['mail.guest']
|
||||
guest_id, guest_access_token = parts
|
||||
if not guest_id or not guest_access_token:
|
||||
return self.env['mail.guest']
|
||||
guest = self.env['mail.guest'].browse(int(guest_id)).sudo().exists()
|
||||
if not guest or not guest.access_token or not consteq(guest.access_token, guest_access_token):
|
||||
return self.env['mail.guest']
|
||||
if not guest.timezone:
|
||||
timezone = self._get_timezone_from_request(request)
|
||||
if timezone:
|
||||
guest._update_timezone(timezone)
|
||||
return guest.sudo(False).with_context(guest=guest)
|
||||
|
||||
def _get_timezone_from_request(self, request):
|
||||
timezone = request.httprequest.cookies.get('tz')
|
||||
return timezone if timezone in pytz.all_timezones else False
|
||||
|
||||
def _update_name(self, name):
|
||||
self.ensure_one()
|
||||
name = name.strip()
|
||||
if len(name) < 1:
|
||||
raise UserError(_("Guest's name cannot be empty."))
|
||||
if len(name) > 512:
|
||||
raise UserError(_("Guest's name is too long."))
|
||||
self.name = name
|
||||
guest_data = {
|
||||
'id': self.id,
|
||||
'name': self.name
|
||||
}
|
||||
bus_notifs = [(channel, 'mail.guest/insert', guest_data) for channel in self.channel_ids]
|
||||
bus_notifs.append((self, 'mail.guest/insert', guest_data))
|
||||
self.env['bus.bus']._sendmany(bus_notifs)
|
||||
|
||||
def _update_timezone(self, timezone):
|
||||
query = """
|
||||
UPDATE mail_guest
|
||||
SET timezone = %s
|
||||
WHERE id IN (
|
||||
SELECT id FROM mail_guest WHERE id = %s
|
||||
FOR NO KEY UPDATE SKIP LOCKED
|
||||
)
|
||||
"""
|
||||
self.env.cr.execute(query, (timezone, self.id))
|
||||
|
||||
def _init_messaging(self):
|
||||
self.ensure_one()
|
||||
partner_root = self.env.ref('base.partner_root')
|
||||
return {
|
||||
'channels': self.channel_ids.channel_info(),
|
||||
'companyName': self.env.company.name,
|
||||
'currentGuest': {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
},
|
||||
'current_partner': False,
|
||||
'current_user_id': False,
|
||||
'current_user_settings': False,
|
||||
'hasLinkPreviewFeature': self.env['mail.link.preview']._is_link_preview_enabled(),
|
||||
'menu_id': False,
|
||||
'needaction_inbox_counter': False,
|
||||
'partner_root': {
|
||||
'id': partner_root.id,
|
||||
'name': partner_root.name,
|
||||
},
|
||||
'shortcodes': [],
|
||||
'starred_counter': False,
|
||||
}
|
||||
|
||||
def _guest_format(self, fields=None):
|
||||
if not fields:
|
||||
fields = {'id': True, 'name': True, 'im_status': True}
|
||||
guests_formatted_data = {}
|
||||
for guest in self:
|
||||
data = {}
|
||||
if 'id' in fields:
|
||||
data['id'] = guest.id
|
||||
if 'name' in fields:
|
||||
data['name'] = guest.name
|
||||
if 'im_status' in fields:
|
||||
data['im_status'] = guest.im_status
|
||||
guests_formatted_data[guest] = data
|
||||
return guests_formatted_data
|
||||
|
|
@ -2,7 +2,7 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
from odoo.addons.mail.tools.credentials import get_twilio_credentials
|
||||
from odoo.addons.mail.tools.discuss import get_twilio_credentials
|
||||
import logging
|
||||
import requests
|
||||
|
||||
|
|
@ -10,7 +10,8 @@ _logger = logging.getLogger(__name__)
|
|||
|
||||
class MailIceServer(models.Model):
|
||||
_name = 'mail.ice.server'
|
||||
_description = 'ICE server'
|
||||
_description = 'ICE Server'
|
||||
_rec_name = "uri"
|
||||
|
||||
server_type = fields.Selection([('stun', 'stun:'), ('turn', 'turn:')], string='Type', required=True, default='stun')
|
||||
uri = fields.Char('URI', required=True)
|
||||
|
|
@ -40,15 +41,14 @@ class MailIceServer(models.Model):
|
|||
:return: List of dict, each of which representing a stun or turn server,
|
||||
formatted as expected by the specifications of RTCConfiguration.iceServers
|
||||
"""
|
||||
if self.env['ir.config_parameter'].sudo().get_param('mail.use_twilio_rtc_servers'):
|
||||
(account_sid, auth_token) = get_twilio_credentials(self.env)
|
||||
if account_sid and auth_token:
|
||||
url = f'https://api.twilio.com/2010-04-01/Accounts/{account_sid}/Tokens.json'
|
||||
response = requests.post(url, auth=(account_sid, auth_token), timeout=60)
|
||||
if response.ok:
|
||||
response_content = response.json()
|
||||
if response_content:
|
||||
return response_content['ice_servers']
|
||||
else:
|
||||
_logger.warning(f"Failed to obtain TURN servers, status code: {response.status_code}, content: {response.content}.")
|
||||
(account_sid, auth_token) = get_twilio_credentials(self.env)
|
||||
if account_sid and auth_token:
|
||||
url = f'https://api.twilio.com/2010-04-01/Accounts/{account_sid}/Tokens.json'
|
||||
response = requests.post(url, auth=(account_sid, auth_token), timeout=60)
|
||||
if response.ok:
|
||||
response_content = response.json()
|
||||
if response_content:
|
||||
return response_content['ice_servers']
|
||||
else:
|
||||
_logger.warning("Failed to obtain TURN servers, status code: %s, content:%s", response.status_code, response.content)
|
||||
return self._get_local_ice_servers()
|
||||
|
|
|
|||
|
|
@ -1,155 +1,143 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import re
|
||||
import requests
|
||||
|
||||
from datetime import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from lxml import html, etree
|
||||
from lxml import html
|
||||
from urllib.parse import urlparse
|
||||
import requests
|
||||
|
||||
from odoo import api, models, fields
|
||||
from odoo import api, models, fields, tools
|
||||
from odoo.tools.misc import OrderedSet
|
||||
from odoo.addons.mail.tools.link_preview import get_link_preview_from_url
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class LinkPreview(models.Model):
|
||||
class MailLinkPreview(models.Model):
|
||||
_name = 'mail.link.preview'
|
||||
_inherit = ["bus.listener.mixin"]
|
||||
_description = "Store link preview data"
|
||||
|
||||
message_id = fields.Many2one('mail.message', string='Message', index=True, ondelete='cascade', required=True)
|
||||
source_url = fields.Char('URL', required=True)
|
||||
og_type = fields.Char('Type')
|
||||
og_title = fields.Char('Title')
|
||||
og_site_name = fields.Char('Site name')
|
||||
og_image = fields.Char('Image')
|
||||
og_description = fields.Text('Description')
|
||||
og_mimetype = fields.Char('MIME type')
|
||||
image_mimetype = fields.Char('Image MIME type')
|
||||
create_date = fields.Datetime(index=True)
|
||||
message_link_preview_ids = fields.One2many(
|
||||
"mail.message.link.preview", "link_preview_id", groups="base.group_erp_manager"
|
||||
)
|
||||
|
||||
_unique_source_url = models.UniqueIndex("(source_url)")
|
||||
|
||||
@api.model
|
||||
def _create_link_previews(self, message):
|
||||
if not message.body:
|
||||
return
|
||||
tree = html.fromstring(message.body)
|
||||
urls = tree.xpath('//a[not(@data-oe-model)]/@href')
|
||||
link_previews = self.env['mail.link.preview']
|
||||
def _create_from_message_and_notify(self, message, request_url=None):
|
||||
urls = []
|
||||
if not tools.is_html_empty(message.body):
|
||||
urls = OrderedSet(html.fromstring(message.body).xpath("//a[not(@data-oe-model)]/@href"))
|
||||
if request_url:
|
||||
ignore_pattern = re.compile(f"{re.escape(request_url)}(odoo|web|chat)(/|$|#|\\?)")
|
||||
urls = list(filter(lambda url: not ignore_pattern.match(url), urls))
|
||||
requests_session = requests.Session()
|
||||
# Some websites are blocking non browser user agent.
|
||||
requests_session.headers.update({
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'
|
||||
})
|
||||
for url in set(urls):
|
||||
if len(link_previews) >= 5:
|
||||
break
|
||||
link_previews |= self.env['mail.link.preview']._create_link_preview(url, message.id, requests_session)
|
||||
if not link_previews:
|
||||
return
|
||||
guest = self.env['mail.guest']._get_guest_from_context()
|
||||
if message.model == 'mail.channel' and message.res_id:
|
||||
target = self.env['mail.channel'].browse(message.res_id)
|
||||
elif self.env.user._is_public() and guest:
|
||||
target = guest
|
||||
else:
|
||||
target = self.env.user.partner_id
|
||||
self.env['bus.bus']._sendmany([(target, 'mail.link.preview/insert', link_previews._link_preview_format())])
|
||||
|
||||
@api.model
|
||||
def _create_link_preview(self, url, message_id, request_session):
|
||||
if self._is_domain_throttled(url):
|
||||
return self.env['mail.link.preview']
|
||||
link_preview_data = self._get_link_preview_from_url(url, request_session)
|
||||
if link_preview_data:
|
||||
link_preview_data['message_id'] = message_id
|
||||
return self.create(link_preview_data)
|
||||
return self.env['mail.link.preview']
|
||||
|
||||
def _delete_and_notify(self):
|
||||
notifications = []
|
||||
guest = self.env['mail.guest']._get_guest_from_context()
|
||||
for link_preview in self:
|
||||
if link_preview.message_id.model == 'mail.channel' and link_preview.message_id.res_id:
|
||||
target = self.env['mail.channel'].browse(link_preview.message_id.res_id)
|
||||
elif self.env.user._is_public() and guest:
|
||||
target = guest
|
||||
message_link_previews_ok = self.env["mail.message.link.preview"]
|
||||
link_previews_values = [] # list of (sequence, values)
|
||||
message_link_previews_values = [] # list of (sequence, mail.link.preview record)
|
||||
message_link_preview_by_url = {
|
||||
message_link_preview.link_preview_id.source_url: message_link_preview
|
||||
for message_link_preview in message.sudo().message_link_preview_ids
|
||||
}
|
||||
link_preview_by_url = {}
|
||||
if len(message_link_preview_by_url) != len(urls):
|
||||
# don't make the query if all `mail.message.link.preview` have been found
|
||||
link_preview_by_url = {
|
||||
link_preview.source_url: link_preview
|
||||
for link_preview in self.env["mail.link.preview"].search(
|
||||
[("source_url", "in", urls)]
|
||||
)
|
||||
}
|
||||
for index, url in enumerate(urls):
|
||||
if message_link_preview := message_link_preview_by_url.get(url):
|
||||
message_link_preview.sequence = index
|
||||
message_link_previews_ok += message_link_preview
|
||||
else:
|
||||
target = self.env.user.partner_id
|
||||
notifications.append((target, 'mail.link.preview/delete', {'id': link_preview.id}))
|
||||
self.env['bus.bus']._sendmany(notifications)
|
||||
self.unlink()
|
||||
if link_preview := link_preview_by_url.get(url):
|
||||
message_link_previews_values.append((index, link_preview))
|
||||
elif not self._is_domain_thottled(url):
|
||||
if link_preview_values := get_link_preview_from_url(url, requests_session):
|
||||
link_previews_values.append((index, link_preview_values))
|
||||
if (
|
||||
len(message_link_previews_ok)
|
||||
+ len(message_link_previews_values)
|
||||
+ len(link_previews_values)
|
||||
> 5
|
||||
):
|
||||
break
|
||||
new_link_preview_by_url = {
|
||||
link_preview.source_url: link_preview
|
||||
for link_preview in self.env["mail.link.preview"].create(
|
||||
[values for sequence, values in link_previews_values]
|
||||
)
|
||||
}
|
||||
for sequence, values in link_previews_values:
|
||||
message_link_previews_values.append(
|
||||
(sequence, new_link_preview_by_url[values["source_url"]])
|
||||
)
|
||||
message_link_previews_ok += self.env["mail.message.link.preview"].create(
|
||||
[
|
||||
{
|
||||
"sequence": sequence,
|
||||
"link_preview_id": link_preview.id,
|
||||
"message_id": message.id,
|
||||
}
|
||||
for sequence, link_preview in message_link_previews_values
|
||||
]
|
||||
)
|
||||
(message.sudo().message_link_preview_ids - message_link_previews_ok)._unlink_and_notify()
|
||||
Store(
|
||||
bus_channel=message._bus_channel(),
|
||||
).add(message, "message_link_preview_ids").bus_send()
|
||||
|
||||
@api.model
|
||||
def _is_link_preview_enabled(self):
|
||||
link_preview_throttle = int(self.env['ir.config_parameter'].sudo().get_param('mail.link_preview_throttle', 99))
|
||||
return link_preview_throttle > 0
|
||||
|
||||
@api.model
|
||||
def _is_domain_throttled(self, url):
|
||||
def _is_domain_thottled(self, url):
|
||||
domain = urlparse(url).netloc
|
||||
date_interval = fields.Datetime.to_string((datetime.now() - relativedelta(seconds=10)))
|
||||
call_counter = self.search_count([
|
||||
('source_url', 'ilike', domain),
|
||||
('create_date', '>', date_interval),
|
||||
])
|
||||
link_preview_throttle = int(self.env['ir.config_parameter'].get_param('mail.link_preview_throttle', 99))
|
||||
call_counter = self.env["mail.link.preview"].search_count(
|
||||
[("source_url", "ilike", domain), ("create_date", ">", date_interval)]
|
||||
)
|
||||
link_preview_throttle = int(
|
||||
self.env["ir.config_parameter"].get_param("mail.link_preview_throttle", 99)
|
||||
)
|
||||
return call_counter > link_preview_throttle
|
||||
|
||||
@api.model
|
||||
def _get_link_preview_from_url(self, url, request_session):
|
||||
try:
|
||||
response = request_session.head(url, timeout=3, allow_redirects=True)
|
||||
except requests.exceptions.RequestException:
|
||||
return False
|
||||
if response.status_code != requests.codes.ok:
|
||||
return False
|
||||
image_mimetype = (
|
||||
'image/bmp',
|
||||
'image/gif',
|
||||
'image/jpeg',
|
||||
'image/png',
|
||||
'image/tiff',
|
||||
'image/x-icon',
|
||||
)
|
||||
if not response.headers.get('Content-Type'):
|
||||
return False
|
||||
# Content-Type header can return a charset, but we just need the
|
||||
# mimetype (eg: image/jpeg;charset=ISO-8859-1)
|
||||
content_type = response.headers['Content-Type'].split(';')
|
||||
if response.headers['Content-Type'].startswith(image_mimetype):
|
||||
return {
|
||||
'image_mimetype': content_type[0],
|
||||
'source_url': url,
|
||||
}
|
||||
if response.headers['Content-Type'].startswith('text/html'):
|
||||
return self._get_link_preview_from_html(url, request_session)
|
||||
return False
|
||||
def _search_or_create_from_url(self, url):
|
||||
"""Return the URL preview, first from the database if available otherwise make the request."""
|
||||
preview = self.env["mail.link.preview"].search([("source_url", "=", url)])
|
||||
if not preview:
|
||||
if self._is_domain_thottled(url):
|
||||
return self.env["mail.link.preview"]
|
||||
preview_values = get_link_preview_from_url(url)
|
||||
if not preview_values:
|
||||
return self.env["mail.link.preview"]
|
||||
preview = self.env['mail.link.preview'].create(preview_values)
|
||||
return preview
|
||||
|
||||
def _get_link_preview_from_html(self, url, request_session):
|
||||
response = request_session.get(url, timeout=3)
|
||||
parser = etree.HTMLParser(encoding=response.encoding)
|
||||
tree = html.fromstring(response.content, parser=parser)
|
||||
og_title = tree.xpath('//meta[@property="og:title"]/@content')
|
||||
if not og_title:
|
||||
return False
|
||||
og_description = tree.xpath('//meta[@property="og:description"]/@content')
|
||||
og_type = tree.xpath('//meta[@property="og:type"]/@content')
|
||||
og_image = tree.xpath('//meta[@property="og:image"]/@content')
|
||||
og_mimetype = tree.xpath('//meta[@property="og:image:type"]/@content')
|
||||
return {
|
||||
'og_description': og_description[0] if og_description else None,
|
||||
'og_image': og_image[0] if og_image else None,
|
||||
'og_mimetype': og_mimetype[0] if og_mimetype else None,
|
||||
'og_title': og_title[0],
|
||||
'og_type': og_type[0] if og_type else None,
|
||||
'source_url': url,
|
||||
}
|
||||
|
||||
def _link_preview_format(self):
|
||||
return [{
|
||||
'id': preview.id,
|
||||
'message': {'id': preview.message_id.id},
|
||||
'image_mimetype': preview.image_mimetype,
|
||||
'og_description': preview.og_description,
|
||||
'og_image': preview.og_image,
|
||||
'og_mimetype': preview.og_mimetype,
|
||||
'og_title': preview.og_title,
|
||||
'og_type': preview.og_type,
|
||||
'source_url': preview.source_url,
|
||||
} for preview in self]
|
||||
def _to_store_defaults(self, target):
|
||||
return [
|
||||
"image_mimetype",
|
||||
"og_description",
|
||||
"og_image",
|
||||
"og_mimetype",
|
||||
"og_site_name",
|
||||
"og_title",
|
||||
"og_type",
|
||||
"source_url",
|
||||
]
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,44 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class MessageMailLinkPreview(models.Model):
|
||||
_name = "mail.message.link.preview"
|
||||
_inherit = ["bus.listener.mixin"]
|
||||
_description = "Link between link previews and messages"
|
||||
_order = "sequence, id"
|
||||
|
||||
message_id = fields.Many2one("mail.message", required=True, index=True, ondelete="cascade")
|
||||
link_preview_id = fields.Many2one(
|
||||
"mail.link.preview", index=True, required=True, ondelete="cascade"
|
||||
)
|
||||
sequence = fields.Integer("Sequence")
|
||||
is_hidden = fields.Boolean()
|
||||
author_id = fields.Many2one(related="message_id.author_id")
|
||||
|
||||
_unique_message_link_preview = models.UniqueIndex("(message_id, link_preview_id)")
|
||||
|
||||
def _bus_channel(self):
|
||||
return self.message_id._bus_channel()
|
||||
|
||||
def _hide_and_notify(self):
|
||||
if not self:
|
||||
return
|
||||
self.is_hidden = True
|
||||
for message_link_preview in self:
|
||||
Store(bus_channel=self._bus_channel()).delete(message_link_preview).bus_send()
|
||||
|
||||
def _unlink_and_notify(self):
|
||||
if not self:
|
||||
return
|
||||
for message_link_preview in self:
|
||||
Store(bus_channel=self._bus_channel()).delete(message_link_preview).bus_send()
|
||||
self.unlink()
|
||||
|
||||
def _to_store_defaults(self, target):
|
||||
return [
|
||||
Store.One("link_preview_id", sudo=True),
|
||||
Store.One("message_id", [], sudo=True),
|
||||
]
|
||||
|
|
@ -1,7 +1,8 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models, fields
|
||||
from odoo.tools import groupby
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class MailMessageReaction(models.Model):
|
||||
|
|
@ -10,15 +11,33 @@ class MailMessageReaction(models.Model):
|
|||
_order = 'id desc'
|
||||
_log_access = False
|
||||
|
||||
message_id = fields.Many2one(string="Message", comodel_name='mail.message', ondelete='cascade', required=True, readonly=True)
|
||||
message_id = fields.Many2one(string="Message", comodel_name='mail.message', ondelete='cascade', required=True, readonly=True, index=True)
|
||||
content = fields.Char(string="Content", required=True, readonly=True)
|
||||
partner_id = fields.Many2one(string="Reacting Partner", comodel_name='res.partner', ondelete='cascade', readonly=True)
|
||||
guest_id = fields.Many2one(string="Reacting Guest", comodel_name='mail.guest', ondelete='cascade', readonly=True)
|
||||
|
||||
def init(self):
|
||||
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS mail_message_reaction_partner_unique ON %s (message_id, content, partner_id) WHERE partner_id IS NOT NULL" % self._table)
|
||||
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS mail_message_reaction_guest_unique ON %s (message_id, content, guest_id) WHERE guest_id IS NOT NULL" % self._table)
|
||||
_partner_unique = models.UniqueIndex("(message_id, content, partner_id) WHERE partner_id IS NOT NULL")
|
||||
_guest_unique = models.UniqueIndex("(message_id, content, guest_id) WHERE guest_id IS NOT NULL")
|
||||
|
||||
_sql_constraints = [
|
||||
("partner_or_guest_exists", "CHECK((partner_id IS NOT NULL AND guest_id IS NULL) OR (partner_id IS NULL AND guest_id IS NOT NULL))", "A message reaction must be from a partner or from a guest."),
|
||||
]
|
||||
_partner_or_guest_exists = models.Constraint(
|
||||
'CHECK((partner_id IS NOT NULL AND guest_id IS NULL) OR (partner_id IS NULL AND guest_id IS NOT NULL))',
|
||||
'A message reaction must be from a partner or from a guest.',
|
||||
)
|
||||
|
||||
def _to_store(self, store: Store, fields):
|
||||
if fields:
|
||||
raise NotImplementedError("Fields are not supported for reactions.")
|
||||
for (message, content), reactions in groupby(self, lambda r: (r.message_id, r.content)):
|
||||
reactions = self.env["mail.message.reaction"].union(*reactions)
|
||||
data = {
|
||||
"content": content,
|
||||
"count": len(reactions),
|
||||
"guests": Store.Many(reactions.guest_id, ["avatar_128", "name"]),
|
||||
"message": message.id,
|
||||
"partners": Store.Many(
|
||||
reactions.partner_id,
|
||||
["avatar_128", *message._get_store_partner_name_fields()],
|
||||
),
|
||||
"sequence": min(reactions.ids),
|
||||
}
|
||||
store.add_model_values("MessageReactions", data)
|
||||
|
|
|
|||
|
|
@ -64,10 +64,14 @@ class MailMessageSchedule(models.Model):
|
|||
for model, schedules in self._group_by_model().items():
|
||||
if model:
|
||||
records = self.env[model].browse(schedules.mapped('mail_message_id.res_id'))
|
||||
existing = records.exists()
|
||||
else:
|
||||
records = [self.env['mail.thread']] * len(schedules)
|
||||
existing = records
|
||||
|
||||
for record, schedule in zip(records, schedules):
|
||||
if record not in existing:
|
||||
continue
|
||||
notify_kwargs = dict(default_notify_kwargs or {}, skip_existing=True)
|
||||
try:
|
||||
schedule_notify_kwargs = json.loads(schedule.notification_parameters)
|
||||
|
|
@ -92,7 +96,8 @@ class MailMessageSchedule(models.Model):
|
|||
``notify_thread``. Those are default values overridden by content of
|
||||
``notification_parameters`` field.
|
||||
|
||||
:return bool: False if no schedule has been found, True otherwise
|
||||
:returns: False if no schedule has been found, True otherwise
|
||||
:rtype: bool
|
||||
"""
|
||||
messages_scheduled = self.search(
|
||||
[('mail_message_id', 'in', messages.ids)]
|
||||
|
|
@ -112,7 +117,8 @@ class MailMessageSchedule(models.Model):
|
|||
:param datetime new_datetime: new datetime for sending. New triggers
|
||||
are created based on it;
|
||||
|
||||
:return bool: False if no schedule has been found, True otherwise
|
||||
:returns: False if no schedule has been found, True otherwise
|
||||
:rtype: bool
|
||||
"""
|
||||
messages_scheduled = self.search(
|
||||
[('mail_message_id', 'in', messages.ids)]
|
||||
|
|
|
|||
|
|
@ -45,15 +45,15 @@ class MailMessageSubtype(models.Model):
|
|||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
self.clear_caches()
|
||||
self.env.registry.clear_cache() # _get_auto_subscription_subtypes
|
||||
return super(MailMessageSubtype, self).create(vals_list)
|
||||
|
||||
def write(self, vals):
|
||||
self.clear_caches()
|
||||
self.env.registry.clear_cache() # _get_auto_subscription_subtypes
|
||||
return super(MailMessageSubtype, self).write(vals)
|
||||
|
||||
def unlink(self):
|
||||
self.clear_caches()
|
||||
self.env.registry.clear_cache() # _get_auto_subscription_subtypes
|
||||
return super(MailMessageSubtype, self).unlink()
|
||||
|
||||
@tools.ormcache('model_name')
|
||||
|
|
@ -89,7 +89,7 @@ class MailMessageSubtype(models.Model):
|
|||
child_ids += subtype.ids
|
||||
if subtype.default:
|
||||
def_ids += subtype.ids
|
||||
elif subtype.relation_field:
|
||||
if subtype.relation_field:
|
||||
parent[subtype.id] = subtype.parent_id.id
|
||||
relation.setdefault(subtype.res_model, set()).add(subtype.relation_field)
|
||||
# required for backward compatibility
|
||||
|
|
|
|||
|
|
@ -0,0 +1,29 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import api, fields, models
|
||||
|
||||
|
||||
class MailMessageTranslation(models.Model):
|
||||
_name = 'mail.message.translation'
|
||||
_description = "Message Translation"
|
||||
|
||||
message_id = fields.Many2one("mail.message", "Message", required=True, ondelete="cascade")
|
||||
source_lang = fields.Char(
|
||||
"Source Language", required=True, help="Result of the language detection based on its content."
|
||||
)
|
||||
target_lang = fields.Char(
|
||||
"Target Language", required=True, help="Shortened language code used as the target for the translation request."
|
||||
)
|
||||
body = fields.Html(
|
||||
"Translation Body", required=True, sanitize_style=True, help="String received from the translation request."
|
||||
)
|
||||
create_date = fields.Datetime(index=True)
|
||||
|
||||
_unique = models.UniqueIndex("(message_id, target_lang)")
|
||||
|
||||
@api.autovacuum
|
||||
def _gc_translations(self):
|
||||
treshold = fields.Datetime().now() - relativedelta(weeks=2)
|
||||
self.search([("create_date", "<", treshold)]).unlink()
|
||||
|
|
@ -1,11 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import api, fields, models
|
||||
from odoo.exceptions import AccessError
|
||||
from odoo.tools.constants import GC_UNLINK_LIMIT
|
||||
from odoo.tools.translate import _
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class MailNotification(models.Model):
|
||||
|
|
@ -21,16 +22,21 @@ class MailNotification(models.Model):
|
|||
mail_mail_id = fields.Many2one('mail.mail', 'Mail', index=True, help='Optional mail_mail ID. Used mainly to optimize searches.')
|
||||
# recipient
|
||||
res_partner_id = fields.Many2one('res.partner', 'Recipient', index=True, ondelete='cascade')
|
||||
# set if no matching partner exists (mass mail)
|
||||
# must be normalized except if notification is cancel/failure from invalid email
|
||||
mail_email_address = fields.Char(help='Recipient email address')
|
||||
# status
|
||||
notification_type = fields.Selection([
|
||||
('inbox', 'Inbox'), ('email', 'Email')
|
||||
], string='Notification Type', default='inbox', index=True, required=True)
|
||||
notification_status = fields.Selection([
|
||||
('ready', 'Ready to Send'),
|
||||
('sent', 'Sent'),
|
||||
('process', 'Processing'), # being checked by intermediary like IAP for sms
|
||||
('pending', 'Sent'), # used with SMS; mail does not differentiate sent from delivered
|
||||
('sent', 'Delivered'),
|
||||
('bounce', 'Bounced'),
|
||||
('exception', 'Exception'),
|
||||
('canceled', 'Canceled')
|
||||
('canceled', 'Cancelled')
|
||||
], string='Status', default='ready', index=True)
|
||||
is_read = fields.Boolean('Is Read', index=True)
|
||||
read_date = fields.Datetime('Read Date', copy=False)
|
||||
|
|
@ -38,42 +44,39 @@ class MailNotification(models.Model):
|
|||
# generic
|
||||
("unknown", "Unknown error"),
|
||||
# mail
|
||||
("mail_bounce", "Bounce"),
|
||||
("mail_spam", "Detected As Spam"),
|
||||
("mail_email_invalid", "Invalid email address"),
|
||||
("mail_email_missing", "Missing email address"),
|
||||
("mail_from_invalid", "Invalid from address"),
|
||||
("mail_from_missing", "Missing from address"),
|
||||
("mail_smtp", "Connection failed (outgoing mail server problem)"),
|
||||
], string='Failure type')
|
||||
# mass mode
|
||||
("mail_bl", "Blacklisted Address"),
|
||||
("mail_optout", "Opted Out"),
|
||||
("mail_dup", "Duplicated Email")], string='Failure type')
|
||||
failure_reason = fields.Text('Failure reason', copy=False)
|
||||
|
||||
_sql_constraints = [
|
||||
# email notification: partner is required
|
||||
('notification_partner_required',
|
||||
"CHECK(notification_type NOT IN ('email', 'inbox') OR res_partner_id IS NOT NULL)",
|
||||
'Customer is required for inbox / email notification'),
|
||||
]
|
||||
_notification_partner_required = models.Constraint(
|
||||
"CHECK(notification_type != 'inbox' OR res_partner_id IS NOT NULL)",
|
||||
'Customer is required for inbox notification',
|
||||
)
|
||||
_notification_partner_or_email_required = models.Constraint(
|
||||
"CHECK(notification_type != 'email' OR failure_type IS NOT NULL OR res_partner_id IS NOT NULL OR COALESCE(mail_email_address, '') != '')",
|
||||
'Customer or email is required for inbox / email notification',
|
||||
)
|
||||
_res_partner_id_is_read_notification_status_mail_message_id = models.Index("(res_partner_id, is_read, notification_status, mail_message_id)")
|
||||
_author_id_notification_status_failure = models.Index("(author_id, notification_status) WHERE notification_status IN ('bounce', 'exception')")
|
||||
_unique_mail_message_id_res_partner_id_ = models.UniqueIndex("(mail_message_id, res_partner_id) WHERE res_partner_id IS NOT NULL")
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# CRUD
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def init(self):
|
||||
self._cr.execute("""
|
||||
CREATE INDEX IF NOT EXISTS mail_notification_res_partner_id_is_read_notification_status_mail_message_id
|
||||
ON mail_notification (res_partner_id, is_read, notification_status, mail_message_id);
|
||||
CREATE INDEX IF NOT EXISTS mail_notification_author_id_notification_status_failure
|
||||
ON mail_notification (author_id, notification_status)
|
||||
WHERE notification_status IN ('bounce', 'exception');
|
||||
""")
|
||||
self.env.cr.execute(
|
||||
"""CREATE UNIQUE INDEX IF NOT EXISTS unique_mail_message_id_res_partner_id_if_set
|
||||
ON %s (mail_message_id, res_partner_id)
|
||||
WHERE res_partner_id IS NOT NULL""" % self._table
|
||||
)
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
messages = self.env['mail.message'].browse(vals['mail_message_id'] for vals in vals_list)
|
||||
messages.check_access_rights('read')
|
||||
messages.check_access_rule('read')
|
||||
messages.check_access('read')
|
||||
for vals in vals_list:
|
||||
if vals.get('is_read'):
|
||||
vals['read_date'] = fields.Datetime.now()
|
||||
|
|
@ -94,10 +97,9 @@ class MailNotification(models.Model):
|
|||
('res_partner_id.partner_share', '=', False),
|
||||
('notification_status', 'in', ('sent', 'canceled'))
|
||||
]
|
||||
records = self.search(domain, limit=models.GC_UNLINK_LIMIT)
|
||||
if len(records) >= models.GC_UNLINK_LIMIT:
|
||||
self.env.ref('base.autovacuum_job')._trigger()
|
||||
return records.unlink()
|
||||
records = self.search(domain, limit=GC_UNLINK_LIMIT)
|
||||
records.unlink()
|
||||
return len(records), len(records) == GC_UNLINK_LIMIT # done, remaining
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# TOOLS
|
||||
|
|
@ -108,7 +110,9 @@ class MailNotification(models.Model):
|
|||
if self.failure_type != 'unknown':
|
||||
return dict(self._fields['failure_type'].selection).get(self.failure_type, _('No Error'))
|
||||
else:
|
||||
return _("Unknown error") + ": %s" % (self.failure_reason or '')
|
||||
if self.failure_reason:
|
||||
return _("Unknown error: %(error)s", error=self.failure_reason)
|
||||
return _("Unknown error")
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# DISCUSS
|
||||
|
|
@ -118,20 +122,26 @@ class MailNotification(models.Model):
|
|||
"""Returns only the notifications to show on the web client."""
|
||||
def _filter_unimportant_notifications(notif):
|
||||
if notif.notification_status in ['bounce', 'exception', 'canceled'] \
|
||||
or notif.res_partner_id.partner_share:
|
||||
or notif.res_partner_id.partner_share or notif.mail_email_address:
|
||||
return True
|
||||
subtype = notif.mail_message_id.subtype_id
|
||||
return not subtype or subtype.track_recipients
|
||||
|
||||
return self.filtered(_filter_unimportant_notifications)
|
||||
|
||||
def _notification_format(self):
|
||||
"""Returns the current notifications in the format expected by the web
|
||||
client."""
|
||||
return [{
|
||||
'id': notif.id,
|
||||
'notification_type': notif.notification_type,
|
||||
'notification_status': notif.notification_status,
|
||||
'failure_type': notif.failure_type,
|
||||
'res_partner_id': [notif.res_partner_id.id, notif.res_partner_id.display_name] if notif.res_partner_id else False,
|
||||
} for notif in self]
|
||||
def _to_store_defaults(self, target):
|
||||
return [
|
||||
"mail_email_address",
|
||||
"failure_type",
|
||||
"mail_message_id",
|
||||
"notification_status",
|
||||
"notification_type",
|
||||
Store.One(
|
||||
"res_partner_id",
|
||||
[
|
||||
"name",
|
||||
"email",
|
||||
Store.Attr("display_name", predicate=lambda p: not p.name),
|
||||
],
|
||||
),
|
||||
]
|
||||
|
|
|
|||
116
odoo-bringout-oca-ocb-mail/mail/models/mail_presence.py
Normal file
116
odoo-bringout-oca-ocb-mail/mail/models/mail_presence.py
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from odoo import api, fields, models, tools
|
||||
from odoo.service.model import PG_CONCURRENCY_EXCEPTIONS_TO_RETRY
|
||||
|
||||
UPDATE_PRESENCE_DELAY = 60
|
||||
DISCONNECTION_TIMER = UPDATE_PRESENCE_DELAY + 5
|
||||
AWAY_TIMER = 1800 # 30 minutes
|
||||
PRESENCE_OUTDATED_TIMER = 12 * 60 * 60 # 12 hours
|
||||
|
||||
|
||||
class MailPresence(models.Model):
|
||||
"""User/Guest Presence
|
||||
Its status is 'online', 'away' or 'offline'. This model should be a one2one, but is not
|
||||
attached to res_users to avoid database concurrency errors.
|
||||
"""
|
||||
|
||||
_name = "mail.presence"
|
||||
_inherit = "bus.listener.mixin"
|
||||
_description = "User/Guest Presence"
|
||||
_log_access = False
|
||||
|
||||
user_id = fields.Many2one("res.users", "Users", ondelete="cascade")
|
||||
guest_id = fields.Many2one("mail.guest", "Guest", ondelete="cascade")
|
||||
last_poll = fields.Datetime("Last Poll", default=lambda self: fields.Datetime.now())
|
||||
last_presence = fields.Datetime("Last Presence", default=lambda self: fields.Datetime.now())
|
||||
status = fields.Selection(
|
||||
[("online", "Online"), ("away", "Away"), ("offline", "Offline")],
|
||||
"IM Status",
|
||||
default="offline",
|
||||
)
|
||||
|
||||
_guest_unique = models.UniqueIndex("(guest_id) WHERE guest_id IS NOT NULL")
|
||||
_user_unique = models.UniqueIndex("(user_id) WHERE user_id IS NOT NULL")
|
||||
|
||||
_partner_or_guest_exists = models.Constraint(
|
||||
"CHECK((user_id IS NOT NULL AND guest_id IS NULL) OR (user_id IS NULL AND guest_id IS NOT NULL))",
|
||||
"A mail presence must have a user or a guest.",
|
||||
)
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
presences = super().create(vals_list)
|
||||
presences._send_presence()
|
||||
return presences
|
||||
|
||||
def write(self, vals):
|
||||
status_by_presence = {presence: presence.status for presence in self}
|
||||
result = super().write(vals)
|
||||
updated = self.filtered(lambda p: status_by_presence[p] != p.status)
|
||||
updated._send_presence()
|
||||
return result
|
||||
|
||||
def unlink(self):
|
||||
self._send_presence("offline")
|
||||
return super().unlink()
|
||||
|
||||
@api.model
|
||||
def _try_update_presence(self, user_or_guest, inactivity_period=0):
|
||||
"""Updates the last_poll and last_presence of the current user
|
||||
:param inactivity_period: duration in milliseconds
|
||||
"""
|
||||
# This method is called in method _poll() and cursor is closed right
|
||||
# after; see bus/controllers/main.py.
|
||||
try:
|
||||
# Hide transaction serialization errors, which can be ignored, the presence update is not essential
|
||||
# The errors are supposed from presence.write(...) call only
|
||||
with tools.mute_logger("odoo.sql_db"):
|
||||
self._update_presence(user_or_guest, inactivity_period)
|
||||
# commit on success
|
||||
self.env.cr.commit()
|
||||
except PG_CONCURRENCY_EXCEPTIONS_TO_RETRY:
|
||||
# ignore concurrency error
|
||||
return self.env.cr.rollback()
|
||||
|
||||
@api.model
|
||||
def _update_presence(self, user_or_guest, inactivity_period=0):
|
||||
values = {
|
||||
"last_poll": fields.Datetime.now(),
|
||||
"last_presence": fields.Datetime.now() - timedelta(milliseconds=inactivity_period),
|
||||
"status": "away" if inactivity_period > AWAY_TIMER * 1000 else "online",
|
||||
}
|
||||
# sudo: res.users/mail.guest can update presence of accessible user/guest
|
||||
user_or_guest_sudo = user_or_guest.sudo()
|
||||
if presence := user_or_guest_sudo.presence_ids:
|
||||
presence.write(values)
|
||||
else:
|
||||
values["guest_id" if user_or_guest._name == "mail.guest" else "user_id"] = user_or_guest.id
|
||||
# sudo: res.users/mail.guest can update presence of accessible user/guest
|
||||
self.env["mail.presence"].sudo().create(values)
|
||||
|
||||
def _send_presence(self, im_status=None, bus_target=None):
|
||||
"""Send notification related to bus presence update.
|
||||
|
||||
:param im_status: 'online', 'away' or 'offline'
|
||||
"""
|
||||
for presence in self:
|
||||
target = bus_target or presence.guest_id or presence.user_id.partner_id
|
||||
target._bus_send(
|
||||
"bus.bus/im_status_updated",
|
||||
{
|
||||
"presence_status": im_status or presence.status,
|
||||
"im_status": target.im_status,
|
||||
"guest_id": presence.guest_id.id,
|
||||
"partner_id": presence.user_id.partner_id.id,
|
||||
},
|
||||
subchannel="presence" if not bus_target else None,
|
||||
)
|
||||
|
||||
@api.autovacuum
|
||||
def _gc_bus_presence(self):
|
||||
self.search(
|
||||
[("last_poll", "<", fields.Datetime.now() - timedelta(seconds=PRESENCE_OUTDATED_TIMER))]
|
||||
).unlink()
|
||||
71
odoo-bringout-oca-ocb-mail/mail/models/mail_push.py
Normal file
71
odoo-bringout-oca-ocb-mail/mail/models/mail_push.py
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import logging
|
||||
|
||||
from requests import Session
|
||||
|
||||
from odoo import api, fields, models
|
||||
from odoo.addons.mail.tools.web_push import push_to_end_point, DeviceUnreachableError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MailPush(models.Model):
|
||||
_name = 'mail.push'
|
||||
_description = "Push Notifications"
|
||||
|
||||
mail_push_device_id = fields.Many2one('mail.push.device', string='devices', required=True, ondelete="cascade")
|
||||
payload = fields.Text()
|
||||
|
||||
@api.model
|
||||
def _push_notification_to_endpoint(self, batch_size=50):
|
||||
"""Send to web browser endpoint computed notification"""
|
||||
web_push_notifications_sudo = self.sudo().search_fetch([], ['mail_push_device_id', 'payload'], limit=batch_size)
|
||||
if not web_push_notifications_sudo:
|
||||
return
|
||||
|
||||
ir_parameter_sudo = self.env['ir.config_parameter'].sudo()
|
||||
vapid_private_key = ir_parameter_sudo.get_param('mail.web_push_vapid_private_key')
|
||||
vapid_public_key = ir_parameter_sudo.get_param('mail.web_push_vapid_public_key')
|
||||
if not vapid_private_key or not vapid_public_key:
|
||||
return
|
||||
|
||||
session = Session()
|
||||
devices_to_unlink = set()
|
||||
|
||||
# process send notif
|
||||
devices = web_push_notifications_sudo.mail_push_device_id.grouped('id')
|
||||
for web_push_notification_sudo in web_push_notifications_sudo:
|
||||
device = devices.get(web_push_notification_sudo.mail_push_device_id.id)
|
||||
if device.id in devices_to_unlink:
|
||||
continue
|
||||
try:
|
||||
push_to_end_point(
|
||||
base_url=self.get_base_url(),
|
||||
device={
|
||||
'id': device.id,
|
||||
'endpoint': device.endpoint,
|
||||
'keys': device.keys
|
||||
},
|
||||
payload=web_push_notification_sudo.payload,
|
||||
vapid_private_key=vapid_private_key,
|
||||
vapid_public_key=vapid_public_key,
|
||||
session=session,
|
||||
)
|
||||
except DeviceUnreachableError:
|
||||
devices_to_unlink.add(device.id)
|
||||
except Exception as e: # noqa: BLE001
|
||||
# Avoid blocking the whole cron just for a notification exception
|
||||
_logger.error('An error occurred while trying to send web push: %s', e)
|
||||
|
||||
# clean up notif
|
||||
web_push_notifications_sudo.unlink()
|
||||
|
||||
# clean up obsolete devices
|
||||
if devices_to_unlink:
|
||||
self.env['mail.push.device'].sudo().browse(devices_to_unlink).unlink()
|
||||
|
||||
# restart the cron if needed
|
||||
if self.search_count([]) > 0:
|
||||
self.env.ref('mail.ir_cron_web_push_notification')._trigger()
|
||||
89
odoo-bringout-oca-ocb-mail/mail/models/mail_push_device.py
Normal file
89
odoo-bringout-oca-ocb-mail/mail/models/mail_push_device.py
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import json
|
||||
import logging as logger
|
||||
|
||||
from odoo import api, fields, models
|
||||
from ..tools.jwt import generate_vapid_keys, InvalidVapidError
|
||||
|
||||
_logger = logger.getLogger(__name__)
|
||||
|
||||
|
||||
class MailPushDevice(models.Model):
|
||||
_name = 'mail.push.device'
|
||||
_description = "Push Notification Device"
|
||||
|
||||
partner_id = fields.Many2one(
|
||||
'res.partner', string='Partner', index=True, required=True,
|
||||
default=lambda self: self.env.user.partner_id)
|
||||
endpoint = fields.Char(string='Browser endpoint', required=True)
|
||||
keys = fields.Char(string='Browser keys', required=True,
|
||||
help=("It's refer to browser keys used by the notification: \n"
|
||||
"- p256dh: It's the subscription public key generated by the browser. The browser will \n"
|
||||
" keep the private key secret and use it for decrypting the payload\n"
|
||||
"- auth: The auth value should be treated as a secret and not shared outside of Odoo"))
|
||||
expiration_time = fields.Datetime(string='Expiration Token Date')
|
||||
|
||||
_endpoint_unique = models.Constraint(
|
||||
'unique(endpoint)',
|
||||
'The endpoint must be unique !',
|
||||
)
|
||||
|
||||
@api.model
|
||||
def get_web_push_vapid_public_key(self):
|
||||
ir_params_sudo = self.env['ir.config_parameter'].sudo()
|
||||
public_key = 'mail.web_push_vapid_public_key'
|
||||
public_key_value = ir_params_sudo.get_param(public_key)
|
||||
# Regenerate new Keys if public key not present
|
||||
if not public_key_value:
|
||||
self.sudo().search([]).unlink() # Reset all devices (ServiceWorker)
|
||||
private_key_value, public_key_value = generate_vapid_keys()
|
||||
ir_params_sudo.set_param('mail.web_push_vapid_private_key', private_key_value)
|
||||
ir_params_sudo.set_param(public_key, public_key_value)
|
||||
_logger.info("WebPush: missing public key, new VAPID keys generated")
|
||||
return public_key_value
|
||||
|
||||
@api.model
|
||||
def register_devices(self, **kw):
|
||||
sw_vapid_public_key = kw.get('vapid_public_key')
|
||||
valid_sub = self._verify_vapid_public_key(sw_vapid_public_key)
|
||||
if not valid_sub:
|
||||
raise InvalidVapidError("Invalid VAPID public key")
|
||||
endpoint = kw.get('endpoint')
|
||||
browser_keys = kw.get('keys')
|
||||
if not endpoint or not browser_keys:
|
||||
return
|
||||
search_endpoint = kw.get('previousEndpoint', endpoint)
|
||||
mail_push_device = self.sudo().search([('endpoint', '=', search_endpoint)])
|
||||
if mail_push_device:
|
||||
if mail_push_device.partner_id is not self.env.user.partner_id:
|
||||
mail_push_device.write({
|
||||
'endpoint': endpoint,
|
||||
'expiration_time': kw.get('expirationTime'),
|
||||
'keys': json.dumps(browser_keys),
|
||||
'partner_id': self.env.user.partner_id,
|
||||
})
|
||||
else:
|
||||
self.sudo().create([{
|
||||
'endpoint': endpoint,
|
||||
'expiration_time': kw.get('expirationTime'),
|
||||
'keys': json.dumps(browser_keys),
|
||||
'partner_id': self.env.user.partner_id.id,
|
||||
}])
|
||||
|
||||
@api.model
|
||||
def unregister_devices(self, **kw):
|
||||
endpoint = kw.get('endpoint')
|
||||
if not endpoint:
|
||||
return
|
||||
mail_push_device = self.sudo().search([
|
||||
('endpoint', '=', endpoint)
|
||||
])
|
||||
if mail_push_device:
|
||||
mail_push_device.unlink()
|
||||
|
||||
def _verify_vapid_public_key(self, sw_public_key):
|
||||
ir_params_sudo = self.env['ir.config_parameter'].sudo()
|
||||
db_public_key = ir_params_sudo.get_param('mail.web_push_vapid_public_key')
|
||||
return db_public_key == sw_public_key
|
||||
|
|
@ -8,13 +8,14 @@ import re
|
|||
import traceback
|
||||
|
||||
from lxml import html
|
||||
from markupsafe import Markup
|
||||
from werkzeug import urls
|
||||
from functools import reduce
|
||||
from markupsafe import Markup, escape
|
||||
|
||||
from odoo import _, api, fields, models, tools
|
||||
from odoo.addons.base.models.ir_qweb import QWebException
|
||||
from odoo.addons.base.models.ir_qweb import QWebError
|
||||
from odoo.exceptions import UserError, AccessError
|
||||
from odoo.tools import is_html_empty, safe_eval
|
||||
from odoo.tools import urls
|
||||
from odoo.tools.mail import is_html_empty, prepend_html_content, html_normalize
|
||||
from odoo.tools.rendering_tools import convert_inline_template_to_qweb, parse_inline_template, render_inline_template, template_env_globals
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
|
@ -38,6 +39,7 @@ def format_time(env, time, tz=False, time_format='medium', lang_code=False):
|
|||
except babel.core.UnknownLocaleError:
|
||||
return time
|
||||
|
||||
|
||||
class MailRenderMixin(models.AbstractModel):
|
||||
_name = 'mail.render.mixin'
|
||||
_description = 'Mail Render Mixin'
|
||||
|
|
@ -50,7 +52,7 @@ class MailRenderMixin(models.AbstractModel):
|
|||
lang = fields.Char(
|
||||
'Language',
|
||||
help="Optional translation language (ISO code) to select when sending out an email. "
|
||||
"If not set, the english version will be used. This should usually be a placeholder expression "
|
||||
"If not set, the main partner's language will be used. This should usually be a placeholder expression "
|
||||
"that provides the appropriate language, e.g. {{ object.partner_id.lang }}.")
|
||||
# rendering context
|
||||
render_model = fields.Char("Rendering Model", compute='_compute_render_model', store=False)
|
||||
|
|
@ -76,7 +78,7 @@ class MailRenderMixin(models.AbstractModel):
|
|||
if sub_field_name:
|
||||
expression += "." + sub_field_name
|
||||
if null_value:
|
||||
expression += " or '''%s'''" % null_value
|
||||
expression += f" ||| {null_value}"
|
||||
expression += " }}"
|
||||
return expression
|
||||
|
||||
|
|
@ -89,8 +91,8 @@ class MailRenderMixin(models.AbstractModel):
|
|||
return name in ['render_engine', 'render_options'] or super()._valid_field_parameter(field, name)
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, values_list):
|
||||
record = super().create(values_list)
|
||||
def create(self, vals_list):
|
||||
record = super().create(vals_list)
|
||||
if self._unrestricted_rendering:
|
||||
# If the rendering is unrestricted (e.g. mail.template),
|
||||
# check the user is part of the mail editor group to create a new template if the template is dynamic
|
||||
|
|
@ -105,8 +107,8 @@ class MailRenderMixin(models.AbstractModel):
|
|||
self._check_access_right_dynamic_template()
|
||||
return True
|
||||
|
||||
def _update_field_translations(self, fname, translations, digest=None):
|
||||
res = super()._update_field_translations(fname, translations, digest)
|
||||
def _update_field_translations(self, field_name, translations, digest=None, source_lang=''):
|
||||
res = super()._update_field_translations(field_name, translations, digest=digest, source_lang=source_lang)
|
||||
if self._unrestricted_rendering:
|
||||
for lang in translations:
|
||||
# If the rendering is unrestricted (e.g. mail.template),
|
||||
|
|
@ -134,17 +136,15 @@ class MailRenderMixin(models.AbstractModel):
|
|||
if not html:
|
||||
return html
|
||||
|
||||
wrapper = Markup if isinstance(html, Markup) else str
|
||||
html = tools.ustr(html)
|
||||
if isinstance(html, Markup):
|
||||
wrapper = Markup
|
||||
assert isinstance(html, str)
|
||||
Wrapper = html.__class__
|
||||
|
||||
def _sub_relative2absolute(match):
|
||||
# compute here to do it only if really necessary + cache will ensure it is done only once
|
||||
# if not base_url
|
||||
if not _sub_relative2absolute.base_url:
|
||||
_sub_relative2absolute.base_url = self.env["ir.config_parameter"].sudo().get_param("web.base.url")
|
||||
return match.group(1) + urls.url_join(_sub_relative2absolute.base_url, match.group(2))
|
||||
return match.group(1) + urls.urljoin(_sub_relative2absolute.base_url, match.group(2))
|
||||
|
||||
_sub_relative2absolute.base_url = base_url
|
||||
html = re.sub(r"""(<(?:img|v:fill|v:image)(?=\s)[^>]*\ssrc=")(/[^/][^"]+)""", _sub_relative2absolute, html)
|
||||
|
|
@ -159,19 +159,66 @@ class MailRenderMixin(models.AbstractModel):
|
|||
/(?:[^'")]|(?!&\#34;)|(?!&\#39;))+ # stop at the first closing quote
|
||||
)""", re.VERBOSE), _sub_relative2absolute, html)
|
||||
|
||||
return wrapper(html)
|
||||
return Wrapper(html)
|
||||
|
||||
@api.model
|
||||
def _render_encapsulate(self, layout_xmlid, html, add_context=None, context_record=None):
|
||||
""" Encapsulate html content (i.e. an email body) in a layout containing
|
||||
more complex html. Used to generate a 'email friendly' content from
|
||||
simple html content.
|
||||
|
||||
Typical usage: encapsulate content in email layouts like 'mail_notification_layout'
|
||||
or 'mail_notification_light'. Also used for digest layouts. This leads
|
||||
to some default rendering values being computed here, often used in those
|
||||
templates. """
|
||||
record_name = (add_context or {}).get('record_name', context_record.display_name if context_record else '')
|
||||
subtype = (add_context or {}).get('subtype', self.env['mail.message.subtype'].sudo())
|
||||
template_ctx = {
|
||||
'body': html,
|
||||
'record_name': context_record.display_name if context_record else '',
|
||||
'model_description': self.env['ir.model']._get(context_record._name).display_name if context_record else False,
|
||||
'company': context_record['company_id'] if (context_record and 'company_id' in context_record) else self.env.company,
|
||||
'record': context_record,
|
||||
'record_name': record_name,
|
||||
**(add_context or {}),
|
||||
}
|
||||
if add_context:
|
||||
template_ctx.update(**add_context)
|
||||
# the 'mail_notification_light' expects a mail.message 'message' context, let's give it one
|
||||
if not template_ctx.get('message'):
|
||||
msg_vals = {'body': html}
|
||||
if context_record:
|
||||
msg_vals.update({'model': context_record._name, 'res_id': context_record.id})
|
||||
template_ctx['message'] = self.env['mail.message'].sudo().new(msg_vals)
|
||||
# other message info
|
||||
if not subtype:
|
||||
template_ctx['is_discussion'] = False
|
||||
template_ctx['subtype_internal'] = False
|
||||
else:
|
||||
if 'is_discussion' not in template_ctx:
|
||||
template_ctx['is_discussion'] = subtype.id == self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment')
|
||||
if 'subtype_internal' not in template_ctx:
|
||||
template_ctx['subtype_internal'] = subtype.is_internal
|
||||
template_ctx.setdefault('subtype', subtype)
|
||||
template_ctx.setdefault('tracking_values', [])
|
||||
# record info
|
||||
if 'model_description' not in template_ctx:
|
||||
template_ctx['model_description'] = self.env['ir.model']._get(context_record._name).display_name if context_record else False
|
||||
template_ctx.setdefault('subtitles', [record_name])
|
||||
# user / environment
|
||||
template_ctx.setdefault('author_user', False)
|
||||
if 'company' not in template_ctx:
|
||||
template_ctx['company'] = context_record._mail_get_companies(default=self.env.company)[context_record.id] if context_record else self.env.company
|
||||
template_ctx.setdefault('email_add_signature', False)
|
||||
template_ctx.setdefault('lang', self.env.lang)
|
||||
template_ctx.setdefault('signature', '')
|
||||
template_ctx.setdefault('show_unfollow', False)
|
||||
template_ctx.setdefault('website_url', '')
|
||||
# display: actions / buttons
|
||||
template_ctx.setdefault('button_access', False)
|
||||
template_ctx.setdefault('has_button_access', False)
|
||||
# display
|
||||
template_ctx.setdefault('email_notification_force_header', self.env.context.get('email_notification_force_header', False))
|
||||
template_ctx.setdefault('email_notification_force_footer', self.env.context.get('email_notification_force_footer', False))
|
||||
template_ctx.setdefault('email_notification_allow_header', self.env.context.get('email_notification_allow_header', True))
|
||||
template_ctx.setdefault('email_notification_allow_footer', self.env.context.get('email_notification_allow_footer', False))
|
||||
# tools
|
||||
template_ctx.setdefault('is_html_empty', is_html_empty)
|
||||
|
||||
html = self.env['ir.qweb']._render(layout_xmlid, template_ctx, minimal_qcontext=True, raise_if_not_found=False)
|
||||
if not html:
|
||||
|
|
@ -200,49 +247,51 @@ class MailRenderMixin(models.AbstractModel):
|
|||
{}
|
||||
</div>
|
||||
""").format(preview_markup)
|
||||
return tools.prepend_html_content(html, html_preview)
|
||||
return prepend_html_content(html, html_preview)
|
||||
return html
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# SECURITY
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def _is_dynamic(self):
|
||||
def _has_unsafe_expression(self):
|
||||
for template in self.sudo():
|
||||
for fname, field in template._fields.items():
|
||||
engine = getattr(field, 'render_engine', 'inline_template')
|
||||
if engine in ('qweb', 'qweb_view'):
|
||||
if self._is_dynamic_template_qweb(template[fname]):
|
||||
if self._has_unsafe_expression_template_qweb(template[fname], template.render_model, fname):
|
||||
return True
|
||||
else:
|
||||
if self._is_dynamic_template_inline_template(template[fname]):
|
||||
if self._has_unsafe_expression_template_inline_template(template[fname], template.render_model, fname):
|
||||
return True
|
||||
return False
|
||||
|
||||
@api.model
|
||||
def _is_dynamic_template_qweb(self, template_src):
|
||||
def _has_unsafe_expression_template_qweb(self, template_src, model, fname=None):
|
||||
if template_src:
|
||||
try:
|
||||
node = html.fragment_fromstring(template_src, create_parent='div')
|
||||
self.env["ir.qweb"].with_context(raise_on_code=True)._compile(node)
|
||||
except QWebException as e:
|
||||
if isinstance(e.__cause__, PermissionError):
|
||||
return True
|
||||
raise
|
||||
self.env["ir.qweb"].with_context(raise_on_forbidden_code_for_model=model)._generate_code(node)
|
||||
except PermissionError:
|
||||
return True
|
||||
return False
|
||||
|
||||
@api.model
|
||||
def _is_dynamic_template_inline_template(self, template_txt):
|
||||
def _has_unsafe_expression_template_inline_template(self, template_txt, model, fname=None):
|
||||
if template_txt:
|
||||
template_instructions = parse_inline_template(str(template_txt))
|
||||
if len(template_instructions) > 1 or template_instructions[0][1]:
|
||||
expressions = [inst[1] for inst in template_instructions]
|
||||
if not all(self.env["ir.qweb"]._is_expression_allowed(e, model) for e in expressions if e):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _check_access_right_dynamic_template(self):
|
||||
if not self.env.su and not self.env.user.has_group('mail.group_mail_template_editor') and self._is_dynamic():
|
||||
if not self.env.su and not self.env.user.has_group('mail.group_mail_template_editor') and self._has_unsafe_expression():
|
||||
group = self.env.ref('mail.group_mail_template_editor')
|
||||
raise AccessError(_('Only users belonging to the "%s" group can modify dynamic templates.', group.name))
|
||||
raise AccessError(
|
||||
_('Only members of %(group_name)s group are allowed to edit templates containing sensible placeholders',
|
||||
group_name=group.name)
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# RENDERING
|
||||
|
|
@ -257,14 +306,17 @@ class MailRenderMixin(models.AbstractModel):
|
|||
* various formatting tools;
|
||||
"""
|
||||
render_context = {
|
||||
'ctx': self.env.context,
|
||||
'format_addr': tools.formataddr,
|
||||
'format_date': lambda date, date_format=False, lang_code=False: format_date(self.env, date, date_format, lang_code),
|
||||
'format_datetime': lambda dt, tz=False, dt_format=False, lang_code=False: format_datetime(self.env, dt, tz, dt_format, lang_code),
|
||||
'format_time': lambda time, tz=False, time_format=False, lang_code=False: format_time(self.env, time, tz, time_format, lang_code),
|
||||
'format_amount': lambda amount, currency, lang_code=False: tools.format_amount(self.env, amount, currency, lang_code),
|
||||
'format_duration': lambda value: tools.format_duration(value),
|
||||
'user': self.env.user,
|
||||
'ctx': self._context,
|
||||
'format_duration': tools.format_duration,
|
||||
'is_html_empty': is_html_empty,
|
||||
'slug': self.env['ir.http']._slug,
|
||||
'user': self.env.user,
|
||||
'env': self.env,
|
||||
}
|
||||
render_context.update(copy.copy(template_env_globals))
|
||||
return render_context
|
||||
|
|
@ -274,6 +326,10 @@ class MailRenderMixin(models.AbstractModel):
|
|||
add_context=None, options=None):
|
||||
""" Render a raw QWeb template.
|
||||
|
||||
In addition to the generic evaluation context available, some other
|
||||
variables are added:
|
||||
* ``object``: record based on which the template is rendered;
|
||||
|
||||
:param str template_src: raw QWeb template to render;
|
||||
:param str model: see ``MailRenderMixin._render_template()``;
|
||||
:param list res_ids: see ``MailRenderMixin._render_template()``;
|
||||
|
|
@ -281,16 +337,20 @@ class MailRenderMixin(models.AbstractModel):
|
|||
:param dict add_context: additional context to give to renderer. It
|
||||
allows to add or update values to base rendering context generated
|
||||
by ``MailRenderMixin._render_eval_context()``;
|
||||
:param dict options: options for rendering (not used currently);
|
||||
:param dict options: options for rendering propagated to IrQweb render
|
||||
(see docstring for available options);
|
||||
|
||||
:return dict: {res_id: string of rendered template based on record}
|
||||
|
||||
:notice: Experimental. Use at your own risks only.
|
||||
:returns: {res_id: string of rendered template based on record}
|
||||
:rtype: dict
|
||||
"""
|
||||
results = dict.fromkeys(res_ids, u"")
|
||||
if not template_src:
|
||||
if not template_src or not res_ids:
|
||||
return results
|
||||
|
||||
if not self._has_unsafe_expression_template_qweb(template_src, model):
|
||||
# do not call the qweb engine
|
||||
return self._render_template_qweb_regex(template_src, model, res_ids)
|
||||
|
||||
# prepare template variables
|
||||
variables = self._render_eval_context()
|
||||
if add_context:
|
||||
|
|
@ -300,27 +360,74 @@ class MailRenderMixin(models.AbstractModel):
|
|||
|
||||
for record in self.env[model].browse(res_ids):
|
||||
variables['object'] = record
|
||||
options = options or {}
|
||||
if is_restricted:
|
||||
options['raise_on_forbidden_code_for_model'] = model
|
||||
try:
|
||||
render_result = self.env['ir.qweb']._render(
|
||||
html.fragment_fromstring(template_src, create_parent='div'),
|
||||
variables,
|
||||
raise_on_code=is_restricted,
|
||||
**(options or {})
|
||||
**options,
|
||||
)
|
||||
# remove the rendered tag <div> that was added in order to wrap potentially multiples nodes into one.
|
||||
render_result = render_result[5:-6]
|
||||
except Exception as e:
|
||||
if isinstance(e, QWebException) and isinstance(e.__cause__, PermissionError):
|
||||
if isinstance(e, QWebError) and isinstance(e.__cause__, PermissionError):
|
||||
group = self.env.ref('mail.group_mail_template_editor')
|
||||
raise AccessError(_('Only users belonging to the "%s" group can modify dynamic templates.', group.name)) from e
|
||||
_logger.info("Failed to render template : %s", template_src, exc_info=True)
|
||||
raise UserError(_("Failed to render QWeb template : %s\n\n%s)", template_src, traceback.format_exc())) from e
|
||||
raise AccessError(
|
||||
_('Only members of %(group_name)s group are allowed to edit templates containing sensible placeholders',
|
||||
group_name=group.name)
|
||||
) from e
|
||||
_logger.info("Failed to render template: %s", template_src, exc_info=True)
|
||||
raise UserError(
|
||||
_("Failed to render QWeb template: %(template_src)s\n\n%(template_traceback)s)",
|
||||
template_src=template_src,
|
||||
template_traceback=traceback.format_exc())
|
||||
) from e
|
||||
results[record.id] = render_result
|
||||
|
||||
return results
|
||||
|
||||
@api.model
|
||||
def _render_template_qweb_view(self, view_xmlid, model, res_ids,
|
||||
def _render_template_qweb_regex(self, template_src, model, res_ids):
|
||||
"""Render the template with regex instead of qweb to avoid `eval` call.
|
||||
|
||||
Supporting only QWeb allowed expressions, no custom variable in that mode.
|
||||
"""
|
||||
records = self.env[model].browse(res_ids)
|
||||
result = {}
|
||||
for record in records:
|
||||
def replace(match):
|
||||
tag = match.group(1)
|
||||
expr = match.group(3)
|
||||
default = match.group(9)
|
||||
if not self.env['ir.qweb']._is_expression_allowed(expr, model):
|
||||
raise SyntaxError(f"Invalid expression for the regex mode {expr!r}")
|
||||
|
||||
try:
|
||||
value = reduce(lambda rec, field: rec[field], expr.split('.')[1:], record) or default
|
||||
except KeyError:
|
||||
value = default
|
||||
|
||||
value = escape(value or '')
|
||||
return value if tag.lower() == 't' else f"<{tag}>{value}</{tag}>"
|
||||
|
||||
# normalize the HTML (add a parent div to avoid modification of the template)
|
||||
template_src = html_normalize(f'<div>{template_src}</div>')
|
||||
if template_src.startswith('<div>') and template_src.endswith('</div>'):
|
||||
template_src = template_src[5:-6]
|
||||
|
||||
result[record.id] = Markup(re.sub(
|
||||
r'''<(\w+)[\s|\n]+t-out=[\s|\n]*(\'|\")((\w|\.)+)(\2)[\s|\n]*((\/>)|(>[\s|\n]*([^<>]*?))[\s|\n]*<\/\1>)''',
|
||||
replace,
|
||||
template_src,
|
||||
flags=re.DOTALL,
|
||||
))
|
||||
|
||||
return result
|
||||
|
||||
@api.model
|
||||
def _render_template_qweb_view(self, view_ref, model, res_ids,
|
||||
add_context=None, options=None):
|
||||
""" Render a QWeb template based on an ir.ui.view content.
|
||||
|
||||
|
|
@ -328,37 +435,47 @@ class MailRenderMixin(models.AbstractModel):
|
|||
variables are added:
|
||||
* ``object``: record based on which the template is rendered;
|
||||
|
||||
:param str view_xmlid: source QWeb template. It should be a string
|
||||
XmlID allowing to fetch an ``ir.ui.view``;
|
||||
:param str/int/record view_ref: source QWeb template. It should be an
|
||||
XmlID allowing to fetch an ``ir.ui.view``, or an ID of a view or
|
||||
an ``ir.ui.view`` record;
|
||||
:param str model: see ``MailRenderMixin._render_template()``;
|
||||
:param list res_ids: see ``MailRenderMixin._render_template()``;
|
||||
|
||||
:param dict add_context: additional context to give to renderer. It
|
||||
allows to add or update values to base rendering context generated
|
||||
by ``MailRenderMixin._render_eval_context()``;
|
||||
:param dict options: options for rendering (not used currently);
|
||||
:param dict options: options for rendering propagated to IrQweb render
|
||||
(see docstring for available options);
|
||||
|
||||
:return dict: {res_id: string of rendered template based on record}
|
||||
:returns: {res_id: string of rendered template based on record}
|
||||
:rtype: dict
|
||||
"""
|
||||
# prevent wrong values (rendering on a void record set, ...)
|
||||
if any(r is None for r in res_ids):
|
||||
raise ValueError(_('Template rendering should be called on a valid record IDs.'))
|
||||
|
||||
results = {}
|
||||
if not res_ids:
|
||||
return results
|
||||
|
||||
# prepare template variables
|
||||
variables = self._render_eval_context()
|
||||
if add_context:
|
||||
variables.update(**add_context)
|
||||
|
||||
view_ref = view_ref.id if isinstance(view_ref, models.BaseModel) else view_ref
|
||||
for record in self.env[model].browse(res_ids):
|
||||
variables['object'] = record
|
||||
try:
|
||||
render_result = self.env['ir.qweb']._render(view_xmlid, variables, minimal_qcontext=True, raise_if_not_found=False, **(options or {}))
|
||||
render_result = self.env['ir.qweb']._render(
|
||||
view_ref,
|
||||
variables,
|
||||
minimal_qcontext=True,
|
||||
raise_if_not_found=False,
|
||||
**(options or {})
|
||||
)
|
||||
results[record.id] = render_result
|
||||
except Exception as e:
|
||||
_logger.info("Failed to render template : %s", view_xmlid, exc_info=True)
|
||||
raise UserError(_("Failed to render template : %s") % view_xmlid)
|
||||
_logger.info("Failed to render template: %s", view_ref, exc_info=True)
|
||||
raise UserError(
|
||||
_("Failed to render template: %(view_ref)s", view_ref=view_ref)
|
||||
) from e
|
||||
|
||||
return results
|
||||
|
||||
|
|
@ -379,33 +496,28 @@ class MailRenderMixin(models.AbstractModel):
|
|||
:param dict add_context: additional context to give to renderer. It
|
||||
allows to add or update values to base rendering context generated
|
||||
by ``MailRenderMixin._render_inline_template_eval_context()``;
|
||||
:param dict options: options for rendering;
|
||||
:param dict options: options for rendering (no options available
|
||||
currently);
|
||||
|
||||
:return dict: {res_id: string of rendered template based on record}
|
||||
:returns: {res_id: string of rendered template based on record}
|
||||
:rtype: dict
|
||||
"""
|
||||
# prevent wrong values (rendering on a void record set, ...)
|
||||
if any(r is None for r in res_ids):
|
||||
raise ValueError(_('Template rendering should be called on a valid record IDs.'))
|
||||
|
||||
results = dict.fromkeys(res_ids, u"")
|
||||
if not template_txt:
|
||||
results = dict.fromkeys(res_ids, "")
|
||||
if not template_txt or not res_ids:
|
||||
return results
|
||||
|
||||
template_instructions = parse_inline_template(str(template_txt))
|
||||
is_dynamic = len(template_instructions) > 1 or template_instructions[0][1]
|
||||
if not self._has_unsafe_expression_template_inline_template(str(template_txt), model):
|
||||
# do not call the qweb engine
|
||||
return self._render_template_inline_template_regex(str(template_txt), model, res_ids)
|
||||
|
||||
if (not self._unrestricted_rendering and is_dynamic and not self.env.is_admin() and
|
||||
not self.env.user.has_group('mail.group_mail_template_editor')):
|
||||
if (not self._unrestricted_rendering
|
||||
and not self.env.is_admin()
|
||||
and not self.env.user.has_group('mail.group_mail_template_editor')):
|
||||
group = self.env.ref('mail.group_mail_template_editor')
|
||||
raise AccessError(_('Only users belonging to the "%s" group can modify dynamic templates.', group.name))
|
||||
|
||||
if not is_dynamic:
|
||||
# Either the content is a raw text without placeholders, either we fail to
|
||||
# detect placeholders code. In both case we skip the rendering and return
|
||||
# the raw content, so even if we failed to detect dynamic code,
|
||||
# non "mail_template_editor" users will not gain rendering tools available
|
||||
# only for template specific group users
|
||||
return {record_id: template_instructions[0][0] for record_id in res_ids}
|
||||
raise AccessError(
|
||||
_('Only members of %(group_name)s group are allowed to edit templates containing sensible placeholders',
|
||||
group_name=group.name)
|
||||
)
|
||||
|
||||
# prepare template variables
|
||||
variables = self._render_eval_context()
|
||||
|
|
@ -416,25 +528,51 @@ class MailRenderMixin(models.AbstractModel):
|
|||
variables['object'] = record
|
||||
|
||||
try:
|
||||
results[record.id] = render_inline_template(template_instructions, variables)
|
||||
results[record.id] = render_inline_template(
|
||||
parse_inline_template(str(template_txt)),
|
||||
variables
|
||||
)
|
||||
except Exception as e:
|
||||
_logger.info("Failed to render inline_template: \n%s", str(template_txt), exc_info=True)
|
||||
raise UserError(_("Failed to render inline_template template : %s)", e))
|
||||
raise UserError(
|
||||
_("Failed to render inline_template template: %(template_txt)s",
|
||||
template_txt=template_txt)
|
||||
) from e
|
||||
|
||||
return results
|
||||
|
||||
@api.model
|
||||
def _render_template_postprocess(self, rendered):
|
||||
def _render_template_inline_template_regex(self, template_txt, model, res_ids):
|
||||
"""Render the inline template in static mode, without calling safe eval."""
|
||||
template = parse_inline_template(str(template_txt))
|
||||
records = self.env[model].browse(res_ids)
|
||||
result = {}
|
||||
for record in records:
|
||||
renderer = []
|
||||
for string, expression, default in template:
|
||||
renderer.append(string)
|
||||
if expression:
|
||||
if not self.env['ir.qweb']._is_expression_allowed(expression, model):
|
||||
raise SyntaxError(f"Invalid expression for the regex mode {expression!r}")
|
||||
try:
|
||||
value = reduce(lambda rec, field: rec[field], expression.split('.')[1:], record) or default
|
||||
except KeyError:
|
||||
value = default
|
||||
renderer.append(str(value))
|
||||
result[record.id] = ''.join(renderer)
|
||||
return result
|
||||
|
||||
@api.model
|
||||
def _render_template_postprocess(self, model, rendered):
|
||||
""" Tool method for post processing. In this method we ensure local
|
||||
links ('/shop/Basil-1') are replaced by global links ('https://www.
|
||||
mygarden.com/shop/Basil-1').
|
||||
|
||||
:param rendered: result of ``_render_template``;
|
||||
|
||||
:return dict: updated version of rendered per record ID;
|
||||
:returns: updated version of rendered per record ID;
|
||||
:rtype: dict
|
||||
"""
|
||||
# TODO make this a parameter
|
||||
model = self.env.context.get('mail_render_postprocess_model')
|
||||
res_ids = list(rendered.keys())
|
||||
for res_id, rendered_html in rendered.items():
|
||||
base_url = None
|
||||
|
|
@ -443,9 +581,22 @@ class MailRenderMixin(models.AbstractModel):
|
|||
rendered[res_id] = self._replace_local_links(rendered_html, base_url)
|
||||
return rendered
|
||||
|
||||
@api.model
|
||||
def _process_scheduled_date(self, scheduled_date):
|
||||
if scheduled_date:
|
||||
# parse scheduled_date to make it timezone agnostic UTC as expected
|
||||
# by the ORM
|
||||
parsed_datetime = self.env['mail.mail']._parse_scheduled_datetime(scheduled_date)
|
||||
scheduled_date = parsed_datetime.replace(tzinfo=None) if parsed_datetime else False
|
||||
return scheduled_date
|
||||
|
||||
@api.model
|
||||
def _render_template_get_valid_options(self):
|
||||
return {'post_process', 'preserve_comments'}
|
||||
|
||||
@api.model
|
||||
def _render_template(self, template_src, model, res_ids, engine='inline_template',
|
||||
add_context=None, options=None, post_process=False):
|
||||
add_context=None, options=None):
|
||||
""" Render the given string on records designed by model / res_ids using
|
||||
the given rendering engine. Possible engine are small_web, qweb, or
|
||||
qweb_view.
|
||||
|
|
@ -460,16 +611,38 @@ class MailRenderMixin(models.AbstractModel):
|
|||
:param dict add_context: additional context to give to renderer. It
|
||||
allows to add or update values to base rendering context generated
|
||||
by ``MailRenderMixin._render_<engine>_eval_context()``;
|
||||
:param dict options: options for rendering;
|
||||
:param boolean post_process: perform a post processing on rendered result
|
||||
(notably html links management). See``_render_template_postprocess``;
|
||||
:param dict options: options for rendering. Use in this method and also
|
||||
propagated to rendering sub-methods. May contain notably
|
||||
|
||||
:return dict: {res_id: string of rendered template based on record}
|
||||
boolean post_process: perform a post processing on rendered result
|
||||
(notably html links management). See``_render_template_postprocess``;
|
||||
boolean preserve_comments: if set, comments are preserved. Default
|
||||
behavior is to remove them. It is used notably for browser-specific
|
||||
code implemented like comments;
|
||||
|
||||
:returns: ``{res_id: string of rendered template based on record}``
|
||||
:rtype: dict
|
||||
"""
|
||||
if options is None:
|
||||
options = {}
|
||||
|
||||
if not isinstance(res_ids, (list, tuple)):
|
||||
raise ValueError(_('Template rendering should be called only using on a list of IDs.'))
|
||||
raise ValueError(
|
||||
_('Template rendering should only be called with a list of IDs. Received “%(res_ids)s” instead.',
|
||||
res_ids=res_ids)
|
||||
)
|
||||
if engine not in ('inline_template', 'qweb', 'qweb_view'):
|
||||
raise ValueError(_('Template rendering supports only inline_template, qweb, or qweb_view (view or raw).'))
|
||||
raise ValueError(
|
||||
_('Template rendering supports only inline_template, qweb, or qweb_view (view or raw); received %(engine)s instead.',
|
||||
engine=engine)
|
||||
)
|
||||
valid_render_options = self._render_template_get_valid_options()
|
||||
if not set((options or {}).keys()) <= valid_render_options:
|
||||
raise ValueError(
|
||||
_('Those values are not supported as options when rendering: %(param_names)s',
|
||||
param_names=', '.join(set(options.keys()) - valid_render_options)
|
||||
)
|
||||
)
|
||||
|
||||
if engine == 'qweb_view':
|
||||
rendered = self._render_template_qweb_view(template_src, model, res_ids,
|
||||
|
|
@ -480,8 +653,9 @@ class MailRenderMixin(models.AbstractModel):
|
|||
else:
|
||||
rendered = self._render_template_inline_template(template_src, model, res_ids,
|
||||
add_context=add_context, options=options)
|
||||
if post_process:
|
||||
rendered = self.with_context(mail_render_postprocess_model=model)._render_template_postprocess(rendered)
|
||||
|
||||
if options.get('post_process'):
|
||||
rendered = self._render_template_postprocess(model, rendered)
|
||||
|
||||
return rendered
|
||||
|
||||
|
|
@ -494,13 +668,21 @@ class MailRenderMixin(models.AbstractModel):
|
|||
Odoo model given by model;
|
||||
:param string engine: inline_template or qweb_view;
|
||||
|
||||
:return dict: {res_id: lang code (i.e. en_US)}
|
||||
:return: {res_id: lang code (i.e. en_US)}
|
||||
:rtype: dict
|
||||
"""
|
||||
self.ensure_one()
|
||||
if not isinstance(res_ids, (list, tuple)):
|
||||
raise ValueError(_('Template rendering for language should be called with a list of IDs.'))
|
||||
if self.lang:
|
||||
rendered_langs = self._render_template(
|
||||
self.lang, self.render_model, res_ids, engine=engine)
|
||||
else:
|
||||
rendered_langs = dict.fromkeys(res_ids, "")
|
||||
records = self.env[self.render_model].browse(res_ids)
|
||||
customers = records._mail_get_partners()
|
||||
for record in records:
|
||||
partner = customers[record.id][0] if customers[record.id] else self.env['res.partner']
|
||||
rendered_langs[record.id] = partner.lang
|
||||
|
||||
rendered_langs = self._render_template(self.lang, self.render_model, res_ids, engine=engine)
|
||||
return dict(
|
||||
(res_id, lang)
|
||||
for res_id, lang in rendered_langs.items()
|
||||
|
|
@ -513,9 +695,9 @@ class MailRenderMixin(models.AbstractModel):
|
|||
:param list res_ids: list of ids of records (all belonging to same model
|
||||
defined by self.render_model)
|
||||
:param string engine: inline_template, qweb, or qweb_view;
|
||||
|
||||
:return dict: {lang: (template with lang=lang_code if specific lang computed
|
||||
:return: {lang: (template with lang=lang_code if specific lang computed
|
||||
or template, res_ids targeted by that language}
|
||||
:rtype: dict
|
||||
"""
|
||||
self.ensure_one()
|
||||
|
||||
|
|
@ -532,8 +714,10 @@ class MailRenderMixin(models.AbstractModel):
|
|||
)
|
||||
|
||||
def _render_field(self, field, res_ids, engine='inline_template',
|
||||
compute_lang=False, set_lang=False,
|
||||
add_context=None, options=None, post_process=False):
|
||||
# lang options
|
||||
compute_lang=False, res_ids_lang=False, set_lang=False,
|
||||
# rendering context and options
|
||||
add_context=None, options=None):
|
||||
""" Given some record ids, render a template located on field on all
|
||||
records. ``field`` should be a field of self (i.e. ``body_html`` on
|
||||
``mail.template``). res_ids are record IDs linked to ``model`` field
|
||||
|
|
@ -547,37 +731,64 @@ class MailRenderMixin(models.AbstractModel):
|
|||
:param boolean compute_lang: compute language to render on translated
|
||||
version of the template instead of default (probably english) one.
|
||||
Language will be computed based on ``self.lang``;
|
||||
:param dict res_ids_lang: record id to lang, e.g. already rendered
|
||||
using another way;
|
||||
:param string set_lang: force language for rendering. It should be a
|
||||
valid lang code matching an activate res.lang. Checked only if
|
||||
``compute_lang`` is False;
|
||||
|
||||
:param dict add_context: additional context to give to renderer;
|
||||
:param dict options: options for rendering;
|
||||
:param boolean post_process: perform a post processing on rendered result
|
||||
(notably html links management). See``_render_template_postprocess``);
|
||||
:param dict options: options for rendering. Use in this method and also
|
||||
propagated to rendering sub-methods. Base values come from the field
|
||||
(coming from ``render_options`` parameter) and are updated by this
|
||||
optional dictionary. May contain notably
|
||||
|
||||
:return dict: {res_id: string of rendered template based on record}
|
||||
boolean post_process: perform a post processing on rendered result
|
||||
(notably html links management). See``_render_template_postprocess``;
|
||||
boolean preserve_comments: if set, comments are preserved. Default
|
||||
behavior is to remove them. It is used notably for browser-specific
|
||||
code implemented like comments;
|
||||
|
||||
:return: {res_id: string of rendered template based on record}
|
||||
:rtype: dict
|
||||
"""
|
||||
if options is None:
|
||||
options = {}
|
||||
|
||||
if field not in self:
|
||||
raise ValueError(
|
||||
_('Rendering of %(field_name)s is not possible as not defined on template.',
|
||||
field_name=field
|
||||
)
|
||||
)
|
||||
self.ensure_one()
|
||||
if compute_lang:
|
||||
templates_res_ids = self._classify_per_lang(res_ids)
|
||||
elif res_ids_lang:
|
||||
templates_res_ids = {}
|
||||
for res_id, lang in res_ids_lang.items():
|
||||
lang_values = templates_res_ids.setdefault(lang, (self.with_context(lang=lang), []))
|
||||
lang_values[1].append(res_id)
|
||||
elif set_lang:
|
||||
templates_res_ids = {set_lang: (self.with_context(lang=set_lang), res_ids)}
|
||||
else:
|
||||
templates_res_ids = {self._context.get('lang'): (self, res_ids)}
|
||||
templates_res_ids = {self.env.context.get('lang'): (self, res_ids)}
|
||||
|
||||
# rendering options
|
||||
engine = getattr(self._fields[field], 'render_engine', engine)
|
||||
options.update(**getattr(self._fields[field], 'render_options', {}))
|
||||
post_process = options.get('post_process') or post_process
|
||||
# rendering options (update default defined on field by asked options)
|
||||
f = self._fields[field]
|
||||
if hasattr(f, 'render_engine') and f.render_engine:
|
||||
engine = f.render_engine
|
||||
|
||||
return dict(
|
||||
(res_id, rendered)
|
||||
for lang, (template, tpl_res_ids) in templates_res_ids.items()
|
||||
render_options = options.copy() if options else {}
|
||||
if hasattr(f, 'render_options') and f.render_options:
|
||||
render_options = {**f.render_options, **render_options}
|
||||
|
||||
return {
|
||||
res_id: rendered
|
||||
for (template, tpl_res_ids) in templates_res_ids.values()
|
||||
for res_id, rendered in template._render_template(
|
||||
template[field], template.render_model, tpl_res_ids, engine=engine,
|
||||
add_context=add_context, options=options, post_process=post_process
|
||||
template[field],
|
||||
template.render_model,
|
||||
tpl_res_ids,
|
||||
engine=engine,
|
||||
add_context=add_context,
|
||||
options=render_options,
|
||||
).items()
|
||||
)
|
||||
}
|
||||
|
|
|
|||
295
odoo-bringout-oca-ocb-mail/mail/models/mail_scheduled_message.py
Normal file
295
odoo-bringout-oca-ocb-mail/mail/models/mail_scheduled_message.py
Normal file
|
|
@ -0,0 +1,295 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import json
|
||||
from collections import defaultdict
|
||||
from markupsafe import Markup
|
||||
|
||||
from odoo import _, api, fields, models, modules
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
from odoo.tools.misc import clean_context
|
||||
|
||||
import logging
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MailScheduledMessage(models.Model):
|
||||
""" Scheduled message model (holds post values generated by the composer to delay the
|
||||
posting of the message). Different from mail.message.schedule that posts the message but
|
||||
delays the notification process.
|
||||
|
||||
Todo: when adding support for scheduling messages in mass_mail mode, could add a reference to
|
||||
the "parent" composer (by making 'mail.compose.message' not transient anymore). This reference
|
||||
could then be used to cancel every message scheduled "at the same time" (from one composer),
|
||||
and to get the static 'notification parameters' (mail_server_id, auto_delete,...) instead of
|
||||
duplicating them for each scheduled message.
|
||||
Currently as scheduling is allowed in monocomment only, we don't have duplicates and we only
|
||||
have static notification parameters, but some will become dynamic when adding mass_mail support
|
||||
such as 'email_from' and 'force_email_lang'.
|
||||
"""
|
||||
_name = 'mail.scheduled.message'
|
||||
_description = 'Scheduled Message'
|
||||
|
||||
# content
|
||||
subject = fields.Char('Subject')
|
||||
body = fields.Html('Contents', sanitize_style=True)
|
||||
scheduled_date = fields.Datetime('Scheduled Date', required=True)
|
||||
attachment_ids = fields.Many2many(
|
||||
'ir.attachment', 'scheduled_message_attachment_rel',
|
||||
'scheduled_message_id', 'attachment_id',
|
||||
string='Attachments', bypass_search_access=True)
|
||||
composition_comment_option = fields.Selection(
|
||||
[('reply_all', 'Reply-All'), ('forward', 'Forward')],
|
||||
string='Comment Options') # mainly used for view in specific comment modes
|
||||
# related document
|
||||
model = fields.Char('Related Document Model', required=True)
|
||||
res_id = fields.Many2oneReference('Related Document Id', model_field='model', required=True)
|
||||
# origin
|
||||
author_id = fields.Many2one('res.partner', 'Author', required=True)
|
||||
# recipients
|
||||
partner_ids = fields.Many2many('res.partner', string='Recipients')
|
||||
# characteristics
|
||||
is_note = fields.Boolean('Is a note', default=False, help="If the message will be posted as a Note.")
|
||||
# notify parameters (email_from, mail_server_id, force_email_lang,...)
|
||||
notification_parameters = fields.Text('Notification parameters')
|
||||
# context used when posting the message to trigger some actions (eg. change so state when sending quotation)
|
||||
send_context = fields.Json('Sending Context')
|
||||
|
||||
@api.constrains('model')
|
||||
def _check_model(self):
|
||||
if not all(model in self.pool and issubclass(self.pool[model], self.pool['mail.thread']) for model in self.mapped("model")):
|
||||
raise ValidationError(_("A message cannot be scheduled on a model that does not have a mail thread."))
|
||||
|
||||
@api.constrains('scheduled_date')
|
||||
def _check_scheduled_date(self):
|
||||
if any(scheduled_message.scheduled_date < fields.Datetime().now() for scheduled_message in self):
|
||||
raise ValidationError(_("A Scheduled Message cannot be scheduled in the past"))
|
||||
|
||||
# ------------------------------------------------------
|
||||
# CRUD / ORM
|
||||
# ------------------------------------------------------
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
# make sure user can post on the related records
|
||||
for vals in vals_list:
|
||||
self._check(vals)
|
||||
|
||||
# clean context to prevent usage of default_model and default_res_id
|
||||
scheduled_messages = super(MailScheduledMessage, self.with_context(clean_context(self.env.context))).create(vals_list)
|
||||
# transfer attachments from composer to scheduled messages
|
||||
for scheduled_message in scheduled_messages:
|
||||
if attachments := scheduled_message.attachment_ids:
|
||||
attachments.filtered(
|
||||
lambda a: a.res_model == 'mail.compose.message' and not a.res_id and a.create_uid.id == self.env.uid
|
||||
).write({
|
||||
'res_model': scheduled_message._name,
|
||||
'res_id': scheduled_message.id,
|
||||
})
|
||||
# schedule cron trigger
|
||||
if scheduled_messages:
|
||||
self.env.ref('mail.ir_cron_post_scheduled_message')._trigger_list(
|
||||
set(scheduled_messages.mapped('scheduled_date'))
|
||||
)
|
||||
return scheduled_messages
|
||||
|
||||
@api.model
|
||||
def _search(self, domain, offset=0, limit=None, order=None, *, bypass_access=False, **kwargs):
|
||||
""" Override that add specific access rights to only get the ids of the messages
|
||||
that are scheduled on the records on which the user has mail_post (or read) access
|
||||
"""
|
||||
if self.env.is_superuser() or bypass_access:
|
||||
return super()._search(domain, offset, limit, order, bypass_access=True, **kwargs)
|
||||
|
||||
# don't use the ORM to avoid cache pollution
|
||||
query = super()._search(domain, offset, limit, order, **kwargs)
|
||||
fnames_to_read = ['id', 'model', 'res_id']
|
||||
rows = self.env.execute_query(query.select(
|
||||
*[self._field_to_sql(self._table, fname) for fname in fnames_to_read],
|
||||
))
|
||||
|
||||
# group res_ids by model and determine accessible records
|
||||
model_ids = defaultdict(set)
|
||||
for __, model, res_id in rows:
|
||||
model_ids[model].add(res_id)
|
||||
|
||||
allowed_ids = defaultdict(set)
|
||||
for model, res_ids in model_ids.items():
|
||||
records = self.env[model].browse(res_ids)
|
||||
operation = getattr(records, '_mail_post_access', 'write')
|
||||
if records.has_access(operation):
|
||||
allowed_ids[model] = set(records._filtered_access(operation)._ids)
|
||||
|
||||
scheduled_messages = self.browse(
|
||||
msg_id
|
||||
for msg_id, res_model, res_id in rows
|
||||
if res_id in allowed_ids[res_model]
|
||||
)
|
||||
|
||||
return scheduled_messages._as_query(order)
|
||||
|
||||
def unlink(self):
|
||||
self._check()
|
||||
return super().unlink()
|
||||
|
||||
def write(self, vals):
|
||||
# prevent changing the records on which the messages are scheduled
|
||||
if vals.get('model') or vals.get('res_id'):
|
||||
raise UserError(_('You are not allowed to change the target record of a scheduled message.'))
|
||||
# make sure user can write on the record the messages are scheduled on
|
||||
self._check()
|
||||
res = super().write(vals)
|
||||
if new_scheduled_date := vals.get('scheduled_date'):
|
||||
self.env.ref('mail.ir_cron_post_scheduled_message')._trigger(fields.Datetime.to_datetime(new_scheduled_date))
|
||||
return res
|
||||
|
||||
# ------------------------------------------------------
|
||||
# Actions
|
||||
# ------------------------------------------------------
|
||||
|
||||
def open_edit_form(self):
|
||||
self.ensure_one()
|
||||
return {
|
||||
'type': 'ir.actions.act_window',
|
||||
'name': _("Edit Scheduled Note") if self.is_note else _("Edit Scheduled Message"),
|
||||
'res_model': self._name,
|
||||
'view_mode': 'form',
|
||||
'views': [[False, 'form']],
|
||||
'target': 'new',
|
||||
'res_id': self.id,
|
||||
}
|
||||
|
||||
def post_message(self):
|
||||
self.ensure_one()
|
||||
if self.env.is_admin() or self.create_uid.id == self.env.uid:
|
||||
self._post_message()
|
||||
else:
|
||||
raise UserError(_("You are not allowed to send this scheduled message"))
|
||||
|
||||
def _message_created_hook(self, message):
|
||||
"""Hook called after scheduled messages have been posted."""
|
||||
self.ensure_one()
|
||||
|
||||
def _post_message(self, raise_exception=True):
|
||||
""" Post the scheduled messages.
|
||||
They are posted using their creator as user so that one can check that the creator has
|
||||
still post permission on the related record, and to allow for the attachments to be
|
||||
transferred to the messages (see _process_attachments_for_post in mail.thread)
|
||||
if raise_exception is set to False, the method will skip the posting of a message
|
||||
instead of raising an error, and send a notification to the author about the failure.
|
||||
This is useful when scheduled messages are sent from the _post_messages_cron.
|
||||
"""
|
||||
notification_parameters_whitelist = self._notification_parameters_whitelist()
|
||||
auto_commit = not modules.module.current_test
|
||||
for scheduled_message in self:
|
||||
message_creator = scheduled_message.create_uid
|
||||
try:
|
||||
scheduled_message.with_user(message_creator)._check()
|
||||
message = self.env[scheduled_message.model].browse(scheduled_message.res_id).with_context(
|
||||
clean_context(scheduled_message.send_context or {})
|
||||
).with_user(message_creator).message_post(
|
||||
attachment_ids=list(scheduled_message.attachment_ids.ids),
|
||||
author_id=scheduled_message.author_id.id,
|
||||
subject=scheduled_message.subject,
|
||||
body=scheduled_message.body,
|
||||
partner_ids=list(scheduled_message.partner_ids.ids),
|
||||
subtype_xmlid='mail.mt_note' if scheduled_message.is_note else 'mail.mt_comment',
|
||||
**{k: v for k, v in json.loads(scheduled_message.notification_parameters or '{}').items() if k in notification_parameters_whitelist},
|
||||
)
|
||||
scheduled_message._message_created_hook(message)
|
||||
if auto_commit:
|
||||
self.env.cr.commit()
|
||||
except Exception:
|
||||
if raise_exception:
|
||||
raise
|
||||
_logger.info("Posting of scheduled message with ID %s failed", scheduled_message.id, exc_info=True)
|
||||
# notify user about the failure (send content as user might have lost access to the record)
|
||||
if auto_commit:
|
||||
self.env.cr.rollback()
|
||||
try:
|
||||
self.env['mail.thread'].message_notify(
|
||||
partner_ids=[message_creator.partner_id.id],
|
||||
subject=_("A scheduled message could not be sent"),
|
||||
body=_("The message scheduled on %(model)s(%(id)s) with the following content could not be sent:%(original_message)s",
|
||||
model=scheduled_message.model,
|
||||
id=scheduled_message.res_id,
|
||||
original_message=Markup("<br>-----<br>%s<br>-----<br>") % scheduled_message.body,
|
||||
)
|
||||
)
|
||||
if auto_commit:
|
||||
self.env.cr.commit()
|
||||
except Exception:
|
||||
# in case even message_notify fails, make sure the failing scheduled message
|
||||
# will be deleted
|
||||
_logger.exception("The notification about the failed scheduled message could not be sent")
|
||||
if auto_commit:
|
||||
self.env.cr.rollback()
|
||||
self.unlink()
|
||||
|
||||
# ------------------------------------------------------
|
||||
# Business Methods
|
||||
# ------------------------------------------------------
|
||||
|
||||
@api.model
|
||||
def _check(self, values=None):
|
||||
""" Restrict the access to a scheduled message.
|
||||
Access is based on the record on which the scheduled message will be posted to.
|
||||
:param values: dict with model and res_id on which to perform the check
|
||||
"""
|
||||
if self.env.is_superuser():
|
||||
return True
|
||||
|
||||
model_ids = defaultdict(set)
|
||||
# sudo as anyways we check access on the related records
|
||||
for scheduled_message in self.sudo():
|
||||
model_ids[scheduled_message.model].add(scheduled_message.res_id)
|
||||
if values:
|
||||
model_ids[values['model']].add(values['res_id'])
|
||||
|
||||
for model, res_ids in model_ids.items():
|
||||
records = self.env[model].browse(res_ids)
|
||||
operation = getattr(records, '_mail_post_access', 'write')
|
||||
records.check_access(operation)
|
||||
|
||||
@api.model
|
||||
def _notification_parameters_whitelist(self):
|
||||
""" Parameters that can be used when posting the scheduled messages.
|
||||
"""
|
||||
return {
|
||||
'email_add_signature',
|
||||
'email_from',
|
||||
'email_layout_xmlid',
|
||||
'force_email_lang',
|
||||
'mail_activity_type_id',
|
||||
'mail_auto_delete',
|
||||
'mail_server_id',
|
||||
'message_type',
|
||||
'model_description',
|
||||
'reply_to',
|
||||
'reply_to_force_new',
|
||||
'subtype_id',
|
||||
}
|
||||
|
||||
@api.model
|
||||
def _post_messages_cron(self, limit=50):
|
||||
""" Posts past-due scheduled messages.
|
||||
"""
|
||||
domain = [('scheduled_date', '<=', fields.Datetime.now())]
|
||||
messages_to_post = self.search(domain, limit=limit)
|
||||
_logger.info("Posting %s scheduled messages", len(messages_to_post))
|
||||
messages_to_post.with_context(mail_notify_force_send=True)._post_message(raise_exception=False)
|
||||
|
||||
# restart cron if needed
|
||||
if self.search_count(domain, limit=1):
|
||||
self.env.ref('mail.ir_cron_post_scheduled_message')._trigger()
|
||||
|
||||
def _to_store_defaults(self, target):
|
||||
return [
|
||||
Store.Many("attachment_ids"),
|
||||
Store.One("author_id"),
|
||||
"body",
|
||||
"is_note",
|
||||
"scheduled_date",
|
||||
"subject",
|
||||
]
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
|
||||
|
||||
class MailShortcode(models.Model):
|
||||
""" Shortcode
|
||||
Canned Responses, allowing the user to defined shortcuts in its message. Should be applied before storing message in database.
|
||||
Emoji allowing replacing text with image for visual effect. Should be applied when the message is displayed (only for final rendering).
|
||||
These shortcodes are global and are available for every user.
|
||||
"""
|
||||
|
||||
_name = 'mail.shortcode'
|
||||
_description = 'Canned Response / Shortcode'
|
||||
source = fields.Char('Shortcut', required=True, index='trigram',
|
||||
help="Shortcut that will automatically be substituted with longer content in your messages."
|
||||
" Type ':' followed by the name of your shortcut (e.g. :hello) to use in your messages.")
|
||||
substitution = fields.Text('Substitution', required=True,
|
||||
help="Content that will automatically replace the shortcut of your choosing. This content can still be adapted before sending your message.")
|
||||
description = fields.Char('Description')
|
||||
message_ids = fields.Many2one('mail.message', string="Messages", store=False)
|
||||
|
|
@ -3,20 +3,23 @@
|
|||
|
||||
import base64
|
||||
import logging
|
||||
from ast import literal_eval
|
||||
|
||||
from odoo import _, api, fields, models, tools, Command
|
||||
from odoo.exceptions import UserError
|
||||
from odoo import _, api, fields, models, tools
|
||||
from odoo.exceptions import ValidationError, UserError
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import is_html_empty
|
||||
from odoo.tools.safe_eval import safe_eval, time
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MailTemplate(models.Model):
|
||||
"Templates for sending email"
|
||||
_name = "mail.template"
|
||||
_name = 'mail.template'
|
||||
_inherit = ['mail.render.mixin', 'template.reset.mixin']
|
||||
_description = 'Email Templates'
|
||||
_order = 'name'
|
||||
_order = 'user_id, name, id'
|
||||
|
||||
_unrestricted_rendering = True
|
||||
|
||||
|
|
@ -27,10 +30,15 @@ class MailTemplate(models.Model):
|
|||
res['model_id'] = self.env['ir.model']._get(res.pop('model')).id
|
||||
return res
|
||||
|
||||
def _get_non_abstract_models_domain(self):
|
||||
registry = self.env.registry
|
||||
abstract_models = [model for model in registry if registry[model]._abstract]
|
||||
return [('model', 'not in', abstract_models)]
|
||||
|
||||
# description
|
||||
name = fields.Char('Name', translate=True)
|
||||
description = fields.Text(
|
||||
'Template description', translate=True,
|
||||
'Template Description', translate=True,
|
||||
help="This field is used for internal description of the template's usage.")
|
||||
active = fields.Boolean(default=True)
|
||||
template_category = fields.Selection(
|
||||
|
|
@ -38,15 +46,17 @@ class MailTemplate(models.Model):
|
|||
('hidden_template', 'Hidden Template'),
|
||||
('custom_template', 'Custom Template')],
|
||||
compute="_compute_template_category", search="_search_template_category")
|
||||
model_id = fields.Many2one('ir.model', 'Applies to')
|
||||
model_id = fields.Many2one('ir.model', 'Applies to', ondelete='cascade', domain=_get_non_abstract_models_domain)
|
||||
model = fields.Char('Related Document Model', related='model_id.model', index=True, store=True, readonly=True)
|
||||
subject = fields.Char('Subject', translate=True, prefetch=True, help="Subject (placeholders may be used here)")
|
||||
email_from = fields.Char('From',
|
||||
email_from = fields.Char('Send From',
|
||||
help="Sender address (placeholders may be used here). If not set, the default "
|
||||
"value will be the author's email alias if configured, or email address.")
|
||||
user_id = fields.Many2one('res.users', string='Owner', domain="[('share', '=', False)]")
|
||||
# recipients
|
||||
use_default_to = fields.Boolean(
|
||||
'Default recipients',
|
||||
'Default Recipients',
|
||||
default=True,
|
||||
help="Default recipients of the record:\n"
|
||||
"- partner (using id on a partner or the partner_id field) OR\n"
|
||||
"- email (using email_from or email field)")
|
||||
|
|
@ -56,17 +66,25 @@ class MailTemplate(models.Model):
|
|||
email_cc = fields.Char('Cc', help="Carbon copy recipients (placeholders may be used here)")
|
||||
reply_to = fields.Char('Reply To', help="Email address to which replies will be redirected when sending emails in mass; only used when the reply is not logged in the original discussion thread.")
|
||||
# content
|
||||
body_html = fields.Html('Body', render_engine='qweb', translate=True, prefetch=True, sanitize=False)
|
||||
attachment_ids = fields.Many2many('ir.attachment', 'email_template_attachment_rel', 'email_template_id',
|
||||
'attachment_id', 'Attachments',
|
||||
help="You may attach files to this template, to be added to all "
|
||||
"emails created from this template")
|
||||
report_name = fields.Char('Report Filename', translate=True, prefetch=True,
|
||||
help="Name to use for the generated report file (may contain placeholders)\n"
|
||||
"The extension can be omitted and will then come from the report type.")
|
||||
report_template = fields.Many2one('ir.actions.report', 'Optional report to print and attach')
|
||||
body_html = fields.Html(
|
||||
'Body', render_engine='qweb', render_options={'post_process': True},
|
||||
prefetch=True, translate=True, sanitize='email_outgoing',
|
||||
)
|
||||
attachment_ids = fields.Many2many(
|
||||
'ir.attachment', 'email_template_attachment_rel',
|
||||
'email_template_id', 'attachment_id',
|
||||
string='Attachments',
|
||||
bypass_search_access=True,
|
||||
)
|
||||
report_template_ids = fields.Many2many(
|
||||
'ir.actions.report', relation='mail_template_ir_actions_report_rel',
|
||||
column1='mail_template_id',
|
||||
column2='ir_actions_report_id',
|
||||
string='Dynamic Reports',
|
||||
domain="[('model', '=', model)]")
|
||||
email_layout_xmlid = fields.Char('Email Notification Layout', copy=False)
|
||||
# options
|
||||
mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing Mail Server', readonly=False,
|
||||
mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing Mail Server', readonly=False, index='btree_not_null',
|
||||
help="Optional preferred server for outgoing mails. If not set, the highest "
|
||||
"priority one will be used.")
|
||||
scheduled_date = fields.Char('Scheduled Date', help="If set, the queue manager will send the email after the date. If not set, the email will be send as soon as possible. You can use dynamic expression.")
|
||||
|
|
@ -81,6 +99,27 @@ class MailTemplate(models.Model):
|
|||
# access
|
||||
can_write = fields.Boolean(compute='_compute_can_write',
|
||||
help='The current user can edit the template.')
|
||||
is_template_editor = fields.Boolean(compute="_compute_is_template_editor")
|
||||
|
||||
# view display
|
||||
has_dynamic_reports = fields.Boolean(compute='_compute_has_dynamic_reports')
|
||||
has_mail_server = fields.Boolean(compute='_compute_has_mail_server')
|
||||
|
||||
@api.depends('model')
|
||||
def _compute_has_dynamic_reports(self):
|
||||
number_of_dynamic_reports_per_model = dict(
|
||||
self.env['ir.actions.report'].sudo()._read_group(
|
||||
domain=[('model', 'in', self.mapped('model'))],
|
||||
groupby=['model'],
|
||||
aggregates=['id:count'],
|
||||
having=[('__count', '>', 0)]))
|
||||
for template in self:
|
||||
template.has_dynamic_reports = template.model in number_of_dynamic_reports_per_model
|
||||
|
||||
def _compute_has_mail_server(self):
|
||||
has_mail_server = bool(self.env['ir.mail_server'].sudo().search([], limit=1))
|
||||
for template in self:
|
||||
template.has_mail_server = has_mail_server
|
||||
|
||||
# Overrides of mail.render.mixin
|
||||
@api.depends('model')
|
||||
|
|
@ -90,10 +129,14 @@ class MailTemplate(models.Model):
|
|||
|
||||
@api.depends_context('uid')
|
||||
def _compute_can_write(self):
|
||||
writable_templates = self._filter_access_rules('write')
|
||||
writable_templates = self._filtered_access('write')
|
||||
for template in self:
|
||||
template.can_write = template in writable_templates
|
||||
|
||||
@api.depends_context('uid')
|
||||
def _compute_is_template_editor(self):
|
||||
self.is_template_editor = self.env.user.has_group('mail.group_mail_template_editor')
|
||||
|
||||
@api.depends('active', 'description')
|
||||
def _compute_template_category(self):
|
||||
""" Base templates (or master templates) are active templates having
|
||||
|
|
@ -116,19 +159,34 @@ class MailTemplate(models.Model):
|
|||
|
||||
@api.model
|
||||
def _search_template_category(self, operator, value):
|
||||
if operator in ['in', 'not in'] and isinstance(value, list):
|
||||
value_templates = self.env['mail.template'].search([]).filtered(
|
||||
lambda t: t.template_category in value
|
||||
)
|
||||
return [('id', operator, value_templates.ids)]
|
||||
if operator != 'in':
|
||||
return NotImplemented
|
||||
|
||||
if operator in ['=', '!='] and isinstance(value, str):
|
||||
value_templates = self.env['mail.template'].search([]).filtered(
|
||||
lambda t: t.template_category == value
|
||||
)
|
||||
return [('id', 'in' if operator == "=" else 'not in', value_templates.ids)]
|
||||
templates_with_xmlid = self.env['ir.model.data'].sudo()._search([
|
||||
('model', '=', 'mail.template'),
|
||||
('module', '!=', '__export__')
|
||||
]).subselect('res_id')
|
||||
|
||||
raise NotImplementedError(_('Operation not supported'))
|
||||
domain = Domain.FALSE
|
||||
|
||||
if 'hidden_template' in value:
|
||||
domain |= Domain(['|', ('active', '=', False), '&', ('description', '=', False), ('id', 'in', templates_with_xmlid)])
|
||||
|
||||
if 'base_template' in value:
|
||||
domain |= Domain([('active', '=', True), ('description', '!=', False), ('id', 'in', templates_with_xmlid)])
|
||||
|
||||
if 'custom_template' in value:
|
||||
domain |= Domain([('active', '=', True), ('template_category', 'not in', ['base_template', 'hidden_template'])])
|
||||
|
||||
return domain
|
||||
|
||||
@api.onchange("model")
|
||||
def _onchange_model(self):
|
||||
for template in self.filtered("model"):
|
||||
target = self.env[template.model]
|
||||
if hasattr(target, "_mail_template_default_values"):
|
||||
upd_values = target._mail_template_default_values()
|
||||
template.update(upd_values)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# CRUD
|
||||
|
|
@ -139,13 +197,61 @@ class MailTemplate(models.Model):
|
|||
record.attachment_ids.write({'res_model': record._name, 'res_id': record.id})
|
||||
return self
|
||||
|
||||
def _check_abstract_models(self, vals_list):
|
||||
model_names = self.sudo().env['ir.model'].browse(filter(None, (
|
||||
vals.get('model_id') for vals in vals_list
|
||||
))).mapped('model')
|
||||
for model in model_names:
|
||||
if self.env[model]._abstract:
|
||||
raise ValidationError(_('You may not define a template on an abstract model: %s', model))
|
||||
|
||||
def _check_can_be_rendered(self, fnames=None, render_options=None):
|
||||
dynamic_fnames = self._get_dynamic_field_names()
|
||||
|
||||
for template in self:
|
||||
model = template.sudo().model_id.model
|
||||
if not model:
|
||||
return
|
||||
record = template.env[model].search([], limit=1)
|
||||
if not record:
|
||||
return
|
||||
|
||||
fnames = fnames & dynamic_fnames if fnames else dynamic_fnames
|
||||
for fname in fnames:
|
||||
try:
|
||||
template._render_field(fname, record.ids, options=render_options)
|
||||
except Exception as e:
|
||||
_logger.exception("Error while checking if template can be rendered for field %s", fname)
|
||||
raise ValidationError(
|
||||
_("Oops! We couldn't save your template due to an issue with this value: %(template_txt)s. Correct it and try again.",
|
||||
template_txt=template[fname])
|
||||
) from e
|
||||
|
||||
def _get_dynamic_field_names(self):
|
||||
return {
|
||||
'body_html',
|
||||
'email_cc',
|
||||
'email_from',
|
||||
'email_to',
|
||||
'lang',
|
||||
'partner_to',
|
||||
'reply_to',
|
||||
'scheduled_date',
|
||||
'subject',
|
||||
}
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
return super().create(vals_list)\
|
||||
._fix_attachment_ownership()
|
||||
self._check_abstract_models(vals_list)
|
||||
records = super().create(vals_list)
|
||||
records._check_can_be_rendered(fnames=None)
|
||||
records._fix_attachment_ownership()
|
||||
return records
|
||||
|
||||
def write(self, vals):
|
||||
self._check_abstract_models([vals])
|
||||
super().write(vals)
|
||||
self._check_can_be_rendered(fnames=vals.keys() if {'model', 'model_id'}.isdisjoint(vals.keys()) else None)
|
||||
self._fix_attachment_ownership()
|
||||
return True
|
||||
|
||||
|
|
@ -153,11 +259,33 @@ class MailTemplate(models.Model):
|
|||
self.unlink_action()
|
||||
return super(MailTemplate, self).unlink()
|
||||
|
||||
@api.returns('self', lambda value: value.id)
|
||||
def copy_data(self, default=None):
|
||||
vals_list = super().copy_data(default=default)
|
||||
for vals, template in zip(vals_list, self):
|
||||
if 'name' not in (default or {}) and vals.get('name') == template.name:
|
||||
vals['name'] = self.env._("%s (copy)", template.name)
|
||||
return vals_list
|
||||
|
||||
def copy(self, default=None):
|
||||
default = dict(default or {},
|
||||
name=_("%s (copy)", self.name))
|
||||
return super(MailTemplate, self).copy(default=default)
|
||||
default = default or {}
|
||||
copy_attachments = 'attachment_ids' not in default
|
||||
if copy_attachments:
|
||||
default['attachment_ids'] = False
|
||||
copies = super().copy(default=default)
|
||||
|
||||
if copy_attachments:
|
||||
for copy, original in zip(copies, self):
|
||||
# copy attachments, to avoid ownership / ACLs issue
|
||||
# anyway filestore should keep a single reference to content
|
||||
if original.attachment_ids:
|
||||
copy.write({
|
||||
'attachment_ids': [
|
||||
(4, att_copy.id) for att_copy in (
|
||||
attachment.copy(default={'res_id': copy.id, 'res_model': original._name}) for attachment in original.attachment_ids
|
||||
)
|
||||
]
|
||||
})
|
||||
return copies
|
||||
|
||||
def unlink_action(self):
|
||||
for template in self:
|
||||
|
|
@ -168,15 +296,19 @@ class MailTemplate(models.Model):
|
|||
def create_action(self):
|
||||
ActWindow = self.env['ir.actions.act_window']
|
||||
view = self.env.ref('mail.email_compose_message_wizard_form')
|
||||
|
||||
for template in self:
|
||||
context = {
|
||||
'default_composition_mode': 'mass_mail',
|
||||
'default_model': template.model,
|
||||
'default_template_id' : template.id,
|
||||
}
|
||||
button_name = _('Send Mail (%s)', template.name)
|
||||
action = ActWindow.create({
|
||||
'name': button_name,
|
||||
'type': 'ir.actions.act_window',
|
||||
'res_model': 'mail.compose.message',
|
||||
'context': "{'default_composition_mode': 'mass_mail', 'default_template_id' : %d, 'default_use_template': True}" % (template.id),
|
||||
'view_mode': 'form,tree',
|
||||
'context': repr(context),
|
||||
'view_mode': 'form,list',
|
||||
'view_id': view.id,
|
||||
'target': 'new',
|
||||
'binding_model_id': template.model_id.id,
|
||||
|
|
@ -185,122 +317,349 @@ class MailTemplate(models.Model):
|
|||
|
||||
return True
|
||||
|
||||
def action_open_mail_preview(self):
|
||||
action = self.env.ref('mail.mail_template_preview_action')._get_action_dict()
|
||||
action.update({'name': _('Template Preview: "%(template_name)s"', template_name=self.name)})
|
||||
return action
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# MESSAGE/EMAIL VALUES GENERATION
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def generate_recipients(self, results, res_ids):
|
||||
"""Generates the recipients of the template. Default values can ben generated
|
||||
instead of the template values if requested by template or context.
|
||||
Emails (email_to, email_cc) can be transformed into partners if requested
|
||||
in the context. """
|
||||
self.ensure_one()
|
||||
def _generate_template_attachments(self, res_ids, render_fields,
|
||||
render_results=None):
|
||||
""" Render attachments of template 'self', returning values for records
|
||||
given by 'res_ids'. Note that ``report_template_ids`` returns values for
|
||||
'attachments', as we have a list of tuple (report_name, base64 value)
|
||||
for those reports. It is considered as being the job of callers to
|
||||
transform those attachments into valid ``ir.attachment`` records.
|
||||
|
||||
if self.use_default_to or self._context.get('tpl_force_default_to'):
|
||||
records = self.env[self.model].browse(res_ids).sudo()
|
||||
default_recipients = records._message_get_default_recipients()
|
||||
for res_id, recipients in default_recipients.items():
|
||||
results[res_id].pop('partner_to', None)
|
||||
results[res_id].update(recipients)
|
||||
:param list res_ids: list of record IDs on which template is rendered;
|
||||
:param list render_fields: list of fields to render on template which
|
||||
are specific to attachments, e.g. attachment_ids or report_template_ids;
|
||||
:param dict render_results: res_ids-based dictionary of render values.
|
||||
For each res_id, a dict of values based on render_fields is given
|
||||
|
||||
records_company = None
|
||||
if self._context.get('tpl_partners_only') and self.model and results and 'company_id' in self.env[self.model]._fields:
|
||||
records = self.env[self.model].browse(results.keys()).read(['company_id'])
|
||||
records_company = {rec['id']: (rec['company_id'][0] if rec['company_id'] else None) for rec in records}
|
||||
|
||||
for res_id, values in results.items():
|
||||
partner_ids = values.get('partner_ids', list())
|
||||
if self._context.get('tpl_partners_only'):
|
||||
mails = tools.email_split(values.pop('email_to', '')) + tools.email_split(values.pop('email_cc', ''))
|
||||
Partner = self.env['res.partner']
|
||||
if records_company:
|
||||
Partner = Partner.with_context(default_company_id=records_company[res_id])
|
||||
for mail in mails:
|
||||
partner = Partner.find_or_create(mail)
|
||||
partner_ids.append(partner.id)
|
||||
partner_to = values.pop('partner_to', '')
|
||||
if partner_to:
|
||||
# placeholders could generate '', 3, 2 due to some empty field values
|
||||
tpl_partner_ids = [int(pid.strip()) for pid in partner_to.split(',') if (pid and pid.strip().isdigit())]
|
||||
partner_ids += self.env['res.partner'].sudo().browse(tpl_partner_ids).exists().ids
|
||||
results[res_id]['partner_ids'] = partner_ids
|
||||
return results
|
||||
|
||||
def generate_email(self, res_ids, fields):
|
||||
"""Generates an email from the template for given the given model based on
|
||||
records given by res_ids.
|
||||
|
||||
:param res_id: id of the record to use for rendering the template (model
|
||||
is taken from template definition)
|
||||
:returns: a dict containing all relevant fields for creating a new
|
||||
mail.mail entry, with one extra key ``attachments``, in the
|
||||
format [(report_name, data)] where data is base64 encoded.
|
||||
:return: updated (or new) render_results;
|
||||
"""
|
||||
self.ensure_one()
|
||||
multi_mode = True
|
||||
if isinstance(res_ids, int):
|
||||
res_ids = [res_ids]
|
||||
multi_mode = False
|
||||
if render_results is None:
|
||||
render_results = {}
|
||||
|
||||
results = dict()
|
||||
for lang, (template, template_res_ids) in self._classify_per_lang(res_ids).items():
|
||||
for field in fields:
|
||||
# generating reports is done on a per-record basis, better ensure cache
|
||||
# is filled up to avoid rendering and browsing in a loop
|
||||
if res_ids and 'report_template_ids' in render_fields and self.report_template_ids:
|
||||
self.env[self.model].browse(res_ids)
|
||||
|
||||
for res_id in res_ids:
|
||||
values = render_results.setdefault(res_id, {})
|
||||
|
||||
# link template attachments directly
|
||||
if 'attachment_ids' in render_fields:
|
||||
values['attachment_ids'] = self.attachment_ids.ids
|
||||
|
||||
# generate attachments (reports)
|
||||
if 'report_template_ids' in render_fields and self.report_template_ids:
|
||||
for report in self.report_template_ids:
|
||||
# generate content
|
||||
if report.report_type in ['qweb-html', 'qweb-pdf']:
|
||||
report_content, report_format = self.env['ir.actions.report']._render_qweb_pdf(report, [res_id])
|
||||
else:
|
||||
render_res = self.env['ir.actions.report']._render(report, [res_id])
|
||||
if not render_res:
|
||||
raise UserError(_('Unsupported report type %s found.', report.report_type))
|
||||
report_content, report_format = render_res
|
||||
report_content = base64.b64encode(report_content)
|
||||
# generate name
|
||||
if report.print_report_name:
|
||||
report_name = safe_eval(
|
||||
report.print_report_name,
|
||||
{
|
||||
'object': self.env[self.model].browse(res_id),
|
||||
'time': time,
|
||||
}
|
||||
)
|
||||
else:
|
||||
report_name = _('Report')
|
||||
extension = "." + report_format
|
||||
if not report_name.endswith(extension):
|
||||
report_name += extension
|
||||
values.setdefault('attachments', []).append((report_name, report_content))
|
||||
elif 'report_template_ids' in render_fields:
|
||||
values['attachments'] = []
|
||||
|
||||
# hook for attachments-specific computation, used currently only for accounting
|
||||
if hasattr(self.env[self.model], '_process_attachments_for_template_post'):
|
||||
records_attachments = self.env[self.model].browse(res_ids)._process_attachments_for_template_post(self)
|
||||
for res_id, additional_attachments in records_attachments.items():
|
||||
if not additional_attachments:
|
||||
continue
|
||||
if additional_attachments.get('attachment_ids'):
|
||||
render_results[res_id].setdefault('attachment_ids', []).extend(additional_attachments['attachment_ids'])
|
||||
if additional_attachments.get('attachments'):
|
||||
render_results[res_id].setdefault('attachments', []).extend(additional_attachments['attachments'])
|
||||
|
||||
return render_results
|
||||
|
||||
def _generate_template_recipients(self, res_ids, render_fields,
|
||||
allow_suggested=False,
|
||||
find_or_create_partners=False,
|
||||
render_results=None):
|
||||
""" Render recipients of the template 'self', returning values for records
|
||||
given by 'res_ids'. Default values can be generated instead of the template
|
||||
values if requested by template (see 'use_default_to' field). Email fields
|
||||
('email_cc', 'email_to') are transformed into partners if requested
|
||||
(finding or creating partners). 'partner_to' field is transformed into
|
||||
'partner_ids' field.
|
||||
|
||||
Note: for performance reason, information from records are transferred to
|
||||
created partners no matter the company. For example, if we have a record of
|
||||
company A and one of B with the same email and no related partner, a partner
|
||||
will be created with company A or B but populated with information from the 2
|
||||
records. So some info might be leaked from one company to the other through
|
||||
the partner.
|
||||
|
||||
:param list res_ids: list of record IDs on which template is rendered;
|
||||
:param list render_fields: list of fields to render on template which
|
||||
are specific to recipients, e.g. email_cc, email_to, partner_to);
|
||||
:param boolean allow_suggested: when computing default recipients,
|
||||
include suggested recipients in addition to minimal defaults;
|
||||
:param boolean find_or_create_partners: transform emails into partners
|
||||
(calling ``find_or_create`` on partner model);
|
||||
:param dict render_results: res_ids-based dictionary of render values.
|
||||
For each res_id, a dict of values based on render_fields is given;
|
||||
|
||||
:return: updated (or new) render_results. It holds a 'partner_ids' key
|
||||
holding partners given by ``_message_get_default_recipients`` and/or
|
||||
generated based on 'partner_to'. If ``find_or_create_partners`` is
|
||||
False emails are present, otherwise they are included as partners
|
||||
contained in ``partner_ids``.
|
||||
"""
|
||||
self.ensure_one()
|
||||
if render_results is None:
|
||||
render_results = {}
|
||||
Model = self.env[self.model].with_prefetch(res_ids)
|
||||
|
||||
# if using default recipients -> ``_message_get_default_recipients`` gives
|
||||
# values for email_to, email_cc and partner_ids; if using suggested recipients
|
||||
# -> ``_message_get_suggested_recipients_batch`` gives a list of potential
|
||||
# recipients (TODO: decide which API to keep)
|
||||
if self.use_default_to and self.model:
|
||||
if allow_suggested:
|
||||
suggested_recipients = Model.browse(res_ids)._message_get_suggested_recipients_batch(
|
||||
reply_discussion=True, no_create=not find_or_create_partners,
|
||||
)
|
||||
for res_id, suggested_list in suggested_recipients.items():
|
||||
pids = [r['partner_id'] for r in suggested_list if r['partner_id']]
|
||||
email_to_lst = [
|
||||
tools.mail.formataddr(
|
||||
(r['name'] or '', r['email'] or '')
|
||||
) for r in suggested_list if not r['partner_id']
|
||||
]
|
||||
render_results.setdefault(res_id, {})
|
||||
render_results[res_id]['partner_ids'] = pids
|
||||
render_results[res_id]['email_to'] = ', '.join(email_to_lst)
|
||||
else:
|
||||
default_recipients = Model.browse(res_ids)._message_get_default_recipients()
|
||||
for res_id, recipients in default_recipients.items():
|
||||
render_results.setdefault(res_id, {}).update(recipients)
|
||||
# render fields dynamically which generates recipients
|
||||
else:
|
||||
for field in set(render_fields) & {'email_cc', 'email_to', 'partner_to'}:
|
||||
generated_field_values = self._render_field(field, res_ids)
|
||||
for res_id in res_ids:
|
||||
render_results.setdefault(res_id, {})[field] = generated_field_values[res_id]
|
||||
|
||||
# create partners from emails if asked to
|
||||
if find_or_create_partners:
|
||||
email_to_res_ids = {}
|
||||
records_emails = {}
|
||||
for record in Model.browse(res_ids):
|
||||
record_values = render_results.setdefault(record.id, {})
|
||||
mails = tools.email_split(record_values.pop('email_to', '')) + \
|
||||
tools.email_split(record_values.pop('email_cc', ''))
|
||||
records_emails[record] = mails
|
||||
for mail in mails:
|
||||
email_to_res_ids.setdefault(mail, []).append(record.id)
|
||||
|
||||
if hasattr(Model, '_partner_find_from_emails'):
|
||||
records_partners = Model.browse(res_ids)._partner_find_from_emails(records_emails)
|
||||
else:
|
||||
records_partners = self.env['mail.thread']._partner_find_from_emails(records_emails)
|
||||
for res_id, partners in records_partners.items():
|
||||
render_results[res_id].setdefault('partner_ids', []).extend(partners.ids)
|
||||
|
||||
# update 'partner_to' rendered value to 'partner_ids'
|
||||
all_partner_to = {
|
||||
pid
|
||||
for record_values in render_results.values()
|
||||
for pid in self._parse_partner_to(record_values.get('partner_to', ''))
|
||||
}
|
||||
existing_pids = set()
|
||||
if all_partner_to:
|
||||
existing_pids = set(self.env['res.partner'].sudo().browse(list(all_partner_to)).exists().ids)
|
||||
for record_values in render_results.values():
|
||||
partner_to = record_values.pop('partner_to', '')
|
||||
if partner_to:
|
||||
tpl_partner_ids = set(self._parse_partner_to(partner_to)) & existing_pids
|
||||
record_values.setdefault('partner_ids', []).extend(tpl_partner_ids)
|
||||
|
||||
return render_results
|
||||
|
||||
def _generate_template_scheduled_date(self, res_ids, render_results=None):
|
||||
""" Render scheduled date based on template 'self'. Specific parsing is
|
||||
done to ensure value matches ORM expected value: UTC but without
|
||||
timezone set in value.
|
||||
|
||||
:param list res_ids: list of record IDs on which template is rendered;
|
||||
:param dict render_results: res_ids-based dictionary of render values.
|
||||
For each res_id, a dict of values based on render_fields is given;
|
||||
|
||||
:return: updated (or new) render_results;
|
||||
"""
|
||||
self.ensure_one()
|
||||
if render_results is None:
|
||||
render_results = {}
|
||||
|
||||
scheduled_dates = self._render_field('scheduled_date', res_ids)
|
||||
for res_id in res_ids:
|
||||
scheduled_date = self._process_scheduled_date(scheduled_dates.get(res_id))
|
||||
render_results.setdefault(res_id, {})['scheduled_date'] = scheduled_date
|
||||
|
||||
return render_results
|
||||
|
||||
def _generate_template_static_values(self, res_ids, render_fields, render_results=None):
|
||||
""" Return values based on template 'self'. Those are not rendered nor
|
||||
dynamic, just static values used for configuration of emails.
|
||||
|
||||
:param list res_ids: list of record IDs on which template is rendered;
|
||||
:param list render_fields: list of fields to render, currently limited
|
||||
to a subset (i.e. auto_delete, mail_server_id, model, res_id);
|
||||
:param dict render_results: res_ids-based dictionary of render values.
|
||||
For each res_id, a dict of values based on render_fields is given;
|
||||
|
||||
:return: updated (or new) render_results;
|
||||
"""
|
||||
self.ensure_one()
|
||||
if render_results is None:
|
||||
render_results = {}
|
||||
|
||||
for res_id in res_ids:
|
||||
values = render_results.setdefault(res_id, {})
|
||||
|
||||
# technical settings
|
||||
if 'auto_delete' in render_fields:
|
||||
values['auto_delete'] = self.auto_delete
|
||||
if 'email_layout_xmlid' in render_fields:
|
||||
values['email_layout_xmlid'] = self.email_layout_xmlid
|
||||
if 'mail_server_id' in render_fields:
|
||||
values['mail_server_id'] = self.mail_server_id.id
|
||||
if 'model' in render_fields:
|
||||
values['model'] = self.model
|
||||
if 'res_id' in render_fields:
|
||||
values['res_id'] = res_id or False
|
||||
|
||||
return render_results
|
||||
|
||||
def _generate_template(self, res_ids, render_fields,
|
||||
recipients_allow_suggested=False,
|
||||
find_or_create_partners=False):
|
||||
""" Render values from template 'self' on records given by 'res_ids'.
|
||||
Those values are generally used to create a mail.mail or a mail.message.
|
||||
Model of records is the one defined on template.
|
||||
|
||||
:param list res_ids: list of record IDs on which template is rendered;
|
||||
:param list render_fields: list of fields to render on template;
|
||||
|
||||
# recipients generation
|
||||
:param boolean recipients_allow_suggested: when computing default
|
||||
recipients, include suggested recipients in addition to minimal
|
||||
defaults;
|
||||
:param boolean find_or_create_partners: transform emails into partners
|
||||
(see ``_generate_template_recipients``);
|
||||
|
||||
:returns: a dict of (res_ids, values) where values contains all rendered
|
||||
fields asked in ``render_fields``. Asking for attachments adds an
|
||||
'attachments' key using the format [(report_name, data)] where data
|
||||
is base64 encoded. Asking for recipients adds a 'partner_ids' key.
|
||||
Note that 2many fields contain a list of IDs, not commands.
|
||||
"""
|
||||
self.ensure_one()
|
||||
render_fields_set = set(render_fields)
|
||||
fields_specific = {
|
||||
'attachment_ids', # attachments
|
||||
'email_cc', # recipients
|
||||
'email_to', # recipients
|
||||
'partner_to', # recipients
|
||||
'report_template_ids', # attachments
|
||||
'scheduled_date', # specific
|
||||
# not rendered (static)
|
||||
'auto_delete',
|
||||
'email_layout_xmlid',
|
||||
'mail_server_id',
|
||||
'model',
|
||||
'res_id',
|
||||
}
|
||||
|
||||
render_results = {}
|
||||
for (template, template_res_ids) in self._classify_per_lang(res_ids).values():
|
||||
# render fields not rendered by sub methods
|
||||
fields_torender = {
|
||||
field for field in render_fields_set
|
||||
if field not in fields_specific
|
||||
}
|
||||
for field in fields_torender:
|
||||
generated_field_values = template._render_field(
|
||||
field, template_res_ids,
|
||||
post_process=(field == 'body_html')
|
||||
field, template_res_ids
|
||||
)
|
||||
for res_id, field_value in generated_field_values.items():
|
||||
results.setdefault(res_id, dict())[field] = field_value
|
||||
# compute recipients
|
||||
if any(field in fields for field in ['email_to', 'partner_to', 'email_cc']):
|
||||
results = template.generate_recipients(results, template_res_ids)
|
||||
# update values for all res_ids
|
||||
for res_id in template_res_ids:
|
||||
values = results[res_id]
|
||||
if values.get('body_html'):
|
||||
values['body'] = tools.html_sanitize(values['body_html'])
|
||||
# if asked in fields to return, parse generated date into tz agnostic UTC as expected by ORM
|
||||
scheduled_date = values.pop('scheduled_date', None)
|
||||
if 'scheduled_date' in fields and scheduled_date:
|
||||
parsed_datetime = self.env['mail.mail']._parse_scheduled_datetime(scheduled_date)
|
||||
values['scheduled_date'] = parsed_datetime.replace(tzinfo=None) if parsed_datetime else False
|
||||
render_results.setdefault(res_id, {})[field] = field_value
|
||||
|
||||
# technical settings
|
||||
values.update(
|
||||
mail_server_id=template.mail_server_id.id or False,
|
||||
auto_delete=template.auto_delete,
|
||||
model=template.model,
|
||||
res_id=res_id or False,
|
||||
attachment_ids=[attach.id for attach in template.attachment_ids],
|
||||
# render recipients
|
||||
if render_fields_set & {'email_cc', 'email_to', 'partner_to'}:
|
||||
template._generate_template_recipients(
|
||||
template_res_ids, render_fields_set,
|
||||
render_results=render_results,
|
||||
allow_suggested=recipients_allow_suggested,
|
||||
find_or_create_partners=find_or_create_partners
|
||||
)
|
||||
|
||||
# Add report in attachments: generate once for all template_res_ids
|
||||
if template.report_template:
|
||||
for res_id in template_res_ids:
|
||||
attachments = []
|
||||
report_name = template._render_field('report_name', [res_id])[res_id]
|
||||
report = template.report_template
|
||||
report_service = report.report_name
|
||||
# render scheduled_date
|
||||
if 'scheduled_date' in render_fields_set:
|
||||
template._generate_template_scheduled_date(
|
||||
template_res_ids,
|
||||
render_results=render_results
|
||||
)
|
||||
|
||||
if report.report_type in ['qweb-html', 'qweb-pdf']:
|
||||
result, report_format = self.env['ir.actions.report']._render_qweb_pdf(report, [res_id])
|
||||
else:
|
||||
res = self.env['ir.actions.report']._render(report, [res_id])
|
||||
if not res:
|
||||
raise UserError(_('Unsupported report type %s found.', report.report_type))
|
||||
result, report_format = res
|
||||
# add values static for all res_ids
|
||||
template._generate_template_static_values(
|
||||
template_res_ids,
|
||||
render_fields_set,
|
||||
render_results=render_results
|
||||
)
|
||||
|
||||
# TODO in trunk, change return format to binary to match message_post expected format
|
||||
result = base64.b64encode(result)
|
||||
if not report_name:
|
||||
report_name = 'report.' + report_service
|
||||
ext = "." + report_format
|
||||
if not report_name.endswith(ext):
|
||||
report_name += ext
|
||||
attachments.append((report_name, result))
|
||||
results[res_id]['attachments'] = attachments
|
||||
# generate attachments if requested
|
||||
if render_fields_set & {'attachment_ids', 'report_template_ids'}:
|
||||
template._generate_template_attachments(
|
||||
template_res_ids,
|
||||
render_fields_set,
|
||||
render_results=render_results
|
||||
)
|
||||
|
||||
return multi_mode and results or results[res_ids[0]]
|
||||
return render_results
|
||||
|
||||
@classmethod
|
||||
def _parse_partner_to(cls, partner_to):
|
||||
try:
|
||||
partner_to = literal_eval(partner_to or '[]')
|
||||
except (ValueError, SyntaxError):
|
||||
partner_to = partner_to.split(',')
|
||||
if not isinstance(partner_to, (list, tuple)):
|
||||
partner_to = [partner_to]
|
||||
return [
|
||||
int(pid.strip()) if isinstance(pid, str) else int(pid) for pid in partner_to
|
||||
if (isinstance(pid, str) and pid.strip().isdigit()) or (pid and not isinstance(pid, str))
|
||||
]
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# EMAIL
|
||||
|
|
@ -308,8 +667,7 @@ class MailTemplate(models.Model):
|
|||
|
||||
def _send_check_access(self, res_ids):
|
||||
records = self.env[self.model].browse(res_ids)
|
||||
records.check_access_rights('read')
|
||||
records.check_access_rule('read')
|
||||
records.check_access('read')
|
||||
|
||||
def send_mail(self, res_id, force_send=False, raise_exception=False, email_values=None,
|
||||
email_layout_xmlid=False):
|
||||
|
|
@ -327,77 +685,141 @@ class MailTemplate(models.Model):
|
|||
|
||||
# Grant access to send_mail only if access to related document
|
||||
self.ensure_one()
|
||||
self._send_check_access([res_id])
|
||||
return self.send_mail_batch(
|
||||
[res_id],
|
||||
force_send=force_send,
|
||||
raise_exception=raise_exception,
|
||||
email_values=email_values,
|
||||
email_layout_xmlid=email_layout_xmlid
|
||||
)[0].id # TDE CLEANME: return mail + api.returns ?
|
||||
|
||||
Attachment = self.env['ir.attachment'] # TDE FIXME: should remove default_type from context
|
||||
def send_mail_batch(self, res_ids, force_send=False, raise_exception=False, email_values=None,
|
||||
email_layout_xmlid=False):
|
||||
""" Generates new mail.mails. Batch version of 'send_mail'.'
|
||||
|
||||
# create a mail_mail based on values, without attachments
|
||||
values = self.generate_email(
|
||||
res_id,
|
||||
['subject', 'body_html',
|
||||
'email_from',
|
||||
'email_cc', 'email_to', 'partner_to', 'reply_to',
|
||||
'auto_delete', 'scheduled_date']
|
||||
)
|
||||
values['recipient_ids'] = [Command.link(pid) for pid in values.get('partner_ids', list())]
|
||||
values['attachment_ids'] = [Command.link(aid) for aid in values.get('attachment_ids', list())]
|
||||
values.update(email_values or {})
|
||||
attachment_ids = values.pop('attachment_ids', [])
|
||||
attachments = values.pop('attachments', [])
|
||||
# add a protection against void email_from
|
||||
if 'email_from' in values and not values.get('email_from'):
|
||||
values.pop('email_from')
|
||||
# encapsulate body
|
||||
if email_layout_xmlid and values['body_html']:
|
||||
record = self.env[self.model].browse(res_id)
|
||||
model = self.env['ir.model']._get(record._name)
|
||||
:param list res_ids: IDs of modelrecords on which template will be rendered
|
||||
|
||||
if self.lang:
|
||||
lang = self._render_lang([res_id])[res_id]
|
||||
model = model.with_context(lang=lang)
|
||||
:returns: newly created mail.mail
|
||||
"""
|
||||
# Grant access to send_mail only if access to related document
|
||||
self.ensure_one()
|
||||
self._send_check_access(res_ids)
|
||||
sending_email_layout_xmlid = email_layout_xmlid or self.email_layout_xmlid
|
||||
|
||||
template_ctx = {
|
||||
# message
|
||||
'message': self.env['mail.message'].sudo().new(dict(body=values['body_html'], record_name=record.display_name)),
|
||||
'subtype': self.env['mail.message.subtype'].sudo(),
|
||||
# record
|
||||
'model_description': model.display_name,
|
||||
'record': record,
|
||||
'record_name': False,
|
||||
'subtitles': False,
|
||||
# user / environment
|
||||
'company': 'company_id' in record and record['company_id'] or self.env.company,
|
||||
'email_add_signature': False,
|
||||
'signature': '',
|
||||
'website_url': '',
|
||||
# tools
|
||||
'is_html_empty': is_html_empty,
|
||||
}
|
||||
body = model.env['ir.qweb']._render(email_layout_xmlid, template_ctx, minimal_qcontext=True, raise_if_not_found=False)
|
||||
if not body:
|
||||
_logger.warning(
|
||||
'QWeb template %s not found when sending template %s. Sending without layout.',
|
||||
email_layout_xmlid,
|
||||
self.name
|
||||
mails_sudo = self.env['mail.mail'].sudo()
|
||||
batch_size = int(
|
||||
self.env['ir.config_parameter'].sudo().get_param('mail.batch_size')
|
||||
) or 50 # be sure to not have 0, as otherwise no iteration is done
|
||||
RecordModel = self.env[self.model].with_prefetch(res_ids)
|
||||
record_ir_model = self.env['ir.model']._get(self.model)
|
||||
|
||||
for res_ids_chunk in tools.split_every(batch_size, res_ids):
|
||||
res_ids_values = self._generate_template(
|
||||
res_ids_chunk,
|
||||
('attachment_ids',
|
||||
'auto_delete',
|
||||
'body_html',
|
||||
'email_cc',
|
||||
'email_from',
|
||||
'email_to',
|
||||
'mail_server_id',
|
||||
'model',
|
||||
'partner_to',
|
||||
'reply_to',
|
||||
'report_template_ids',
|
||||
'res_id',
|
||||
'scheduled_date',
|
||||
'subject',
|
||||
)
|
||||
)
|
||||
values_list = [res_ids_values[res_id] for res_id in res_ids_chunk]
|
||||
|
||||
values['body_html'] = self.env['mail.render.mixin']._replace_local_links(body)
|
||||
# get record in batch to use the prefetch
|
||||
records = RecordModel.browse(res_ids_chunk)
|
||||
attachments_list = []
|
||||
|
||||
mail = self.env['mail.mail'].sudo().create(values)
|
||||
# lang and company is used for rendering layout
|
||||
res_ids_langs, res_ids_companies = {}, {}
|
||||
if sending_email_layout_xmlid:
|
||||
if self.lang:
|
||||
res_ids_langs = self._render_lang(res_ids_chunk)
|
||||
res_ids_companies = records._mail_get_companies(default=self.env.company)
|
||||
|
||||
# manage attachments
|
||||
for attachment in attachments:
|
||||
attachment_data = {
|
||||
'name': attachment[0],
|
||||
'datas': attachment[1],
|
||||
'type': 'binary',
|
||||
'res_model': 'mail.message',
|
||||
'res_id': mail.mail_message_id.id,
|
||||
}
|
||||
attachment_ids.append((4, Attachment.create(attachment_data).id))
|
||||
if attachment_ids:
|
||||
mail.write({'attachment_ids': attachment_ids})
|
||||
for record in records:
|
||||
values = res_ids_values[record.id]
|
||||
values['recipient_ids'] = [(4, pid) for pid in (values.get('partner_ids') or [])]
|
||||
values['attachment_ids'] = [(4, aid) for aid in (values.get('attachment_ids') or [])]
|
||||
values.update(email_values or {})
|
||||
|
||||
# delegate attachments after creation due to ACL check
|
||||
attachments_list.append(values.pop('attachments', []))
|
||||
|
||||
# add a protection against void email_from
|
||||
if 'email_from' in values and not values.get('email_from'):
|
||||
values.pop('email_from')
|
||||
|
||||
# encapsulate body
|
||||
if not sending_email_layout_xmlid:
|
||||
values['body'] = values['body_html']
|
||||
continue
|
||||
|
||||
lang = res_ids_langs.get(record.id) or self.env.lang
|
||||
company = res_ids_companies.get(record.id) or self.env.company
|
||||
model_lang = record_ir_model.with_context(lang=lang)
|
||||
self_lang = self.with_context(lang=lang)
|
||||
record_lang = record.with_context(lang=lang)
|
||||
|
||||
values['body_html'] = self_lang._render_encapsulate(
|
||||
sending_email_layout_xmlid,
|
||||
values['body_html'],
|
||||
add_context={
|
||||
'company': company,
|
||||
'model_description': model_lang.display_name,
|
||||
},
|
||||
context_record=record_lang,
|
||||
)
|
||||
values['body'] = values['body_html']
|
||||
|
||||
mails = self.env['mail.mail'].sudo().create(values_list)
|
||||
|
||||
# manage attachments
|
||||
for mail, attachments in zip(mails, attachments_list):
|
||||
if attachments:
|
||||
attachments_values = [
|
||||
(0, 0, {
|
||||
'name': name,
|
||||
'datas': datas,
|
||||
'type': 'binary',
|
||||
'res_model': 'mail.message',
|
||||
'res_id': mail.mail_message_id.id,
|
||||
})
|
||||
for (name, datas) in attachments
|
||||
]
|
||||
mail.with_context(default_type=None).write({'attachment_ids': attachments_values})
|
||||
|
||||
mails_sudo += mails
|
||||
|
||||
if force_send:
|
||||
mail.send(raise_exception=raise_exception)
|
||||
return mail.id # TDE CLEANME: return mail + api.returns ?
|
||||
mails_sudo.send(raise_exception=raise_exception)
|
||||
return mails_sudo
|
||||
|
||||
# ----------------------------------------
|
||||
# MAIL RENDER INTERNALS
|
||||
# ----------------------------------------
|
||||
|
||||
def _has_unsafe_expression_template_qweb(self, source, model, fname=None):
|
||||
if self._expression_is_default(source, model, fname):
|
||||
return False
|
||||
return super()._has_unsafe_expression_template_qweb(source, model, fname=fname)
|
||||
|
||||
def _has_unsafe_expression_template_inline_template(self, source, model, fname=None):
|
||||
if self._expression_is_default(source, model, fname):
|
||||
return False
|
||||
return super()._has_unsafe_expression_template_inline_template(source, model, fname=fname)
|
||||
|
||||
def _expression_is_default(self, source, model, fname):
|
||||
if not fname or not model:
|
||||
return False
|
||||
Model = self.env[model]
|
||||
model_defaults = hasattr(Model, '_mail_template_default_values') and Model._mail_template_default_values() or {}
|
||||
return source == model_defaults.get(fname)
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -3,9 +3,10 @@
|
|||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import AccessError, UserError
|
||||
from odoo.tools import SQL
|
||||
|
||||
|
||||
class MailBlackListMixin(models.AbstractModel):
|
||||
class MailThreadBlacklist(models.AbstractModel):
|
||||
""" Mixin that is inherited by all model with opt out. This mixin stores a normalized
|
||||
email based on primary_email field.
|
||||
|
||||
|
|
@ -31,8 +32,7 @@ class MailBlackListMixin(models.AbstractModel):
|
|||
_primary_email = 'email'
|
||||
|
||||
email_normalized = fields.Char(
|
||||
string='Normalized Email', compute="_compute_email_normalized", compute_sudo=True,
|
||||
store=True, invisible=True,
|
||||
string='Normalized Email', compute="_compute_email_normalized", compute_sudo=True, store=True,
|
||||
help="This field is used to search on email address as the primary email field can contain more than strictly an email address.")
|
||||
# Note : is_blacklisted sould only be used for display. As the compute is not depending on the blacklist,
|
||||
# once read, it won't be re-computed again if the blacklist is modified in the same request.
|
||||
|
|
@ -51,42 +51,39 @@ class MailBlackListMixin(models.AbstractModel):
|
|||
|
||||
@api.model
|
||||
def _search_is_blacklisted(self, operator, value):
|
||||
# Assumes operator is '=' or '!=' and value is True or False
|
||||
if operator not in ('in', 'not in'):
|
||||
return NotImplemented
|
||||
self.flush_model(['email_normalized'])
|
||||
self.env['mail.blacklist'].flush_model(['email', 'active'])
|
||||
self._assert_primary_email()
|
||||
if operator != '=':
|
||||
if operator == '!=' and isinstance(value, bool):
|
||||
value = not value
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
|
||||
if value:
|
||||
query = """
|
||||
if operator == 'in':
|
||||
sql = SQL("""
|
||||
SELECT m.id
|
||||
FROM mail_blacklist bl
|
||||
JOIN %s m
|
||||
ON m.email_normalized = bl.email AND bl.active
|
||||
"""
|
||||
""", SQL.identifier(self._table))
|
||||
else:
|
||||
query = """
|
||||
sql = SQL("""
|
||||
SELECT m.id
|
||||
FROM %s m
|
||||
LEFT JOIN mail_blacklist bl
|
||||
ON m.email_normalized = bl.email AND bl.active
|
||||
WHERE bl.id IS NULL
|
||||
"""
|
||||
self._cr.execute((query + " FETCH FIRST ROW ONLY") % self._table)
|
||||
res = self._cr.fetchall()
|
||||
""", SQL.identifier(self._table))
|
||||
|
||||
self.env.cr.execute(SQL("%s FETCH FIRST ROW ONLY", sql))
|
||||
res = self.env.cr.fetchall()
|
||||
if not res:
|
||||
return [(0, '=', 1)]
|
||||
return [('id', 'inselect', (query % self._table, []))]
|
||||
return [('id', 'in', SQL("(%s)", sql))]
|
||||
|
||||
@api.depends('email_normalized')
|
||||
def _compute_is_blacklisted(self):
|
||||
# TODO : Should remove the sudo as compute_sudo defined on methods.
|
||||
# But if user doesn't have access to mail.blacklist, doen't work without sudo().
|
||||
blacklist = set(self.env['mail.blacklist'].sudo().search([
|
||||
blacklist = set(self.env['mail.blacklist'].sudo().with_context(active_test=True).search([
|
||||
('email', 'in', self.mapped('email_normalized'))]).mapped('email'))
|
||||
for record in self:
|
||||
record.is_blacklisted = record.email_normalized in blacklist
|
||||
|
|
@ -100,20 +97,20 @@ class MailBlackListMixin(models.AbstractModel):
|
|||
def _message_receive_bounce(self, email, partner):
|
||||
""" Override of mail.thread generic method. Purpose is to increment the
|
||||
bounce counter of the record. """
|
||||
super(MailBlackListMixin, self)._message_receive_bounce(email, partner)
|
||||
super()._message_receive_bounce(email, partner)
|
||||
for record in self:
|
||||
record.message_bounce = record.message_bounce + 1
|
||||
|
||||
def _message_reset_bounce(self, email):
|
||||
""" Override of mail.thread generic method. Purpose is to reset the
|
||||
bounce counter of the record. """
|
||||
super(MailBlackListMixin, self)._message_reset_bounce(email)
|
||||
super()._message_reset_bounce(email)
|
||||
self.write({'message_bounce': 0})
|
||||
|
||||
def mail_action_blacklist_remove(self):
|
||||
# wizard access rights currently not working as expected and allows users without access to
|
||||
# open this wizard, therefore we check to make sure they have access before the wizard opens.
|
||||
can_access = self.env['mail.blacklist'].check_access_rights('write', raise_exception=False)
|
||||
can_access = self.env['mail.blacklist'].has_access('write')
|
||||
if can_access:
|
||||
return {
|
||||
'name': _('Are you sure you want to unblacklist this Email Address?'),
|
||||
|
|
|
|||
|
|
@ -4,9 +4,9 @@
|
|||
from odoo import _, api, fields, models, tools
|
||||
|
||||
|
||||
class MailCCMixin(models.AbstractModel):
|
||||
class MailThreadCc(models.AbstractModel):
|
||||
_name = 'mail.thread.cc'
|
||||
_inherit = 'mail.thread'
|
||||
_inherit = ['mail.thread']
|
||||
_description = 'Email CC management'
|
||||
|
||||
email_cc = fields.Char('Email cc')
|
||||
|
|
@ -17,7 +17,7 @@ class MailCCMixin(models.AbstractModel):
|
|||
return {}
|
||||
return {
|
||||
tools.email_normalize(email): tools.formataddr((name, tools.email_normalize(email)))
|
||||
for (name, email) in tools.email_split_tuples(cc_string)
|
||||
for (name, email) in tools.mail.email_split_tuples(cc_string)
|
||||
}
|
||||
|
||||
@api.model
|
||||
|
|
@ -28,10 +28,10 @@ class MailCCMixin(models.AbstractModel):
|
|||
'email_cc': ", ".join(self._mail_cc_sanitized_raw_dict(msg_dict.get('cc')).values()),
|
||||
}
|
||||
cc_values.update(custom_values)
|
||||
return super(MailCCMixin, self).message_new(msg_dict, cc_values)
|
||||
return super().message_new(msg_dict, cc_values)
|
||||
|
||||
def message_update(self, msg_dict, update_vals=None):
|
||||
'''Adds cc email to self.email_cc while trying to keep email as raw as possible but unique'''
|
||||
# Adds cc email to self.email_cc while trying to keep email as raw as possible but unique
|
||||
if update_vals is None:
|
||||
update_vals = {}
|
||||
cc_values = {}
|
||||
|
|
@ -41,12 +41,10 @@ class MailCCMixin(models.AbstractModel):
|
|||
new_cc.update(old_cc)
|
||||
cc_values['email_cc'] = ", ".join(new_cc.values())
|
||||
cc_values.update(update_vals)
|
||||
return super(MailCCMixin, self).message_update(msg_dict, cc_values)
|
||||
return super().message_update(msg_dict, cc_values)
|
||||
|
||||
def _message_get_suggested_recipients(self):
|
||||
recipients = super(MailCCMixin, self)._message_get_suggested_recipients()
|
||||
for record in self:
|
||||
if record.email_cc:
|
||||
for email in tools.email_split_and_format(record.email_cc):
|
||||
record._message_add_suggested_recipient(recipients, email=email, reason=_('CC Email'))
|
||||
return recipients
|
||||
def _message_add_suggested_recipients(self, force_primary_email=False):
|
||||
suggested = super()._message_add_suggested_recipients(force_primary_email=force_primary_email)
|
||||
for record in self.filtered('email_cc'):
|
||||
suggested[record.id]['email_to_lst'] += tools.mail.email_split_and_format_normalize(record.email_cc)
|
||||
return suggested
|
||||
|
|
|
|||
|
|
@ -0,0 +1,59 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class MailThreadMainAttachment(models.AbstractModel):
|
||||
""" Mixin that adds main attachment support to the MailThread class. """
|
||||
|
||||
_name = 'mail.thread.main.attachment'
|
||||
_inherit = ['mail.thread']
|
||||
_description = 'Mail Main Attachment management'
|
||||
|
||||
message_main_attachment_id = fields.Many2one(string="Main Attachment", comodel_name='ir.attachment', copy=False, index='btree_not_null')
|
||||
|
||||
def _message_post_after_hook(self, message, msg_values):
|
||||
""" Set main attachment field if necessary """
|
||||
super()._message_post_after_hook(message, msg_values)
|
||||
self.sudo()._message_set_main_attachment_id(
|
||||
self.env["ir.attachment"].browse([
|
||||
attachment_command[1]
|
||||
for attachment_command in (msg_values['attachment_ids'] or [])
|
||||
])
|
||||
)
|
||||
|
||||
def _message_set_main_attachment_id(self, attachments, force=False, filter_xml=True):
|
||||
""" Update 'main' attachment.
|
||||
|
||||
:param list attachments: new main attachment IDS; if several attachments
|
||||
are given, we search for pdf or image first;
|
||||
:param boolean force: if set, replace an existing attachment; otherwise
|
||||
update is skipped;
|
||||
:param filter_xml: filters out xml (and octet-stream) attachments, as in
|
||||
most cases you don't want that kind of file to end up as main attachment
|
||||
of records;
|
||||
"""
|
||||
if attachments and (force or not self.message_main_attachment_id):
|
||||
# we filter out attachment with 'xml' and 'octet' types
|
||||
if filter_xml:
|
||||
attachments = attachments.filtered(
|
||||
lambda r: not r.mimetype.endswith('xml') and not r.mimetype.endswith('application/octet-stream')
|
||||
)
|
||||
|
||||
# Assign one of the attachments as the main according to the following priority: pdf, image, other types.
|
||||
if attachments:
|
||||
self.with_context(tracking_disable=True).message_main_attachment_id = max(
|
||||
attachments,
|
||||
key=lambda r: (r.mimetype.endswith('pdf'), r.mimetype.startswith('image'))
|
||||
).id
|
||||
|
||||
def _thread_to_store(self, store: Store, fields, *, request_list=None):
|
||||
super()._thread_to_store(store, fields, request_list=request_list)
|
||||
if request_list and "attachments" in request_list:
|
||||
store.add(
|
||||
self,
|
||||
Store.One("message_main_attachment_id", []),
|
||||
as_thread=True,
|
||||
)
|
||||
|
|
@ -0,0 +1,250 @@
|
|||
from collections import defaultdict
|
||||
from datetime import timedelta
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools import SQL
|
||||
|
||||
|
||||
class MailTrackingDurationMixin(models.AbstractModel):
|
||||
_name = 'mail.tracking.duration.mixin'
|
||||
_description = "Mixin to compute the time a record has spent in each value a many2one field can take"
|
||||
_inherit = ['mail.thread']
|
||||
|
||||
duration_tracking = fields.Json(
|
||||
string="Status time", compute="_compute_duration_tracking",
|
||||
help="JSON that maps ids from a many2one field to seconds spent")
|
||||
|
||||
# The rotting feature enables resources to mark themselves as stale if enough time has passed
|
||||
# since their stage was last updated.
|
||||
# Consult _is_rotting_feature_enabled() documentation for configuration instructions
|
||||
rotting_days = fields.Integer('Days Rotting', help='Day count since this resource was last updated',
|
||||
compute='_compute_rotting')
|
||||
is_rotting = fields.Boolean('Rotting', compute='_compute_rotting', search='_search_is_rotting')
|
||||
|
||||
def _compute_duration_tracking(self):
|
||||
"""
|
||||
Computes duration_tracking, a Json field stored as { <many2one_id (str)>: <duration_spent_in_seconds (int)> }
|
||||
|
||||
e.g. {"1": 1230, "2": 2220, "5": 14}
|
||||
|
||||
`_track_duration_field` must be present in the model that uses the mixin to specify on what
|
||||
field to compute time spent. Besides, tracking must be activated for that field.
|
||||
|
||||
e.g.
|
||||
class MyModel(models.Model):
|
||||
_name = 'my.model'
|
||||
_track_duration_field = "tracked_field"
|
||||
|
||||
tracked_field = fields.Many2one('tracked.model', tracking=True)
|
||||
"""
|
||||
|
||||
field = self.env['ir.model.fields'].sudo().search_fetch([
|
||||
('model', '=', self._name),
|
||||
('name', '=', self._track_duration_field),
|
||||
], ['id'], limit=1)
|
||||
|
||||
if (
|
||||
self._track_duration_field not in self._track_get_fields()
|
||||
or self._fields[self._track_duration_field].type != 'many2one'
|
||||
):
|
||||
self.duration_tracking = False
|
||||
raise ValueError(_(
|
||||
'Field “%(field)s” on model “%(model)s” must be of type Many2one and have tracking=True for the computation of duration.',
|
||||
field=self._track_duration_field, model=self._name
|
||||
))
|
||||
|
||||
if self.ids:
|
||||
self.env['mail.tracking.value'].flush_model()
|
||||
self.env['mail.message'].flush_model()
|
||||
trackings = self.env.execute_query_dict(SQL("""
|
||||
SELECT m.res_id,
|
||||
v.create_date,
|
||||
v.old_value_integer
|
||||
FROM mail_tracking_value v
|
||||
LEFT JOIN mail_message m
|
||||
ON m.id = v.mail_message_id
|
||||
AND v.field_id = %(field_id)s
|
||||
WHERE m.model = %(model_name)s
|
||||
AND m.res_id IN %(record_ids)s
|
||||
ORDER BY v.id
|
||||
""",
|
||||
field_id=field.id, model_name=self._name, record_ids=tuple(self.ids),
|
||||
))
|
||||
else:
|
||||
trackings = []
|
||||
|
||||
for record in self:
|
||||
record_trackings = [tracking for tracking in trackings if tracking['res_id'] == record._origin.id]
|
||||
record.duration_tracking = record._get_duration_from_tracking(record_trackings)
|
||||
|
||||
def _get_duration_from_tracking(self, trackings):
|
||||
"""
|
||||
Calculates the duration spent in each value based on the provided list of trackings.
|
||||
It adds a "fake" tracking at the end of the trackings list to account for the time spent in the current value.
|
||||
|
||||
Args:
|
||||
trackings (list): A list of dictionaries representing the trackings with:
|
||||
- 'create_date': The date and time of the tracking.
|
||||
- 'old_value_integer': The ID of the previous value.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary where the keys are the IDs of the values, and the values are the durations in seconds
|
||||
"""
|
||||
self.ensure_one()
|
||||
json = defaultdict(lambda: 0)
|
||||
previous_date = self.create_date or self.env.cr.now()
|
||||
|
||||
# If there is a tracking value to be created, but still in the
|
||||
# precommit values, create a fake one to take it into account.
|
||||
# Otherwise, the duration_tracking value will add time spent on
|
||||
# previous tracked field value to the time spent in the new value
|
||||
# (after writing the stage on the record)
|
||||
if f'mail.tracking.{self._name}' in self.env.cr.precommit.data:
|
||||
if data := self.env.cr.precommit.data.get(f'mail.tracking.{self._name}', {}).get(self._origin.id):
|
||||
new_id = data.get(self._track_duration_field, self.env[self._name]).id
|
||||
if new_id and new_id != self[self._track_duration_field].id:
|
||||
trackings.append({
|
||||
'create_date': self.env.cr.now(),
|
||||
'old_value_integer': data[self._track_duration_field].id,
|
||||
})
|
||||
|
||||
# add "fake" tracking for time spent in the current value
|
||||
trackings.append({
|
||||
'create_date': self.env.cr.now(),
|
||||
'old_value_integer': self[self._track_duration_field].id,
|
||||
})
|
||||
|
||||
for tracking in trackings:
|
||||
json[tracking['old_value_integer']] += int((tracking['create_date'] - previous_date).total_seconds())
|
||||
previous_date = tracking['create_date']
|
||||
|
||||
return json
|
||||
|
||||
def _is_rotting_feature_enabled(self):
|
||||
"""
|
||||
To enable the rotting behavior, the following must be present:
|
||||
|
||||
* Stage-like model (linked by '_track_duration_field') must have a 'rotting_threshold_days' integer field
|
||||
modeling the number of days before a record rots
|
||||
|
||||
* Model inheriting from duration mixin must have a 'date_last_stage_update' field tracking the last stage change
|
||||
|
||||
|
||||
Also consider overriding _get_rotting_depends_fields() and _get_rotting_domain().
|
||||
|
||||
Certain views have access to widgets to display rotting status:
|
||||
'rotting' for kanbans, 'rotting_statusbar_duration' for forms, 'badge_rotting' for lists.
|
||||
|
||||
:return: bool: whether the rotting feature has been configured for this model
|
||||
"""
|
||||
return 'rotting_threshold_days' in self[self._track_duration_field] and 'date_last_stage_update' in self and (
|
||||
not self # api.model call
|
||||
or any(stage.rotting_threshold_days for stage in self[self._track_duration_field])
|
||||
)
|
||||
|
||||
def _get_rotting_depends_fields(self):
|
||||
"""
|
||||
fields added to this method through override should likely also be returned by _get_rotting_domain() override
|
||||
|
||||
:return: the array of fields that can affect the ability of a resource to rot
|
||||
"""
|
||||
if hasattr(self, '_track_duration_field') and 'rotting_threshold_days' in self[self._track_duration_field]:
|
||||
return ['date_last_stage_update', f'{self._track_duration_field}.rotting_threshold_days']
|
||||
return []
|
||||
|
||||
def _get_rotting_domain(self):
|
||||
"""
|
||||
fields added to this method through override should likely also be returned by _get_rotting_depends_fields() override
|
||||
|
||||
:return: domain: conditions that must be met so that the field can be considered rotting
|
||||
"""
|
||||
return Domain(f'{self._track_duration_field}.rotting_threshold_days', '!=', 0)
|
||||
|
||||
@api.depends(lambda self: self._get_rotting_depends_fields())
|
||||
def _compute_rotting(self):
|
||||
"""
|
||||
A resource is rotting if its stage has not been updated in a number of days depending on its
|
||||
stage's rotting_threshold_days value, assuming it matches _get_rotting_domain() conditions.
|
||||
|
||||
If the rotting_threshold_days field is not defined on the tracked module,
|
||||
or if the value of rotting_threshold_days is 0,
|
||||
then the resource will never rot.
|
||||
"""
|
||||
if not self._is_rotting_feature_enabled():
|
||||
self.is_rotting = False
|
||||
self.rotting_days = 0
|
||||
return
|
||||
now = self.env.cr.now()
|
||||
rot_enabled = self.filtered_domain(self._get_rotting_domain())
|
||||
others = self - rot_enabled
|
||||
for stage, records in rot_enabled.grouped(self._track_duration_field).items():
|
||||
rotting = records.filtered(lambda record:
|
||||
(record.date_last_stage_update or record.create_date or fields.Datetime.now())
|
||||
+ timedelta(days=stage.rotting_threshold_days) < now
|
||||
)
|
||||
for record in rotting:
|
||||
record.is_rotting = True
|
||||
record.rotting_days = (now - (record.date_last_stage_update or record.create_date)).days
|
||||
others += records - rotting
|
||||
others.is_rotting = False
|
||||
others.rotting_days = 0
|
||||
|
||||
def _search_is_rotting(self, operator, value):
|
||||
if operator not in ['in', 'not in']:
|
||||
raise ValueError(self.env._('For performance reasons, use "=" operators on rotting fields.'))
|
||||
if not self._is_rotting_feature_enabled():
|
||||
raise UserError(self.env._('Model configuration does not support the rotting feature'))
|
||||
model_depends = [fname for fname in self._get_rotting_depends_fields() if '.' not in fname]
|
||||
self.flush_model(model_depends) # flush fields to make sure DB is up to date
|
||||
self.env[self[self._track_duration_field]._name].flush_model(['rotting_threshold_days'])
|
||||
base_query = self._search(self._get_rotting_domain())
|
||||
|
||||
# Our query needs to JOIN the stage field's table.
|
||||
# This JOIN needs to use the same alias as the base query to avoid non-matching alias issues
|
||||
# Note that query objects do not make their alias table available trivially,
|
||||
# but the alias can be inferred by consulting the _joins attribute and compare it to the result of make_alias()
|
||||
stage_table_alias_name = base_query.make_alias(self._table, self._track_duration_field)
|
||||
|
||||
# We only need to add a JOIN if the stage table is not already present in the query's _joins attribute.
|
||||
from_add_join = ''
|
||||
if not base_query._joins or not stage_table_alias_name in base_query._joins:
|
||||
from_add_join = """
|
||||
INNER JOIN %(stage_table)s AS %(stage_table_alias_name)s
|
||||
ON %(stage_table_alias_name)s.id = %(table)s.%(stage_field)s
|
||||
"""
|
||||
|
||||
# Items with a date_last_stage_update inferior to that number of months will not be returned by the search function.
|
||||
max_rotting_months = int(self.env['ir.config_parameter'].sudo().get_param('crm.lead.rot.max.months', default=12))
|
||||
|
||||
# We use a F-string so that the from_add_join is added with its %s parameters before the query string is processed
|
||||
query = f"""
|
||||
WITH perishables AS (
|
||||
SELECT %(table)s.id AS id,
|
||||
(
|
||||
%(table)s.date_last_stage_update + %(stage_table_alias_name)s.rotting_threshold_days * interval '1 day'
|
||||
) AS date_rot
|
||||
FROM %(from_clause)s
|
||||
{from_add_join}
|
||||
WHERE
|
||||
%(table)s.date_last_stage_update > %(today)s - INTERVAL '%(max_rotting_months)s months'
|
||||
AND %(where_clause)s
|
||||
)
|
||||
SELECT id
|
||||
FROM perishables
|
||||
WHERE %(today)s >= date_rot
|
||||
|
||||
"""
|
||||
self.env.cr.execute(SQL(query,
|
||||
table=SQL.identifier(self._table),
|
||||
stage_table=SQL.identifier(self[self._track_duration_field]._table),
|
||||
stage_table_alias_name=SQL.identifier(stage_table_alias_name),
|
||||
stage_field=SQL.identifier(self._track_duration_field),
|
||||
today=self.env.cr.now(),
|
||||
where_clause=base_query.where_clause,
|
||||
from_clause=base_query.from_clause,
|
||||
max_rotting_months=max_rotting_months,
|
||||
))
|
||||
rows = self.env.cr.dictfetchall()
|
||||
return [('id', operator, [r['id'] for r in rows])]
|
||||
|
|
@ -6,59 +6,96 @@ from datetime import datetime
|
|||
from odoo import api, fields, models
|
||||
|
||||
|
||||
class MailTracking(models.Model):
|
||||
class MailTrackingValue(models.Model):
|
||||
_name = 'mail.tracking.value'
|
||||
_description = 'Mail Tracking Value'
|
||||
_rec_name = 'field'
|
||||
_order = 'tracking_sequence asc'
|
||||
_rec_name = 'field_id'
|
||||
_order = 'id DESC'
|
||||
|
||||
field = fields.Many2one('ir.model.fields', required=True, readonly=1, index=True, ondelete='cascade')
|
||||
field_desc = fields.Char('Field Description', required=True, readonly=1)
|
||||
field_type = fields.Char('Field Type')
|
||||
field_groups = fields.Char(compute='_compute_field_groups')
|
||||
field_id = fields.Many2one(
|
||||
'ir.model.fields', required=False, readonly=True,
|
||||
index=True, ondelete='set null')
|
||||
field_info = fields.Json('Removed field information')
|
||||
|
||||
old_value_integer = fields.Integer('Old Value Integer', readonly=1)
|
||||
old_value_float = fields.Float('Old Value Float', readonly=1)
|
||||
old_value_monetary = fields.Float('Old Value Monetary', readonly=1)
|
||||
old_value_char = fields.Char('Old Value Char', readonly=1)
|
||||
old_value_text = fields.Text('Old Value Text', readonly=1)
|
||||
old_value_datetime = fields.Datetime('Old Value DateTime', readonly=1)
|
||||
old_value_integer = fields.Integer('Old Value Integer', readonly=True)
|
||||
old_value_float = fields.Float('Old Value Float', readonly=True)
|
||||
old_value_char = fields.Char('Old Value Char', readonly=True)
|
||||
old_value_text = fields.Text('Old Value Text', readonly=True)
|
||||
old_value_datetime = fields.Datetime('Old Value DateTime', readonly=True)
|
||||
|
||||
new_value_integer = fields.Integer('New Value Integer', readonly=1)
|
||||
new_value_float = fields.Float('New Value Float', readonly=1)
|
||||
new_value_monetary = fields.Float('New Value Monetary', readonly=1)
|
||||
new_value_char = fields.Char('New Value Char', readonly=1)
|
||||
new_value_text = fields.Text('New Value Text', readonly=1)
|
||||
new_value_datetime = fields.Datetime('New Value Datetime', readonly=1)
|
||||
new_value_integer = fields.Integer('New Value Integer', readonly=True)
|
||||
new_value_float = fields.Float('New Value Float', readonly=True)
|
||||
new_value_char = fields.Char('New Value Char', readonly=True)
|
||||
new_value_text = fields.Text('New Value Text', readonly=True)
|
||||
new_value_datetime = fields.Datetime('New Value Datetime', readonly=True)
|
||||
|
||||
currency_id = fields.Many2one('res.currency', 'Currency', readonly=True, ondelete='set null',
|
||||
help="Used to display the currency when tracking monetary values")
|
||||
|
||||
mail_message_id = fields.Many2one('mail.message', 'Message ID', required=True, index=True, ondelete='cascade')
|
||||
|
||||
tracking_sequence = fields.Integer('Tracking field sequence', readonly=1, default=100)
|
||||
def _filter_has_field_access(self, env):
|
||||
""" Return the subset of self for which the user in env has access. As
|
||||
this model is admin-only, it is generally accessed as sudo and we need
|
||||
to distinguish context environment from tracking values environment.
|
||||
|
||||
@api.depends('mail_message_id', 'field')
|
||||
def _compute_field_groups(self):
|
||||
for tracking in self:
|
||||
model = self.env[tracking.field.model]
|
||||
field = model._fields.get(tracking.field.name)
|
||||
tracking.field_groups = field.groups if field else 'base.group_system'
|
||||
If tracking is linked to a field, user should have access to the field.
|
||||
Otherwise only members of "base.group_system" can access it. """
|
||||
|
||||
def has_field_access(tracking):
|
||||
if not tracking.field_id:
|
||||
return env.is_system()
|
||||
model = env[tracking.field_id.model]
|
||||
model_field = model._fields.get(tracking.field_id.name)
|
||||
return model._has_field_access(model_field, 'read') if model_field else False
|
||||
|
||||
return self.filtered(has_field_access)
|
||||
|
||||
def _filter_free_field_access(self):
|
||||
""" Return the subset of self which is available for all users: trackings
|
||||
linked to an existing field without access group. It is used notably
|
||||
when sending tracking summary through notifications. """
|
||||
|
||||
def has_free_access(tracking):
|
||||
if not tracking.field_id:
|
||||
return False
|
||||
model_field = self.env[tracking.field_id.model]._fields.get(tracking.field_id.name)
|
||||
return model_field and not model_field.groups
|
||||
|
||||
return self.filtered(has_free_access)
|
||||
|
||||
@api.model
|
||||
def create_tracking_values(self, initial_value, new_value, col_name, col_info, tracking_sequence, model_name):
|
||||
tracked = True
|
||||
def _create_tracking_values(self, initial_value, new_value, col_name, col_info, record):
|
||||
""" Prepare values to create a mail.tracking.value. It prepares old and
|
||||
new value according to the field type.
|
||||
|
||||
field = self.env['ir.model.fields']._get(model_name, col_name)
|
||||
:param initial_value: field value before the change, could be text, int,
|
||||
date, datetime, ...;
|
||||
:param new_value: field value after the change, could be text, int,
|
||||
date, datetime, ...;
|
||||
:param str col_name: technical field name, column name (e.g. 'user_id);
|
||||
:param dict col_info: result of fields_get(col_name);
|
||||
:param <record> record: record on which tracking is performed, used for
|
||||
related computation e.g. finding currency of monetary fields;
|
||||
|
||||
:return: a dict values valid for 'mail.tracking.value' creation;
|
||||
"""
|
||||
field = self.env['ir.model.fields']._get(record._name, col_name)
|
||||
if not field:
|
||||
return
|
||||
raise ValueError(f'Unknown field {col_name} on model {record._name}')
|
||||
|
||||
values = {'field': field.id, 'field_desc': col_info['string'], 'field_type': col_info['type'], 'tracking_sequence': tracking_sequence}
|
||||
values = {'field_id': field.id}
|
||||
|
||||
if col_info['type'] in ['integer', 'float', 'char', 'text', 'datetime', 'monetary']:
|
||||
if col_info['type'] in {'integer', 'float', 'char', 'text', 'datetime'}:
|
||||
values.update({
|
||||
'old_value_%s' % col_info['type']: initial_value,
|
||||
'new_value_%s' % col_info['type']: new_value
|
||||
f'old_value_{col_info["type"]}': initial_value,
|
||||
f'new_value_{col_info["type"]}': new_value
|
||||
})
|
||||
elif col_info['type'] == 'monetary':
|
||||
values.update({
|
||||
'currency_id': record[col_info['currency_field']].id,
|
||||
'old_value_float': initial_value,
|
||||
'new_value_float': new_value
|
||||
})
|
||||
elif col_info['type'] == 'date':
|
||||
values.update({
|
||||
|
|
@ -76,64 +113,203 @@ class MailTracking(models.Model):
|
|||
'new_value_char': new_value and dict(col_info['selection'])[new_value] or ''
|
||||
})
|
||||
elif col_info['type'] == 'many2one':
|
||||
# Can be:
|
||||
# - False value
|
||||
# - recordset, in case of standard field
|
||||
# - (id, display name), in case of properties (read format)
|
||||
if not initial_value:
|
||||
initial_value = (0, '')
|
||||
elif isinstance(initial_value, models.BaseModel):
|
||||
initial_value = (initial_value.id, initial_value.display_name)
|
||||
|
||||
if not new_value:
|
||||
new_value = (0, '')
|
||||
elif isinstance(new_value, models.BaseModel):
|
||||
new_value = (new_value.id, new_value.display_name)
|
||||
|
||||
values.update({
|
||||
'old_value_integer': initial_value and initial_value.id or 0,
|
||||
'new_value_integer': new_value and new_value.id or 0,
|
||||
'old_value_char': initial_value and initial_value.sudo().name_get()[0][1] or '',
|
||||
'new_value_char': new_value and new_value.sudo().name_get()[0][1] or ''
|
||||
'old_value_integer': initial_value[0],
|
||||
'new_value_integer': new_value[0],
|
||||
'old_value_char': initial_value[1],
|
||||
'new_value_char': new_value[1]
|
||||
})
|
||||
elif col_info['type'] in {'one2many', 'many2many', 'tags'}:
|
||||
# Can be:
|
||||
# - False value
|
||||
# - recordset, in case of standard field
|
||||
# - [(id, display name), ...], in case of properties (read format)
|
||||
model_name = self.env['ir.model']._get(field.relation).display_name
|
||||
if not initial_value:
|
||||
old_value_char = ''
|
||||
elif isinstance(initial_value, models.BaseModel):
|
||||
old_value_char = ', '.join(
|
||||
value.display_name or self.env._(
|
||||
'Unnamed %(record_model_name)s (%(record_id)s)',
|
||||
record_model_name=model_name, record_id=value.id
|
||||
)
|
||||
for value in initial_value
|
||||
)
|
||||
else:
|
||||
old_value_char = ', '.join(value[1] for value in initial_value)
|
||||
if not new_value:
|
||||
new_value_char = ''
|
||||
elif isinstance(new_value, models.BaseModel):
|
||||
new_value_char = ', '.join(
|
||||
value.display_name or self.env._(
|
||||
'Unnamed %(record_model_name)s (%(record_id)s)',
|
||||
record_model_name=model_name, record_id=value.id
|
||||
)
|
||||
for value in new_value
|
||||
)
|
||||
else:
|
||||
new_value_char = ', '.join(value[1] for value in new_value)
|
||||
|
||||
values.update({
|
||||
'old_value_char': old_value_char,
|
||||
'new_value_char': new_value_char,
|
||||
})
|
||||
else:
|
||||
tracked = False
|
||||
raise NotImplementedError(f'Unsupported tracking on field {field.name} (type {col_info["type"]}')
|
||||
|
||||
if tracked:
|
||||
return values
|
||||
return {}
|
||||
return values
|
||||
|
||||
@api.model
|
||||
def _create_tracking_values_property(self, initial_value, col_name, col_info, record):
|
||||
"""Generate the values for the <mail.tracking.values> corresponding to a property."""
|
||||
col_info = col_info | {'type': initial_value['type'], 'selection': initial_value.get('selection')}
|
||||
|
||||
field_info = {
|
||||
'desc': f"{col_info['string']}: {initial_value['string']}",
|
||||
'name': col_name,
|
||||
'type': initial_value['type'],
|
||||
}
|
||||
value = initial_value.get('value', False)
|
||||
if value and initial_value['type'] == 'tags':
|
||||
value = [t for t in initial_value.get('tags', []) if t[0] in value]
|
||||
|
||||
tracking_values = self.env['mail.tracking.value']._create_tracking_values(
|
||||
value, False, col_name, col_info, record)
|
||||
return {**tracking_values, 'field_info': field_info}
|
||||
|
||||
def _tracking_value_format(self):
|
||||
tracking_values = [{
|
||||
'changedField': tracking.field_desc,
|
||||
'id': tracking.id,
|
||||
'newValue': {
|
||||
'currencyId': tracking.currency_id.id,
|
||||
'fieldType': tracking.field_type,
|
||||
'value': tracking._get_new_display_value()[0],
|
||||
},
|
||||
'oldValue': {
|
||||
'currencyId': tracking.currency_id.id,
|
||||
'fieldType': tracking.field_type,
|
||||
'value': tracking._get_old_display_value()[0],
|
||||
},
|
||||
} for tracking in self]
|
||||
return tracking_values
|
||||
""" Return structure and formatted data structure to be used by chatter
|
||||
to display tracking values. Order it according to asked display, aka
|
||||
ascending sequence (and field name).
|
||||
|
||||
:return: for each tracking value in self, their formatted display
|
||||
values given as a dict;
|
||||
:rtype: list[dict]
|
||||
"""
|
||||
model_map = {}
|
||||
for tracking in self:
|
||||
model = tracking.field_id.model or tracking.mail_message_id.model
|
||||
model_map.setdefault(model, self.browse())
|
||||
model_map[model] += tracking
|
||||
formatted = []
|
||||
for model, trackings in model_map.items():
|
||||
formatted += trackings._tracking_value_format_model(model)
|
||||
return formatted
|
||||
|
||||
def _tracking_value_format_model(self, model):
|
||||
""" Return structure and formatted data structure to be used by chatter
|
||||
to display tracking values. Order it according to asked display, aka
|
||||
ascending sequence (and field name).
|
||||
|
||||
:returns: for each tracking value in self, their formatted display
|
||||
values given as a dict;
|
||||
:rtype: list[dict]
|
||||
"""
|
||||
if not self:
|
||||
return []
|
||||
|
||||
# fetch model-based information
|
||||
if model:
|
||||
TrackedModel = self.env[model]
|
||||
tracked_fields = TrackedModel.fields_get(self.field_id.mapped('name'), attributes={'digits', 'string', 'type'})
|
||||
model_sequence_info = dict(TrackedModel._mail_track_order_fields(tracked_fields)) if model else {}
|
||||
else:
|
||||
tracked_fields, model_sequence_info = {}, {}
|
||||
|
||||
# generate sequence of trackings
|
||||
fields_sequence_map = dict(
|
||||
{
|
||||
tracking.field_info['name']: tracking.field_info.get('sequence', 100)
|
||||
for tracking in self.filtered('field_info')
|
||||
},
|
||||
**model_sequence_info,
|
||||
)
|
||||
# generate dict of field information, if available
|
||||
fields_col_info = (
|
||||
tracking.field_id.ttype != 'properties'
|
||||
and tracked_fields.get(tracking.field_id.name)
|
||||
or {
|
||||
'string': tracking.field_info['desc'] if tracking.field_info else self.env._('Unknown'),
|
||||
'type': tracking.field_info['type'] if tracking.field_info else 'char',
|
||||
} for tracking in self
|
||||
)
|
||||
|
||||
def sort_tracking_info(tracking_info_tuple):
|
||||
tracking = tracking_info_tuple[0]
|
||||
field_name = tracking.field_id.name or (tracking.field_info['name'] if tracking.field_info else 'unknown')
|
||||
return (
|
||||
fields_sequence_map.get(field_name, 100),
|
||||
tracking.field_id.ttype == 'properties',
|
||||
field_name,
|
||||
)
|
||||
|
||||
formatted = [
|
||||
{
|
||||
'id': tracking.id,
|
||||
'fieldInfo': {
|
||||
'changedField': col_info['string'],
|
||||
'currencyId': tracking.currency_id.id,
|
||||
'floatPrecision': col_info.get('digits'),
|
||||
'fieldType': col_info['type'],
|
||||
'isPropertyField': tracking.field_id.ttype == 'properties',
|
||||
},
|
||||
'newValue': tracking._format_display_value(col_info['type'], new=True)[0],
|
||||
'oldValue': tracking._format_display_value(col_info['type'], new=False)[0],
|
||||
}
|
||||
for tracking, col_info in sorted(zip(self, fields_col_info), key=sort_tracking_info)
|
||||
]
|
||||
return formatted
|
||||
|
||||
def _format_display_value(self, field_type, new=True):
|
||||
""" Format value of 'mail.tracking.value', according to the field type.
|
||||
|
||||
:param str field_type: Odoo field type;
|
||||
:param bool new: if True, display the 'new' value. Otherwise display
|
||||
the 'old' one.
|
||||
"""
|
||||
field_mapping = {
|
||||
'boolean': ('old_value_integer', 'new_value_integer'),
|
||||
'date': ('old_value_datetime', 'new_value_datetime'),
|
||||
'datetime': ('old_value_datetime', 'new_value_datetime'),
|
||||
'char': ('old_value_char', 'new_value_char'),
|
||||
'float': ('old_value_float', 'new_value_float'),
|
||||
'integer': ('old_value_integer', 'new_value_integer'),
|
||||
'monetary': ('old_value_float', 'new_value_float'),
|
||||
'text': ('old_value_text', 'new_value_text'),
|
||||
}
|
||||
|
||||
def _get_display_value(self, prefix):
|
||||
assert prefix in ('new', 'old')
|
||||
result = []
|
||||
for record in self:
|
||||
if record.field_type in ['integer', 'float', 'char', 'text', 'monetary']:
|
||||
result.append(record[f'{prefix}_value_{record.field_type}'])
|
||||
elif record.field_type == 'datetime':
|
||||
if record[f'{prefix}_value_datetime']:
|
||||
new_datetime = record[f'{prefix}_value_datetime']
|
||||
result.append(f'{new_datetime}Z')
|
||||
value_fname = field_mapping.get(
|
||||
field_type, ('old_value_char', 'new_value_char')
|
||||
)[bool(new)]
|
||||
value = record[value_fname]
|
||||
|
||||
if field_type in {'integer', 'float', 'char', 'text', 'monetary'}:
|
||||
result.append(value)
|
||||
elif field_type in {'date', 'datetime'}:
|
||||
if not record[value_fname]:
|
||||
result.append(value)
|
||||
elif field_type == 'date':
|
||||
result.append(fields.Date.to_string(value))
|
||||
else:
|
||||
result.append(record[f'{prefix}_value_datetime'])
|
||||
elif record.field_type == 'date':
|
||||
if record[f'{prefix}_value_datetime']:
|
||||
new_date = record[f'{prefix}_value_datetime']
|
||||
result.append(fields.Date.to_string(new_date))
|
||||
else:
|
||||
result.append(record[f'{prefix}_value_datetime'])
|
||||
elif record.field_type == 'boolean':
|
||||
result.append(bool(record[f'{prefix}_value_integer']))
|
||||
result.append(f'{value}Z')
|
||||
elif field_type == 'boolean':
|
||||
result.append(bool(value))
|
||||
else:
|
||||
result.append(record[f'{prefix}_value_char'])
|
||||
result.append(value)
|
||||
return result
|
||||
|
||||
def _get_old_display_value(self):
|
||||
# grep : # old_value_integer | old_value_datetime | old_value_char
|
||||
return self._get_display_value('old')
|
||||
|
||||
def _get_new_display_value(self):
|
||||
# grep : # new_value_integer | new_value_datetime | new_value_char
|
||||
return self._get_display_value('new')
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -4,32 +4,55 @@
|
|||
from odoo import api, models, fields, tools
|
||||
|
||||
|
||||
class Company(models.Model):
|
||||
_name = 'res.company'
|
||||
class ResCompany(models.Model):
|
||||
_inherit = 'res.company'
|
||||
|
||||
def _default_alias_domain_id(self):
|
||||
return self.env['mail.alias.domain'].search([], limit=1)
|
||||
|
||||
alias_domain_id = fields.Many2one(
|
||||
'mail.alias.domain', string='Email Domain', index='btree_not_null',
|
||||
default=lambda self: self._default_alias_domain_id())
|
||||
bounce_email = fields.Char(string="Bounce Email", compute="_compute_bounce")
|
||||
bounce_formatted = fields.Char(string="Bounce", compute="_compute_bounce")
|
||||
catchall_email = fields.Char(string="Catchall Email", compute="_compute_catchall")
|
||||
catchall_formatted = fields.Char(string="Catchall", compute="_compute_catchall")
|
||||
default_from_email = fields.Char(
|
||||
string="Default From", related="alias_domain_id.default_from_email",
|
||||
readonly=True)
|
||||
# the compute method is sudo'ed because it needs to access res.partner records
|
||||
# portal users cannot access those (but they should be able to read the company email address)
|
||||
email_formatted = fields.Char(string="Formatted Email",
|
||||
email_formatted = fields.Char(
|
||||
string="Formatted Email",
|
||||
compute="_compute_email_formatted", compute_sudo=True)
|
||||
email_primary_color = fields.Char(
|
||||
"Email Button Text", default="#FFFFFF",
|
||||
readonly=False)
|
||||
email_secondary_color = fields.Char(
|
||||
"Email Button Color", default="#875A7B",
|
||||
readonly=False)
|
||||
|
||||
@api.depends('name')
|
||||
@api.depends('alias_domain_id', 'name')
|
||||
def _compute_bounce(self):
|
||||
self.bounce_email = ''
|
||||
self.bounce_formatted = ''
|
||||
|
||||
for company in self.filtered('alias_domain_id'):
|
||||
bounce_email = company.alias_domain_id.bounce_email
|
||||
company.bounce_email = bounce_email
|
||||
company.bounce_formatted = tools.formataddr((company.name, bounce_email))
|
||||
|
||||
@api.depends('alias_domain_id', 'name')
|
||||
def _compute_catchall(self):
|
||||
ConfigParameter = self.env['ir.config_parameter'].sudo()
|
||||
alias = ConfigParameter.get_param('mail.catchall.alias')
|
||||
domain = ConfigParameter.get_param('mail.catchall.domain')
|
||||
if alias and domain:
|
||||
for company in self:
|
||||
company.catchall_email = '%s@%s' % (alias, domain)
|
||||
company.catchall_formatted = tools.formataddr((company.name, company.catchall_email))
|
||||
else:
|
||||
for company in self:
|
||||
company.catchall_email = ''
|
||||
company.catchall_formatted = ''
|
||||
self.catchall_email = ''
|
||||
self.catchall_formatted = ''
|
||||
|
||||
@api.depends('partner_id.email_formatted', 'catchall_formatted')
|
||||
for company in self.filtered('alias_domain_id'):
|
||||
catchall_email = company.alias_domain_id.catchall_email
|
||||
company.catchall_email = catchall_email
|
||||
company.catchall_formatted = tools.formataddr((company.name, catchall_email))
|
||||
|
||||
@api.depends('partner_id', 'catchall_formatted')
|
||||
def _compute_email_formatted(self):
|
||||
for company in self:
|
||||
if company.partner_id.email_formatted:
|
||||
|
|
|
|||
|
|
@ -12,9 +12,13 @@ class ResConfigSettings(models.TransientModel):
|
|||
the alias domain. """
|
||||
_inherit = 'res.config.settings'
|
||||
|
||||
external_email_server_default = fields.Boolean(
|
||||
"Use Custom Email Servers",
|
||||
config_parameter='base_setup.default_external_email_server')
|
||||
fail_counter = fields.Integer('Fail Mail', compute="_compute_fail_counter")
|
||||
alias_domain = fields.Char(
|
||||
'Alias Domain', config_parameter='mail.catchall.domain',
|
||||
alias_domain_id = fields.Many2one(
|
||||
'mail.alias.domain', 'Alias Domain',
|
||||
readonly=False, related='company_id.alias_domain_id',
|
||||
help="If you have setup a catch-all email domain redirected to the Odoo server, enter the domain name here.")
|
||||
module_google_gmail = fields.Boolean('Support Gmail Authentication')
|
||||
module_microsoft_outlook = fields.Boolean('Support Outlook Authentication')
|
||||
|
|
@ -29,15 +33,33 @@ class ResConfigSettings(models.TransientModel):
|
|||
config_parameter='mail.use_twilio_rtc_servers',
|
||||
)
|
||||
twilio_account_sid = fields.Char(
|
||||
'Twilio Account SID',
|
||||
'Account SID',
|
||||
config_parameter='mail.twilio_account_sid',
|
||||
)
|
||||
twilio_account_token = fields.Char(
|
||||
'Twilio Account Auth Token',
|
||||
'Account Auth Token',
|
||||
config_parameter='mail.twilio_account_token',
|
||||
)
|
||||
primary_color = fields.Char(related='company_id.primary_color', string="Header Color", readonly=False)
|
||||
secondary_color = fields.Char(related='company_id.secondary_color', string="Button Color", readonly=False)
|
||||
use_sfu_server = fields.Boolean(
|
||||
'Use SFU server',
|
||||
help="If you want to setup SFU server for large group calls.",
|
||||
config_parameter="mail.use_sfu_server",
|
||||
)
|
||||
sfu_server_url = fields.Char("SFU Server URL", config_parameter="mail.sfu_server_url")
|
||||
sfu_server_key = fields.Char("SFU Server key", config_parameter="mail.sfu_server_key", help="Base64 encoded key")
|
||||
email_primary_color = fields.Char(related='company_id.email_primary_color', readonly=False)
|
||||
email_secondary_color = fields.Char(related='company_id.email_secondary_color', readonly=False)
|
||||
|
||||
tenor_api_key = fields.Char(
|
||||
'Tenor API key',
|
||||
config_parameter='discuss.tenor_api_key',
|
||||
help="Add a Tenor GIF API key to enable GIFs support. https://developers.google.com/tenor/guides/quickstart#setup",
|
||||
)
|
||||
google_translate_api_key = fields.Char(
|
||||
"Message Translation API Key",
|
||||
help="A valid Google API key is required to enable message translation. https://cloud.google.com/translate/docs/setup",
|
||||
config_parameter="mail.google_translate_api_key",
|
||||
)
|
||||
|
||||
def _compute_fail_counter(self):
|
||||
previous_date = fields.Datetime.now() - datetime.timedelta(days=30)
|
||||
|
|
|
|||
|
|
@ -1,23 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models
|
||||
|
||||
|
||||
class ResGroups(models.Model):
|
||||
""" Update of res.groups class
|
||||
- if adding users from a group, check mail.channels linked to this user
|
||||
group and subscribe them. This is done by overriding the write method.
|
||||
"""
|
||||
_name = 'res.groups'
|
||||
_inherit = 'res.groups'
|
||||
_description = 'Access Groups'
|
||||
|
||||
def write(self, vals):
|
||||
res = super(ResGroups, self).write(vals)
|
||||
if vals.get('users'):
|
||||
# form: {'group_ids': [(3, 10), (3, 3), (4, 10), (4, 3)]} or {'group_ids': [(6, 0, [ids]}
|
||||
user_ids = [command[1] for command in vals['users'] if command[0] == 4]
|
||||
user_ids += [id for command in vals['users'] if command[0] == 6 for id in command[2]]
|
||||
self.env['mail.channel'].search([('group_ids', 'in', self._ids)])._subscribe_users_automatically()
|
||||
return res
|
||||
|
|
@ -1,28 +1,64 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import re
|
||||
|
||||
from odoo import _, api, fields, models, tools
|
||||
from odoo.osv import expression
|
||||
from odoo.fields import Domain
|
||||
from odoo.tools.misc import limited_field_access_token
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
from odoo.exceptions import AccessError
|
||||
|
||||
|
||||
class Partner(models.Model):
|
||||
class ResPartner(models.Model):
|
||||
""" Update partner to add a field about notification preferences. Add a generic opt-out field that can be used
|
||||
to restrict usage of automatic email templates. """
|
||||
_name = "res.partner"
|
||||
_name = 'res.partner'
|
||||
_inherit = ['res.partner', 'mail.activity.mixin', 'mail.thread.blacklist']
|
||||
_mail_flat_thread = False
|
||||
|
||||
# override to add and order tracking
|
||||
name = fields.Char(tracking=1)
|
||||
email = fields.Char(tracking=1)
|
||||
phone = fields.Char(tracking=2)
|
||||
parent_id = fields.Many2one(tracking=3)
|
||||
user_id = fields.Many2one(tracking=4)
|
||||
vat = fields.Char(tracking=5)
|
||||
# channels
|
||||
channel_ids = fields.Many2many('mail.channel', 'mail_channel_member', 'partner_id', 'channel_id', string='Channels', copy=False)
|
||||
# tracked field used for chatter logging purposes
|
||||
# we need this to be readable inline as tracking messages use inline HTML nodes
|
||||
contact_address_inline = fields.Char(compute='_compute_contact_address_inline', string='Inlined Complete Address', tracking=True)
|
||||
# sudo: res.partner - can access presence of accessible partner
|
||||
im_status = fields.Char("IM Status", compute="_compute_im_status", compute_sudo=True)
|
||||
offline_since = fields.Datetime("Offline since", compute="_compute_im_status", compute_sudo=True)
|
||||
|
||||
@api.depends('contact_address')
|
||||
def _compute_contact_address_inline(self):
|
||||
"""Compute an inline-friendly address based on contact_address."""
|
||||
for partner in self:
|
||||
# replace any successive \n with a single comma
|
||||
partner.contact_address_inline = re.sub(r'\n(\s|\n)*', ', ', partner.contact_address).strip().strip(',')
|
||||
|
||||
@api.depends("user_ids.manual_im_status", "user_ids.presence_ids.status")
|
||||
def _compute_im_status(self):
|
||||
super()._compute_im_status()
|
||||
for partner in self:
|
||||
all_status = partner.user_ids.presence_ids.mapped(
|
||||
lambda p: "offline" if p.status == "offline" else p.user_id.manual_im_status or p.status
|
||||
)
|
||||
partner.im_status = (
|
||||
"online"
|
||||
if "online" in all_status
|
||||
else "away"
|
||||
if "away" in all_status
|
||||
else "busy"
|
||||
if "busy" in all_status
|
||||
else "offline"
|
||||
if partner.user_ids
|
||||
else "im_partner"
|
||||
)
|
||||
partner.offline_since = (
|
||||
max(partner.user_ids.presence_ids.mapped("last_poll"), default=None)
|
||||
if partner.im_status == "offline"
|
||||
else None
|
||||
)
|
||||
odoobot_id = self.env['ir.model.data']._xmlid_to_res_id('base.partner_root')
|
||||
odoobot = self.env['res.partner'].browse(odoobot_id)
|
||||
if odoobot in self:
|
||||
|
|
@ -44,25 +80,9 @@ class Partner(models.Model):
|
|||
# MESSAGING
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def _mail_get_partners(self):
|
||||
def _mail_get_partners(self, introspect_fields=False):
|
||||
return dict((partner.id, partner) for partner in self)
|
||||
|
||||
def _message_get_suggested_recipients(self):
|
||||
recipients = super(Partner, self)._message_get_suggested_recipients()
|
||||
for partner in self:
|
||||
partner._message_add_suggested_recipient(recipients, partner=partner, reason=_('Partner Profile'))
|
||||
return recipients
|
||||
|
||||
def _message_get_default_recipients(self):
|
||||
return {
|
||||
r.id:
|
||||
{'partner_ids': [r.id],
|
||||
'email_to': False,
|
||||
'email_cc': False
|
||||
}
|
||||
for r in self
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# ORM
|
||||
# ------------------------------------------------------------
|
||||
|
|
@ -70,145 +90,241 @@ class Partner(models.Model):
|
|||
def _get_view_cache_key(self, view_id=None, view_type='form', **options):
|
||||
"""Add context variable force_email in the key as _get_view depends on it."""
|
||||
key = super()._get_view_cache_key(view_id, view_type, **options)
|
||||
return key + (self._context.get('force_email'),)
|
||||
return key + (self.env.context.get('force_email'),)
|
||||
|
||||
@api.model
|
||||
@api.returns('self', lambda value: value.id)
|
||||
def find_or_create(self, email, assert_valid_email=False):
|
||||
""" Override to use the email_normalized field. """
|
||||
if not email:
|
||||
raise ValueError(_('An email is required for find_or_create to work'))
|
||||
|
||||
parsed_name, parsed_email = self._parse_partner_name(email)
|
||||
if not parsed_email and assert_valid_email:
|
||||
parsed_name, parsed_email_normalized = tools.parse_contact_from_email(email)
|
||||
if not parsed_email_normalized and assert_valid_email:
|
||||
raise ValueError(_('%(email)s is not recognized as a valid email. This is required to create a new customer.'))
|
||||
if parsed_email:
|
||||
email_normalized = tools.email_normalize(parsed_email)
|
||||
if email_normalized:
|
||||
partners = self.search([('email_normalized', '=', email_normalized)], limit=1)
|
||||
if partners:
|
||||
return partners
|
||||
if parsed_email_normalized:
|
||||
partners = self.search([('email_normalized', '=', parsed_email_normalized)], limit=1)
|
||||
if partners:
|
||||
return partners
|
||||
|
||||
# We don't want to call `super()` to avoid searching twice on the email
|
||||
# Especially when the search `email =ilike` cannot be as efficient as
|
||||
# a search on email_normalized with a btree index
|
||||
# If you want to override `find_or_create()` your module should depend on `mail`
|
||||
create_values = {self._rec_name: parsed_name or parsed_email}
|
||||
if parsed_email: # otherwise keep default_email in context
|
||||
create_values['email'] = parsed_email
|
||||
create_values = {self._rec_name: parsed_name or parsed_email_normalized}
|
||||
if parsed_email_normalized: # otherwise keep default_email in context
|
||||
create_values['email'] = parsed_email_normalized
|
||||
return self.create(create_values)
|
||||
|
||||
@api.model
|
||||
def _find_or_create_from_emails(self, emails, ban_emails=None,
|
||||
filter_found=None, additional_values=None,
|
||||
no_create=False, sort_key=None, sort_reverse=True):
|
||||
""" Based on a list of emails, find or (optionally) create partners.
|
||||
If an email is not unique (e.g. multi-email input), only the first found
|
||||
valid email in input is considered. Filter and sort options allow to
|
||||
tweak the way we link emails to partners (e.g. share partners only, ...).
|
||||
|
||||
Optional additional values allow to customize the created partner. Data
|
||||
are given per normalized email as it the creation criterion.
|
||||
|
||||
When an email is invalid but not void, it is used for search or create.
|
||||
It allows updating it afterwards e.g. with notifications resend which
|
||||
allows fixing typos / wrong emails.
|
||||
|
||||
:param list emails: list of emails that can be formatted;
|
||||
:param list ban_emails: optional list of banished emails e.g. because
|
||||
it may interfere with master data like aliases;
|
||||
:param callable filter_found: if given, filters found partners based on emails;
|
||||
:param dict additional_values: additional values per normalized or
|
||||
raw invalid email given to partner creation. Typically used to
|
||||
propagate a company_id and customer information from related record.
|
||||
If email cannot be normalized, raw value is used as dict key instead;
|
||||
:param sort_key: an optional sorting key for sorting partners before
|
||||
finding one with matching email normalized. When several partners
|
||||
have the same email, users might want to give a preference based
|
||||
on e.g. company, being a customer or not, ... Default ordering is
|
||||
to use 'id ASC', which means older partners first as they are considered
|
||||
as more relevant compared to default 'complete_name';
|
||||
:param bool sort_reverse: given to sorted (see 'reverse' argument of sort);
|
||||
:param bool no_create: skip the 'create' part of 'find or create'. Allows
|
||||
to use tool as 'find and sort' without adding new partners in db;
|
||||
|
||||
:return: res.partner records in a list, following order of emails. Using
|
||||
a list allows to to keep Falsy values when no match;
|
||||
:rtype: list
|
||||
"""
|
||||
additional_values = additional_values or {}
|
||||
partners, tocreate_vals_list = self.env['res.partner'], []
|
||||
name_emails = [tools.parse_contact_from_email(email) for email in emails]
|
||||
|
||||
# find valid emails_normalized, filtering out false / void values, and search
|
||||
# for existing partners based on those emails
|
||||
emails_normalized = {email_normalized
|
||||
for _name, email_normalized in name_emails
|
||||
if email_normalized and email_normalized not in (ban_emails or [])}
|
||||
# find partners for invalid (but not void) emails, aka either invalid email
|
||||
# either no email and a name that will be used as email
|
||||
names = {
|
||||
name.strip()
|
||||
for name, email_normalized in name_emails
|
||||
if not email_normalized and name.strip() and name.strip() not in (ban_emails or [])
|
||||
}
|
||||
if emails_normalized or names:
|
||||
domains = []
|
||||
if emails_normalized:
|
||||
domains.append([('email_normalized', 'in', list(emails_normalized))])
|
||||
if names:
|
||||
domains.append([('email', 'in', list(names))])
|
||||
partners += self.search(Domain.OR(domains), order='id ASC')
|
||||
if filter_found:
|
||||
partners = partners.filtered(filter_found)
|
||||
|
||||
if not no_create:
|
||||
# create partners for valid email without any existing partner. Keep
|
||||
# only first found occurrence of each normalized email, aka: ('Norbert',
|
||||
# 'norbert@gmail.com'), ('Norbert With Surname', 'norbert@gmail.com')'
|
||||
# -> a single partner is created for email 'norbert@gmail.com'
|
||||
seen = set()
|
||||
notfound_emails = emails_normalized - set(partners.mapped('email_normalized'))
|
||||
notfound_name_emails = [
|
||||
name_email
|
||||
for name_email in name_emails
|
||||
if name_email[1] in notfound_emails and name_email[1] not in seen
|
||||
and not seen.add(name_email[1])
|
||||
]
|
||||
tocreate_vals_list += [
|
||||
{
|
||||
self._rec_name: name or email_normalized,
|
||||
'email': email_normalized,
|
||||
**additional_values.get(email_normalized, {}),
|
||||
}
|
||||
for name, email_normalized in notfound_name_emails
|
||||
if email_normalized not in (ban_emails or [])
|
||||
]
|
||||
# create partners for invalid emails (aka name and not email_normalized)
|
||||
# without any existing partner
|
||||
tocreate_vals_list += [
|
||||
{
|
||||
self._rec_name: name,
|
||||
'email': name,
|
||||
**additional_values.get(name, {}),
|
||||
}
|
||||
for name in names if name not in partners.mapped('email') and name not in (ban_emails or [])
|
||||
]
|
||||
# create partners once, avoid current user being followers of those
|
||||
if tocreate_vals_list:
|
||||
partners += self.with_context(mail_create_nosubscribe=True).create(tocreate_vals_list)
|
||||
|
||||
# sort partners (already ordered based on search)
|
||||
if sort_key:
|
||||
partners = partners.sorted(key=sort_key, reverse=sort_reverse)
|
||||
|
||||
return [
|
||||
next(
|
||||
(partner for partner in partners
|
||||
if (email_normalized and partner.email_normalized == email_normalized)
|
||||
or (not email_normalized and email and partner.email == email)
|
||||
or (not email_normalized and name and partner.name == name)
|
||||
),
|
||||
self.env['res.partner']
|
||||
)
|
||||
for (name, email_normalized), email in zip(name_emails, emails)
|
||||
]
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# DISCUSS
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def mail_partner_format(self, fields=None):
|
||||
partners_format = dict()
|
||||
if not fields:
|
||||
fields = {'id': True, 'name': True, 'email': True, 'active': True, 'im_status': True, 'user': {}}
|
||||
for partner in self:
|
||||
data = {}
|
||||
if 'id' in fields:
|
||||
data['id'] = partner.id
|
||||
if 'name' in fields:
|
||||
data['name'] = partner.name
|
||||
if 'email' in fields:
|
||||
data['email'] = partner.email
|
||||
if 'active' in fields:
|
||||
data['active'] = partner.active
|
||||
if 'im_status' in fields:
|
||||
data['im_status'] = partner.im_status
|
||||
if 'user' in fields:
|
||||
internal_users = partner.user_ids - partner.user_ids.filtered('share')
|
||||
main_user = internal_users[0] if len(internal_users) > 0 else partner.user_ids[0] if len(partner.user_ids) > 0 else self.env['res.users']
|
||||
data['user'] = {
|
||||
"id": main_user.id,
|
||||
"isInternalUser": not main_user.share,
|
||||
} if main_user else [('clear',)]
|
||||
if not self.env.user._is_internal():
|
||||
data.pop('email', None)
|
||||
partners_format[partner] = data
|
||||
return partners_format
|
||||
def _get_im_status_access_token(self):
|
||||
"""Return a scoped access token for the `im_status` field. The token is used in
|
||||
`ir_websocket._prepare_subscribe_data` to grant access to presence channels.
|
||||
|
||||
def _message_fetch_failed(self):
|
||||
"""Returns first 100 messages, sent by the current partner, that have errors, in
|
||||
the format expected by the web client."""
|
||||
self.ensure_one()
|
||||
notifications = self.env['mail.notification'].search([
|
||||
('author_id', '=', self.id),
|
||||
('notification_status', 'in', ('bounce', 'exception')),
|
||||
('mail_message_id.message_type', '!=', 'user_notification'),
|
||||
('mail_message_id.model', '!=', False),
|
||||
('mail_message_id.res_id', '!=', 0),
|
||||
], limit=100)
|
||||
return notifications.mail_message_id._message_notification_format()
|
||||
|
||||
def _get_channels_as_member(self):
|
||||
"""Returns the channels of the partner."""
|
||||
self.ensure_one()
|
||||
channels = self.env['mail.channel']
|
||||
# get the channels and groups
|
||||
channels |= self.env['mail.channel'].search([
|
||||
('channel_type', 'in', ('channel', 'group')),
|
||||
('channel_partner_ids', 'in', [self.id]),
|
||||
])
|
||||
# get the pinned direct messages
|
||||
channels |= self.env['mail.channel'].search([
|
||||
('channel_type', '=', 'chat'),
|
||||
('channel_member_ids', 'in', self.env['mail.channel.member'].sudo()._search([
|
||||
('partner_id', '=', self.id),
|
||||
('is_pinned', '=', True),
|
||||
])),
|
||||
])
|
||||
return channels
|
||||
|
||||
@api.model
|
||||
def search_for_channel_invite(self, search_term, channel_id=None, limit=30):
|
||||
""" Returns partners matching search_term that can be invited to a channel.
|
||||
If the channel_id is specified, only partners that can actually be invited to the channel
|
||||
are returned (not already members, and in accordance to the channel configuration).
|
||||
:rtype: str
|
||||
"""
|
||||
domain = expression.AND([
|
||||
expression.OR([
|
||||
[('name', 'ilike', search_term)],
|
||||
[('email', 'ilike', search_term)],
|
||||
]),
|
||||
[('active', '=', True)],
|
||||
[('type', '!=', 'private')],
|
||||
[('user_ids', '!=', False)],
|
||||
[('user_ids.active', '=', True)],
|
||||
[('user_ids.share', '=', False)],
|
||||
])
|
||||
if channel_id:
|
||||
channel = self.env['mail.channel'].search([('id', '=', int(channel_id))])
|
||||
domain = expression.AND([domain, [('channel_ids', 'not in', channel.id)]])
|
||||
if channel.group_public_id:
|
||||
domain = expression.AND([domain, [('user_ids.groups_id', 'in', channel.group_public_id.id)]])
|
||||
query = self.env['res.partner']._search(domain, order='name, id')
|
||||
query.order = 'LOWER("res_partner"."name"), "res_partner"."id"' # bypass lack of support for case insensitive order in search()
|
||||
query.limit = int(limit)
|
||||
return {
|
||||
'count': self.env['res.partner'].search_count(domain),
|
||||
'partners': list(self.env['res.partner'].browse(query).mail_partner_format().values()),
|
||||
}
|
||||
self.ensure_one()
|
||||
return limited_field_access_token(self, "im_status", scope="mail.presence")
|
||||
|
||||
def _get_mention_token(self):
|
||||
"""Return a scoped limited access token that indicates the current partner
|
||||
can be mentioned in messages.
|
||||
|
||||
:rtype: str
|
||||
"""
|
||||
self.ensure_one()
|
||||
return limited_field_access_token(self, "id", scope="mail.message_mention")
|
||||
|
||||
def _get_store_mention_fields(self):
|
||||
return [Store.Attr("mention_token", lambda p: p._get_mention_token())]
|
||||
|
||||
def _get_store_avatar_card_fields(self, target):
|
||||
fields = [
|
||||
"im_status",
|
||||
"name",
|
||||
"partner_share",
|
||||
]
|
||||
if target.is_internal(self.env):
|
||||
fields.extend(["email", "phone"])
|
||||
return fields
|
||||
|
||||
def _field_store_repr(self, field_name):
|
||||
if field_name == "avatar_128":
|
||||
return [
|
||||
Store.Attr("avatar_128_access_token", lambda p: p._get_avatar_128_access_token()),
|
||||
"write_date",
|
||||
]
|
||||
if field_name == "im_status":
|
||||
return [
|
||||
"im_status",
|
||||
Store.Attr("im_status_access_token", lambda p: p._get_im_status_access_token()),
|
||||
]
|
||||
return [field_name]
|
||||
|
||||
def _to_store_defaults(self, target: Store.Target):
|
||||
res = [
|
||||
"active",
|
||||
"avatar_128",
|
||||
"im_status",
|
||||
"is_company",
|
||||
# sudo: res.partner - to access portal user of another company in chatter
|
||||
Store.One("main_user_id", ["partner_id", "share"], sudo=True),
|
||||
"name",
|
||||
]
|
||||
if target.is_internal(self.env):
|
||||
res.append("email")
|
||||
return res
|
||||
|
||||
@api.readonly
|
||||
@api.model
|
||||
def get_mention_suggestions(self, search, limit=8, channel_id=None):
|
||||
def get_mention_suggestions(self, search, limit=8):
|
||||
""" Return 'limit'-first partners' such that the name or email matches a 'search' string.
|
||||
Prioritize partners that are also (internal) users, and then extend the research to all partners.
|
||||
If channel_id is given, only members of this channel are returned.
|
||||
The return format is a list of partner data (as per returned by `mail_partner_format()`).
|
||||
The return format is a list of partner data (as per returned by `_to_store()`).
|
||||
"""
|
||||
search_dom = expression.OR([[('name', 'ilike', search)], [('email', 'ilike', search)]])
|
||||
search_dom = expression.AND([[('active', '=', True), ('type', '!=', 'private')], search_dom])
|
||||
if channel_id:
|
||||
search_dom = expression.AND([[('channel_ids', 'in', channel_id)], search_dom])
|
||||
domain_is_user = expression.AND([[('user_ids', '!=', False), ('user_ids.active', '=', True)], search_dom])
|
||||
domain = self._get_mention_suggestions_domain(search)
|
||||
partners = self._search_mention_suggestions(domain, limit)
|
||||
store = Store().add(partners, extra_fields=partners._get_store_mention_fields())
|
||||
try:
|
||||
roles = self.env["res.role"].search([("name", "ilike", search)], limit=8)
|
||||
store.add(roles, "name")
|
||||
except AccessError:
|
||||
pass
|
||||
return store.get_result()
|
||||
|
||||
@api.model
|
||||
def _get_mention_suggestions_domain(self, search):
|
||||
return (Domain('name', 'ilike', search) | Domain('email', 'ilike', search)) & Domain('active', '=', True)
|
||||
|
||||
@api.model
|
||||
def _search_mention_suggestions(self, domain, limit, extra_domain=None):
|
||||
domain = Domain(domain)
|
||||
domain_is_user = Domain('user_ids', '!=', False) & Domain('user_ids.active', '=', True) & domain
|
||||
priority_conditions = [
|
||||
expression.AND([domain_is_user, [('partner_share', '=', False)]]), # Search partners that are internal users
|
||||
domain_is_user & Domain('partner_share', '=', False), # Search partners that are internal users
|
||||
domain_is_user, # Search partners that are users
|
||||
search_dom, # Search partners that are not users
|
||||
domain, # Search partners that are not users
|
||||
]
|
||||
if extra_domain:
|
||||
priority_conditions.append(Domain(extra_domain))
|
||||
partners = self.env['res.partner']
|
||||
for domain in priority_conditions:
|
||||
remaining_limit = limit - len(partners)
|
||||
|
|
@ -217,31 +333,12 @@ class Partner(models.Model):
|
|||
# We are using _search to avoid the default order that is
|
||||
# automatically added by the search method. "Order by" makes the query
|
||||
# really slow.
|
||||
query = self._search(expression.AND([[('id', 'not in', partners.ids)], domain]), limit=remaining_limit)
|
||||
query = self._search(Domain('id', 'not in', partners.ids) & domain, limit=remaining_limit)
|
||||
partners |= self.browse(query)
|
||||
partners_format = partners.mail_partner_format()
|
||||
if channel_id:
|
||||
member_by_partner = {member.partner_id: member for member in self.env['mail.channel.member'].search([('channel_id', '=', channel_id), ('partner_id', 'in', partners.ids)])}
|
||||
for partner in partners:
|
||||
partners_format.get(partner)['persona'] = {
|
||||
'channelMembers': [('insert', member_by_partner.get(partner)._mail_channel_member_format(fields={'id': True, 'channel': {'id'}, 'persona': {'partner': {'id'}}}).get(member_by_partner.get(partner)))],
|
||||
}
|
||||
return list(partners_format.values())
|
||||
return partners
|
||||
|
||||
@api.model
|
||||
def im_search(self, name, limit=20):
|
||||
""" Search partner with a name and return its id, name and im_status.
|
||||
Note : the user must be logged
|
||||
:param name : the partner name to search
|
||||
:param limit : the limit of result to return
|
||||
"""
|
||||
# This method is supposed to be used only in the context of channel creation or
|
||||
# extension via an invite. As both of these actions require the 'create' access
|
||||
# right, we check this specific ACL.
|
||||
users = self.env['res.users'].search([
|
||||
('id', '!=', self.env.user.id),
|
||||
('name', 'ilike', name),
|
||||
('active', '=', True),
|
||||
('share', '=', False),
|
||||
], order='name, id', limit=limit)
|
||||
return list(users.partner_id.mail_partner_format().values())
|
||||
def _get_current_persona(self):
|
||||
if not self.env.user or self.env.user._is_public():
|
||||
return (self.env["res.partner"], self.env["mail.guest"]._get_guest_from_context())
|
||||
return (self.env.user.partner_id, self.env["mail.guest"])
|
||||
|
|
|
|||
17
odoo-bringout-oca-ocb-mail/mail/models/res_role.py
Normal file
17
odoo-bringout-oca-ocb-mail/mail/models/res_role.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
|
||||
|
||||
class ResRole(models.Model):
|
||||
_name = "res.role"
|
||||
_description = (
|
||||
"Represents a role in the system used to categorize users. "
|
||||
"Each role has a unique name and can be associated with multiple users. "
|
||||
"Roles can be mentioned in messages to notify all associated users."
|
||||
)
|
||||
|
||||
name = fields.Char(required=True)
|
||||
user_ids = fields.Many2many("res.users", relation="res_role_res_users_rel", string="Users")
|
||||
|
||||
_unique_name = models.UniqueIndex("(name)", "A role with the same name already exists.")
|
||||
|
|
@ -1,57 +1,152 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from collections import defaultdict
|
||||
import contextlib
|
||||
|
||||
from odoo import _, api, fields, models, modules, tools
|
||||
from odoo.addons.base.models.res_users import is_selection_groups
|
||||
from odoo import _, api, Command, fields, models, modules, tools
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.http import request
|
||||
from odoo.tools import email_normalize, str2bool
|
||||
from odoo.addons.mail.tools.discuss import Store
|
||||
|
||||
|
||||
class Users(models.Model):
|
||||
class ResUsers(models.Model):
|
||||
""" Update of res.users class
|
||||
- add a preference about sending emails about notifications
|
||||
- make a new user follow itself
|
||||
- add a welcome message
|
||||
- add suggestion preference
|
||||
- if adding groups to a user, check mail.channels linked to this user
|
||||
group, and the user. This is done by overriding the write method.
|
||||
"""
|
||||
_name = 'res.users'
|
||||
_inherit = ['res.users']
|
||||
_inherit = 'res.users'
|
||||
|
||||
role_ids = fields.Many2many(
|
||||
"res.role",
|
||||
relation="res_role_res_users_rel",
|
||||
string="User Roles",
|
||||
help="Users are notified whenever one of their roles is @-mentioned in a conversation.",
|
||||
)
|
||||
can_edit_role = fields.Boolean(compute="_compute_can_edit_role")
|
||||
notification_type = fields.Selection([
|
||||
('email', 'Handle by Emails'),
|
||||
('inbox', 'Handle in Odoo')],
|
||||
('email', 'By Emails'),
|
||||
('inbox', 'In Odoo')],
|
||||
'Notification', required=True, default='email',
|
||||
compute='_compute_notification_type', store=True, readonly=False,
|
||||
compute='_compute_notification_type', inverse='_inverse_notification_type', store=True,
|
||||
help="Policy on how to handle Chatter notifications:\n"
|
||||
"- Handle by Emails: notifications are sent to your email address\n"
|
||||
"- Handle in Odoo: notifications appear in your Odoo Inbox")
|
||||
res_users_settings_ids = fields.One2many('res.users.settings', 'user_id')
|
||||
# Provide a target for relateds that is not a x2Many field.
|
||||
res_users_settings_id = fields.Many2one('res.users.settings', string="Settings", compute='_compute_res_users_settings_id', search='_search_res_users_settings_id')
|
||||
"- By Emails: notifications are sent to your email address\n"
|
||||
"- In Odoo: notifications appear in your Odoo Inbox")
|
||||
presence_ids = fields.One2many("mail.presence", "user_id", groups="base.group_system")
|
||||
# OOO management
|
||||
out_of_office_from = fields.Datetime()
|
||||
out_of_office_to = fields.Datetime()
|
||||
out_of_office_message = fields.Html('Vacation Responder')
|
||||
is_out_of_office = fields.Boolean('Out of Office', compute='_compute_is_out_of_office')
|
||||
# sudo: res.users - can access presence of accessible user
|
||||
im_status = fields.Char("IM Status", compute="_compute_im_status", compute_sudo=True)
|
||||
manual_im_status = fields.Selection(
|
||||
[("away", "Away"), ("busy", "Do Not Disturb"), ("offline", "Offline")],
|
||||
string="IM status manually set by the user",
|
||||
)
|
||||
|
||||
_sql_constraints = [(
|
||||
"notification_type",
|
||||
outgoing_mail_server_id = fields.Many2one(
|
||||
"ir.mail_server",
|
||||
"Outgoing Mail Server",
|
||||
compute='_compute_outgoing_mail_server_id',
|
||||
groups='base.group_user',
|
||||
)
|
||||
outgoing_mail_server_type = fields.Selection(
|
||||
[('default', 'Default')],
|
||||
"Outgoing Mail Server Type",
|
||||
compute='_compute_outgoing_mail_server_id',
|
||||
required=True,
|
||||
default='default',
|
||||
groups='base.group_user',
|
||||
)
|
||||
has_external_mail_server = fields.Boolean(compute='_compute_has_external_mail_server')
|
||||
|
||||
def _compute_has_external_mail_server(self):
|
||||
self.has_external_mail_server = self.env['ir.config_parameter'].sudo().get_param(
|
||||
'base_setup.default_external_email_server')
|
||||
|
||||
_notification_type = models.Constraint(
|
||||
"CHECK (notification_type = 'email' OR NOT share)",
|
||||
"Only internal user can receive notifications in Odoo",
|
||||
)]
|
||||
'Only internal user can receive notifications in Odoo',
|
||||
)
|
||||
|
||||
@api.depends('share')
|
||||
@api.depends('share', 'all_group_ids')
|
||||
def _compute_notification_type(self):
|
||||
for user in self:
|
||||
# Only the internal users can receive notifications in Odoo
|
||||
if user.share or not user.notification_type:
|
||||
user.notification_type = 'email'
|
||||
# Because of the `group_ids` in the `api.depends`,
|
||||
# this code will be called for any change of group on a user,
|
||||
# even unrelated to the group_mail_notification_type_inbox or share flag.
|
||||
# e.g. if you add HR > Manager to a user, this method will be called.
|
||||
# It should therefore be written to be as performant as possible, and make the less change/write as possible
|
||||
# when it's not `mail.group_mail_notification_type_inbox` or `share` that are being changed.
|
||||
inbox_group_id = self.env['ir.model.data']._xmlid_to_res_id('mail.group_mail_notification_type_inbox')
|
||||
|
||||
@api.depends('res_users_settings_ids')
|
||||
def _compute_res_users_settings_id(self):
|
||||
for user in self:
|
||||
user.res_users_settings_id = user.res_users_settings_ids and user.res_users_settings_ids[0]
|
||||
self.filtered_domain([
|
||||
('group_ids', 'in', inbox_group_id), ('notification_type', '!=', 'inbox')
|
||||
]).notification_type = 'inbox'
|
||||
self.filtered_domain([
|
||||
('group_ids', 'not in', inbox_group_id), ('notification_type', '=', 'inbox')
|
||||
]).notification_type = 'email'
|
||||
|
||||
@api.model
|
||||
def _search_res_users_settings_id(self, operator, operand):
|
||||
return [('res_users_settings_ids', operator, operand)]
|
||||
# Special case: internal users with inbox notifications converted to portal must be converted to email users
|
||||
new_portal_users = self.filtered_domain([('share', '=', True), ('notification_type', '=', 'inbox')])
|
||||
new_portal_users.notification_type = 'email'
|
||||
new_portal_users.write({"group_ids": [Command.unlink(inbox_group_id)]})
|
||||
|
||||
@api.depends('out_of_office_from', 'out_of_office_to')
|
||||
def _compute_is_out_of_office(self):
|
||||
""" Out-of-office is considered as activated once out_of_office_from is
|
||||
set in the past. "To" is not mandatory, as users could simply deactivate
|
||||
it when coming back if the leave timerange is unknown. """
|
||||
now = self.env.cr.now()
|
||||
todo = self.filtered(lambda u: u.out_of_office_from and u._is_internal())
|
||||
for user in todo:
|
||||
if user.out_of_office_to:
|
||||
user.is_out_of_office = (user.out_of_office_from <= now <= user.out_of_office_to)
|
||||
else:
|
||||
user.is_out_of_office = (user.out_of_office_from <= now)
|
||||
(self - todo).is_out_of_office = False
|
||||
|
||||
@api.depends("manual_im_status", "presence_ids.status")
|
||||
def _compute_im_status(self):
|
||||
for user in self:
|
||||
user.im_status = (
|
||||
"offline"
|
||||
if user.presence_ids.status in ["offline", False]
|
||||
else user.manual_im_status or user.presence_ids.status
|
||||
)
|
||||
|
||||
def _inverse_notification_type(self):
|
||||
inbox_group = self.env.ref('mail.group_mail_notification_type_inbox')
|
||||
inbox_users = self.filtered(lambda user: user.notification_type == 'inbox')
|
||||
inbox_users.write({"group_ids": [Command.link(inbox_group.id)]})
|
||||
(self - inbox_users).write({"group_ids": [Command.unlink(inbox_group.id)]})
|
||||
|
||||
@api.depends_context("uid")
|
||||
def _compute_can_edit_role(self):
|
||||
self.can_edit_role = self.env["res.role"].sudo(False).has_access("write")
|
||||
|
||||
@api.depends("email")
|
||||
def _compute_outgoing_mail_server_id(self):
|
||||
mail_servers = self.env['ir.mail_server'].sudo().search(fields.Domain.AND([
|
||||
[('from_filter', 'ilike', '_@_')],
|
||||
fields.Domain.OR([[
|
||||
('from_filter', '=', user.email_normalized),
|
||||
('smtp_user', '=', user.email),
|
||||
('owner_user_id', '=', user._origin.id),
|
||||
] for user in self]),
|
||||
]))
|
||||
mail_servers = {m.owner_user_id: m for m in mail_servers}
|
||||
for user in self:
|
||||
server = mail_servers.get(user) or self.env['ir.mail_server']
|
||||
user.outgoing_mail_server_id = server.id
|
||||
type_options = self._fields['outgoing_mail_server_type']._selection
|
||||
user.outgoing_mail_server_type = (
|
||||
server.smtp_authentication
|
||||
if server.smtp_authentication in type_options
|
||||
else 'default'
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# CRUD
|
||||
|
|
@ -59,46 +154,69 @@ class Users(models.Model):
|
|||
|
||||
@property
|
||||
def SELF_READABLE_FIELDS(self):
|
||||
return super().SELF_READABLE_FIELDS + ['notification_type']
|
||||
return super().SELF_READABLE_FIELDS + [
|
||||
"can_edit_role",
|
||||
"is_out_of_office",
|
||||
"notification_type",
|
||||
"out_of_office_from",
|
||||
"out_of_office_message",
|
||||
"out_of_office_to",
|
||||
"role_ids",
|
||||
"has_external_mail_server",
|
||||
"outgoing_mail_server_id",
|
||||
"outgoing_mail_server_type",
|
||||
]
|
||||
|
||||
@property
|
||||
def SELF_WRITEABLE_FIELDS(self):
|
||||
return super().SELF_WRITEABLE_FIELDS + ['notification_type']
|
||||
return super().SELF_WRITEABLE_FIELDS + [
|
||||
"notification_type",
|
||||
"out_of_office_from",
|
||||
"out_of_office_message",
|
||||
"out_of_office_to",
|
||||
]
|
||||
|
||||
@api.model_create_multi
|
||||
def create(self, vals_list):
|
||||
|
||||
users = super(Users, self).create(vals_list)
|
||||
users = super().create(vals_list)
|
||||
|
||||
# log a portal status change (manual tracking)
|
||||
log_portal_access = not self._context.get('mail_create_nolog') and not self._context.get('mail_notrack')
|
||||
log_portal_access = not self.env.context.get('mail_create_nolog') and not self.env.context.get('mail_notrack')
|
||||
if log_portal_access:
|
||||
for user in users:
|
||||
if user.has_group('base.group_portal'):
|
||||
if user._is_portal():
|
||||
body = user._get_portal_access_update_body(True)
|
||||
user.partner_id.message_post(
|
||||
body=body,
|
||||
message_type='notification',
|
||||
subtype_xmlid='mail.mt_note'
|
||||
)
|
||||
# Auto-subscribe to channels unless skip explicitly requested
|
||||
if not self.env.context.get('mail_channel_nosubscribe'):
|
||||
self.env['mail.channel'].search([('group_ids', 'in', users.groups_id.ids)])._subscribe_users_automatically()
|
||||
return users
|
||||
|
||||
def write(self, vals):
|
||||
log_portal_access = 'groups_id' in vals and not self._context.get('mail_create_nolog') and not self._context.get('mail_notrack')
|
||||
log_portal_access = 'group_ids' in vals and not self.env.context.get('mail_create_nolog') and not self.env.context.get('mail_notrack')
|
||||
user_portal_access_dict = {
|
||||
user.id: user.has_group('base.group_portal')
|
||||
user.id: user._is_portal()
|
||||
for user in self
|
||||
} if log_portal_access else {}
|
||||
|
||||
write_res = super(Users, self).write(vals)
|
||||
previous_email_by_user = {}
|
||||
if vals.get('email'):
|
||||
previous_email_by_user = {
|
||||
user: user.email
|
||||
for user in self.filtered(lambda user: bool(user.email_normalized))
|
||||
if user.email_normalized != email_normalize(vals['email'])
|
||||
}
|
||||
if 'notification_type' in vals:
|
||||
user_notification_type_modified = self.filtered(lambda user: user.notification_type != vals['notification_type'])
|
||||
|
||||
write_res = super().write(vals)
|
||||
|
||||
# log a portal status change (manual tracking)
|
||||
if log_portal_access:
|
||||
for user in self:
|
||||
user_has_group = user.has_group('base.group_portal')
|
||||
user_has_group = user._is_portal()
|
||||
portal_access_changed = user_has_group != user_portal_access_dict[user.id]
|
||||
if portal_access_changed:
|
||||
body = user._get_portal_access_update_body(user_has_group)
|
||||
|
|
@ -108,35 +226,128 @@ class Users(models.Model):
|
|||
subtype_xmlid='mail.mt_note'
|
||||
)
|
||||
|
||||
if 'active' in vals and not vals['active']:
|
||||
self._unsubscribe_from_non_public_channels()
|
||||
sel_groups = [vals[k] for k in vals if is_selection_groups(k) and vals[k]]
|
||||
if vals.get('groups_id'):
|
||||
# form: {'group_ids': [(3, 10), (3, 3), (4, 10), (4, 3)]} or {'group_ids': [(6, 0, [ids]}
|
||||
user_group_ids = [command[1] for command in vals['groups_id'] if command[0] == 4]
|
||||
user_group_ids += [id for command in vals['groups_id'] if command[0] == 6 for id in command[2]]
|
||||
self.env['mail.channel'].search([('group_ids', 'in', user_group_ids)])._subscribe_users_automatically()
|
||||
elif sel_groups:
|
||||
self.env['mail.channel'].search([('group_ids', 'in', sel_groups)])._subscribe_users_automatically()
|
||||
if 'login' in vals:
|
||||
self._notify_security_setting_update(
|
||||
_("Security Update: Login Changed"),
|
||||
_("Your account login has been updated"),
|
||||
)
|
||||
if 'password' in vals:
|
||||
self._notify_security_setting_update(
|
||||
_("Security Update: Password Changed"),
|
||||
_("Your account password has been updated"),
|
||||
)
|
||||
if 'email' in vals:
|
||||
# when the email is modified, we want notify the previous address (and not the new one)
|
||||
for user, previous_email in previous_email_by_user.items():
|
||||
self._notify_security_setting_update(
|
||||
_("Security Update: Email Changed"),
|
||||
_(
|
||||
"Your account email has been changed from %(old_email)s to %(new_email)s.",
|
||||
old_email=previous_email,
|
||||
new_email=user.email,
|
||||
),
|
||||
mail_values={'email_to': previous_email},
|
||||
suggest_password_reset=False,
|
||||
)
|
||||
if "notification_type" in vals:
|
||||
for user in user_notification_type_modified:
|
||||
Store(bus_channel=user).add(user, "notification_type").bus_send()
|
||||
|
||||
return write_res
|
||||
|
||||
def unlink(self):
|
||||
self._unsubscribe_from_non_public_channels()
|
||||
return super().unlink()
|
||||
def action_archive(self):
|
||||
activities_to_delete = self.env['mail.activity'].sudo().search([('user_id', 'in', self.ids)])
|
||||
activities_to_delete.unlink()
|
||||
return super().action_archive()
|
||||
|
||||
def _unsubscribe_from_non_public_channels(self):
|
||||
""" This method un-subscribes users from group restricted channels. Main purpose
|
||||
of this method is to prevent sending internal communication to archived / deleted users.
|
||||
We do not un-subscribes users from public channels because in most common cases,
|
||||
public channels are mailing list (e-mail based) and so users should always receive
|
||||
updates from public channels until they manually un-subscribe themselves.
|
||||
"""
|
||||
current_cm = self.env['mail.channel.member'].sudo().search([
|
||||
('partner_id', 'in', self.partner_id.ids),
|
||||
])
|
||||
current_cm.filtered(
|
||||
lambda cm: (cm.channel_id.channel_type == 'channel' and cm.channel_id.group_public_id)
|
||||
).unlink()
|
||||
def _notify_security_setting_update(self, subject, content, mail_values=None, **kwargs):
|
||||
""" This method is meant to be called whenever a sensitive update is done on the user's account.
|
||||
It will send an email to the concerned user warning him about this change and making some security suggestions.
|
||||
|
||||
:param str subject: The subject of the sent email (e.g: 'Security Update: Password Changed')
|
||||
:param str content: The text to embed within the email template (e.g: 'Your password has been changed')
|
||||
:param kwargs: 'suggest_password_reset' key:
|
||||
Whether or not to suggest the end-user to reset
|
||||
his password in the email sent.
|
||||
Defaults to True. """
|
||||
|
||||
mail_create_values = []
|
||||
for user in self:
|
||||
body_html = self.env['mail.render.mixin']._render_template(
|
||||
'mail.account_security_alert',
|
||||
model='res.users',
|
||||
res_ids=user.ids,
|
||||
engine='qweb_view',
|
||||
options={'post_process': True},
|
||||
add_context=user._notify_security_setting_update_prepare_values(content, **kwargs),
|
||||
)[user.id]
|
||||
|
||||
body_html = self.env['mail.render.mixin']._render_encapsulate(
|
||||
'mail.mail_notification_light',
|
||||
body_html,
|
||||
add_context={
|
||||
'model_description': _('Account'),
|
||||
},
|
||||
context_record=user,
|
||||
)
|
||||
|
||||
vals = {
|
||||
'auto_delete': True,
|
||||
'body_html': body_html,
|
||||
'author_id': self.env.user.partner_id.id,
|
||||
'email_from': (
|
||||
user.company_id.partner_id.email_formatted or
|
||||
self.env.user.email_formatted or
|
||||
self.env.ref('base.user_root').email_formatted
|
||||
),
|
||||
'email_to': kwargs.get('force_email') or user.email_formatted,
|
||||
'subject': subject,
|
||||
}
|
||||
|
||||
if mail_values:
|
||||
vals.update(mail_values)
|
||||
|
||||
mail_create_values.append(vals)
|
||||
|
||||
mails = self.env['mail.mail'].sudo().create(mail_create_values)
|
||||
with contextlib.suppress(Exception):
|
||||
mails.send()
|
||||
return mails
|
||||
|
||||
def _notify_security_setting_update_prepare_values(self, content, **kwargs):
|
||||
""""Prepare rendering values for the 'mail.account_security_alert' qweb template."""
|
||||
reset_password_enabled = str2bool(self.env['ir.config_parameter'].sudo().get_param("auth_signup.reset_password", True))
|
||||
|
||||
values = {
|
||||
'browser': False,
|
||||
'content': content,
|
||||
'event_datetime': fields.Datetime.now(),
|
||||
'ip_address': False,
|
||||
'location_address': False,
|
||||
'suggest_password_reset': kwargs.get('suggest_password_reset', True) and reset_password_enabled,
|
||||
'user': self,
|
||||
'useros': False,
|
||||
}
|
||||
if not request:
|
||||
return values
|
||||
|
||||
city = request.geoip.get('city') or False
|
||||
region = request.geoip.get('region_name') or False
|
||||
country = request.geoip.get('country') or False
|
||||
if country:
|
||||
if region and city:
|
||||
values['location_address'] = _("Near %(city)s, %(region)s, %(country)s", city=city, region=region, country=country)
|
||||
elif region:
|
||||
values['location_address'] = _("Near %(region)s, %(country)s", region=region, country=country)
|
||||
else:
|
||||
values['location_address'] = _("In %(country)s", country=country)
|
||||
values['ip_address'] = request.httprequest.environ['REMOTE_ADDR']
|
||||
if request.httprequest.user_agent:
|
||||
if request.httprequest.user_agent.browser:
|
||||
values['browser'] = request.httprequest.user_agent.browser.capitalize()
|
||||
if request.httprequest.user_agent.platform:
|
||||
values['useros'] = request.httprequest.user_agent.platform.capitalize()
|
||||
return values
|
||||
|
||||
def _get_portal_access_update_body(self, access_granted):
|
||||
body = _('Portal Access Granted') if access_granted else _('Portal Access Revoked')
|
||||
|
|
@ -162,79 +373,288 @@ class Users(models.Model):
|
|||
else:
|
||||
users_to_blacklist = []
|
||||
|
||||
super(Users, self)._deactivate_portal_user(**post)
|
||||
super()._deactivate_portal_user(**post)
|
||||
|
||||
for user, user_email in users_to_blacklist:
|
||||
blacklist = self.env['mail.blacklist']._add(user_email)
|
||||
blacklist._message_log(
|
||||
body=_('Blocked by deletion of portal account %(portal_user_name)s by %(user_name)s (#%(user_id)s)',
|
||||
user_name=current_user.name, user_id=current_user.id,
|
||||
portal_user_name=user.name),
|
||||
self.env['mail.blacklist']._add(
|
||||
user_email,
|
||||
message=_('Blocked by deletion of portal account %(portal_user_name)s by %(user_name)s (#%(user_id)s)',
|
||||
user_name=current_user.name, user_id=current_user.id,
|
||||
portal_user_name=user.name)
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# DISCUSS
|
||||
# ------------------------------------------------------------
|
||||
|
||||
def _init_messaging(self):
|
||||
@api.model
|
||||
def _init_store_data(self, store: Store):
|
||||
"""Initialize the store of the user."""
|
||||
xmlid_to_res_id = self.env["ir.model.data"]._xmlid_to_res_id
|
||||
# sudo: res.partner - exposing OdooBot data is considered acceptable
|
||||
odoobot = self.env.ref("base.partner_root").sudo()
|
||||
if not self.env.user._is_public():
|
||||
odoobot = odoobot.with_prefetch((odoobot + self.env.user.partner_id).ids)
|
||||
store.add_global_values(
|
||||
action_discuss_id=xmlid_to_res_id("mail.action_discuss"),
|
||||
hasLinkPreviewFeature=self.env["mail.link.preview"]._is_link_preview_enabled(),
|
||||
internalUserGroupId=self.env.ref("base.group_user").id,
|
||||
mt_comment=xmlid_to_res_id("mail.mt_comment"),
|
||||
mt_note=xmlid_to_res_id("mail.mt_note"),
|
||||
odoobot=Store.One(odoobot),
|
||||
)
|
||||
if not self.env.user._is_public():
|
||||
settings = self.env["res.users.settings"]._find_or_create_for_user(self.env.user)
|
||||
store.add_global_values(
|
||||
self_partner=Store.One(
|
||||
self.env.user.partner_id,
|
||||
[
|
||||
"active",
|
||||
"avatar_128",
|
||||
"im_status",
|
||||
Store.One(
|
||||
"main_user_id",
|
||||
[
|
||||
Store.Attr("is_admin", lambda u: u._is_admin()),
|
||||
"notification_type",
|
||||
"partner_id",
|
||||
"share",
|
||||
"signature",
|
||||
],
|
||||
),
|
||||
"name",
|
||||
],
|
||||
),
|
||||
settings=settings._res_users_settings_format(),
|
||||
)
|
||||
if guest := self.env["mail.guest"]._get_guest_from_context():
|
||||
# sudo() => adding current guest data is acceptable
|
||||
store.add_global_values(self_guest=Store.One(guest.sudo(), ["avatar_128", "name"]))
|
||||
|
||||
def _init_messaging(self, store: Store):
|
||||
self.ensure_one()
|
||||
partner_root = self.env.ref('base.partner_root')
|
||||
values = {
|
||||
'channels': self.partner_id._get_channels_as_member().channel_info(),
|
||||
'companyName': self.env.company.name,
|
||||
'currentGuest': False,
|
||||
'current_partner': self.partner_id.mail_partner_format().get(self.partner_id),
|
||||
'current_user_id': self.id,
|
||||
'current_user_settings': self.env['res.users.settings']._find_or_create_for_user(self)._res_users_settings_format(),
|
||||
'hasLinkPreviewFeature': self.env['mail.link.preview']._is_link_preview_enabled(),
|
||||
'internalUserGroupId': self.env.ref('base.group_user').id,
|
||||
'menu_id': self.env['ir.model.data']._xmlid_to_res_id('mail.menu_root_discuss'),
|
||||
'needaction_inbox_counter': self.partner_id._get_needaction_count(),
|
||||
'partner_root': partner_root.sudo().mail_partner_format().get(partner_root),
|
||||
'shortcodes': self.env['mail.shortcode'].sudo().search_read([], ['source', 'substitution']),
|
||||
'starred_counter': self.env['mail.message'].search_count([('starred_partner_ids', 'in', self.partner_id.ids)]),
|
||||
}
|
||||
return values
|
||||
self = self.with_user(self)
|
||||
# sudo: bus.bus: reading non-sensitive last id
|
||||
bus_last_id = self.env["bus.bus"].sudo()._bus_last_id()
|
||||
store.add_global_values(
|
||||
inbox={
|
||||
"counter": self.partner_id._get_needaction_count(),
|
||||
"counter_bus_id": bus_last_id,
|
||||
"id": "inbox",
|
||||
"model": "mail.box",
|
||||
},
|
||||
starred={
|
||||
"counter": self.env["mail.message"].search_count(
|
||||
[("starred_partner_ids", "in", self.partner_id.ids)]
|
||||
),
|
||||
"counter_bus_id": bus_last_id,
|
||||
"id": "starred",
|
||||
"model": "mail.box",
|
||||
},
|
||||
)
|
||||
|
||||
@api.model
|
||||
def systray_get_activities(self):
|
||||
activities = self.env["mail.activity"].search([("user_id", "=", self.env.uid)])
|
||||
activities_by_record_by_model_name = defaultdict(lambda: defaultdict(lambda: self.env["mail.activity"]))
|
||||
def _get_activity_groups(self):
|
||||
search_limit = int(self.env['ir.config_parameter'].sudo().get_param('mail.activity.systray.limit', 1000))
|
||||
activities = self.env["mail.activity"].search(
|
||||
[("user_id", "=", self.env.uid)],
|
||||
order='id desc', limit=search_limit,
|
||||
)
|
||||
|
||||
user_company_ids = self.env.user.company_ids.ids
|
||||
is_all_user_companies_allowed = set(user_company_ids) == set(self.env.context.get('allowed_company_ids') or [])
|
||||
|
||||
activities_model_groups = defaultdict(lambda: self.env["mail.activity"])
|
||||
activities_rec_groups = defaultdict(lambda: defaultdict(lambda: self.env["mail.activity"]))
|
||||
|
||||
for activity in activities:
|
||||
record = self.env[activity.res_model].browse(activity.res_id)
|
||||
activities_by_record_by_model_name[activity.res_model][record] += activity
|
||||
model_ids = list({self.env["ir.model"]._get(name).id for name in activities_by_record_by_model_name.keys()})
|
||||
if activity.res_model:
|
||||
activities_rec_groups[activity.res_model][activity.res_id] += activity
|
||||
else:
|
||||
activities_rec_groups["mail.activity"][activity.id] += activity
|
||||
model_activity_states = {
|
||||
'mail.activity': {'overdue_count': 0, 'today_count': 0, 'planned_count': 0, 'total_count': 0}
|
||||
}
|
||||
for model_name, activities_by_record in activities_rec_groups.items():
|
||||
res_ids = activities_by_record.keys()
|
||||
Model = self.env[model_name]
|
||||
has_model_access_right = Model.has_access('read')
|
||||
# also filters out non existing records (db cascade)
|
||||
existing = Model.browse(res_ids).exists()
|
||||
if has_model_access_right:
|
||||
allowed_records = existing._filtered_access('read')
|
||||
else:
|
||||
allowed_records = Model
|
||||
unallowed_records = Model.browse(res_ids) - allowed_records
|
||||
# We remove from not allowed records, records that the user has access to through others of his companies
|
||||
if has_model_access_right and unallowed_records and not is_all_user_companies_allowed:
|
||||
unallowed_records -= (unallowed_records & existing).with_context(
|
||||
allowed_company_ids=user_company_ids)._filtered_access('read')
|
||||
model_activity_states[model_name] = {'overdue_count': 0, 'today_count': 0, 'planned_count': 0, 'total_count': 0}
|
||||
for record_id, activities in activities_by_record.items():
|
||||
if record_id in unallowed_records.ids:
|
||||
model_key = 'mail.activity'
|
||||
activities_model_groups['mail.activity'] += activities
|
||||
elif record_id in allowed_records.ids:
|
||||
model_key = model_name
|
||||
activities_model_groups[model_name] += activities
|
||||
elif record_id:
|
||||
continue
|
||||
|
||||
if 'overdue' in activities.mapped('state'):
|
||||
model_activity_states[model_key]['overdue_count'] += 1
|
||||
model_activity_states[model_key]['total_count'] += 1
|
||||
elif 'today' in activities.mapped('state'):
|
||||
model_activity_states[model_key]['today_count'] += 1
|
||||
model_activity_states[model_key]['total_count'] += 1
|
||||
else:
|
||||
model_activity_states[model_key]['planned_count'] += 1
|
||||
|
||||
model_ids = [self.env["ir.model"]._get_id(name) for name in activities_model_groups]
|
||||
user_activities = {}
|
||||
for model_name, activities_by_record in activities_by_record_by_model_name.items():
|
||||
domain = [("id", "in", list({r.id for r in activities_by_record.keys()}))]
|
||||
allowed_records = self.env[model_name].search(domain)
|
||||
if not allowed_records:
|
||||
continue
|
||||
module = self.env[model_name]._original_module
|
||||
for model_name, activities in activities_model_groups.items():
|
||||
Model = self.env[model_name]
|
||||
module = Model._original_module
|
||||
icon = module and modules.module.get_module_icon(module)
|
||||
model = self.env["ir.model"]._get(model_name).with_prefetch(model_ids)
|
||||
user_activities[model_name] = {
|
||||
"id": model.id,
|
||||
"name": model.name,
|
||||
"name": model.name if model_name != "mail.activity" else _("Other activities"),
|
||||
"model": model_name,
|
||||
"type": "activity",
|
||||
"icon": icon,
|
||||
"total_count": 0,
|
||||
"today_count": 0,
|
||||
"overdue_count": 0,
|
||||
"planned_count": 0,
|
||||
"actions": [
|
||||
{
|
||||
"icon": "fa-clock-o",
|
||||
"name": "Summary",
|
||||
}
|
||||
],
|
||||
# activity more important than archived status, active_test is too broad
|
||||
"domain": [('active', 'in', [True, False])] if model_name != "mail.activity" and "active" in Model else [],
|
||||
"total_count": model_activity_states[model_name]['total_count'],
|
||||
"today_count": model_activity_states[model_name]['today_count'],
|
||||
"overdue_count": model_activity_states[model_name]['overdue_count'],
|
||||
"planned_count": model_activity_states[model_name]['planned_count'],
|
||||
"view_type": getattr(Model, '_systray_view', 'list'),
|
||||
}
|
||||
for record, activities in activities_by_record.items():
|
||||
if record not in allowed_records:
|
||||
continue
|
||||
for activity in activities:
|
||||
user_activities[model_name]["%s_count" % activity.state] += 1
|
||||
if activity.state in ("today", "overdue"):
|
||||
user_activities[model_name]["total_count"] += 1
|
||||
if model_name == 'mail.activity':
|
||||
user_activities[model_name]['activity_ids'] = activities.ids
|
||||
return list(user_activities.values())
|
||||
|
||||
def _get_store_avatar_card_fields(self, target):
|
||||
return ["share", Store.One("partner_id", self.partner_id._get_store_avatar_card_fields(target))]
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Mail Servers
|
||||
# ------------------------------------------------------------
|
||||
|
||||
@api.autovacuum
|
||||
def _gc_personal_mail_servers(self):
|
||||
"""In case the user change their email, we need to delete the old personal servers."""
|
||||
self.env['ir.mail_server'].with_context(active_test=False) \
|
||||
.search([('owner_user_id', '!=', False)]) \
|
||||
.filtered(lambda s: s.owner_user_id.outgoing_mail_server_id != s or not s.active) \
|
||||
.unlink()
|
||||
|
||||
@api.model
|
||||
def _get_mail_server_values(self, server_type):
|
||||
return {}
|
||||
|
||||
@api.model
|
||||
def action_setup_outgoing_mail_server(self, server_type):
|
||||
"""Configure the outgoing mail servers."""
|
||||
user = self.env.user
|
||||
if not user.has_external_mail_server:
|
||||
raise UserError(_('You are not allowed to create a personal mail server.'))
|
||||
|
||||
if not user._is_internal():
|
||||
raise UserError(_('Only internal users can configure a personal mail server.'))
|
||||
|
||||
existing_mail_server = self.env["ir.mail_server"].sudo() \
|
||||
.with_context(active_test=False).search([("owner_user_id", "=", user.id)])
|
||||
|
||||
if server_type == 'default':
|
||||
# Use the default server
|
||||
if existing_mail_server:
|
||||
existing_mail_server.unlink()
|
||||
|
||||
return {
|
||||
"type": "ir.actions.client",
|
||||
"tag": "display_notification",
|
||||
"params": {
|
||||
"message": _("Switching back to the default server."),
|
||||
"type": "warning",
|
||||
},
|
||||
}
|
||||
|
||||
email = user.email
|
||||
if not email:
|
||||
raise UserError(_("Please set your email before connecting your mail server."))
|
||||
|
||||
normalized_email = tools.email_normalize(email)
|
||||
if (
|
||||
not normalized_email
|
||||
or "@" not in normalized_email
|
||||
# Be sure it's well parsed by `ir.mail_server`
|
||||
or self.env["ir.mail_server"]._parse_from_filter(normalized_email)
|
||||
!= [normalized_email]
|
||||
):
|
||||
raise UserError(_("Wrong email address %s.", email))
|
||||
|
||||
# Check that the user's email is not used by `mail.alias.domain` to avoid leaking the outgoing emails
|
||||
alias_domain = self.env["mail.alias.domain"].sudo().search([])
|
||||
cli_default_from = tools.config.get("email_from")
|
||||
match_from_filter = self.env["ir.mail_server"]._match_from_filter
|
||||
if (
|
||||
any(match_from_filter(e, normalized_email) for e in alias_domain.mapped("default_from_email"))
|
||||
or (cli_default_from and match_from_filter(cli_default_from, normalized_email))
|
||||
):
|
||||
raise UserError(_("Your email address is used by an alias domain, and so you can not create a mail server for it."))
|
||||
|
||||
if (
|
||||
server_type == user.outgoing_mail_server_type
|
||||
and user.outgoing_mail_server_id.from_filter == normalized_email
|
||||
and user.outgoing_mail_server_id.smtp_user == normalized_email
|
||||
):
|
||||
# Re-connect the account
|
||||
return self._get_mail_server_setup_end_action(user.outgoing_mail_server_id)
|
||||
|
||||
if existing_mail_server:
|
||||
existing_mail_server.unlink()
|
||||
|
||||
values = {
|
||||
# Will be un-archived once logged in
|
||||
# Archived personal server will be deleted in GC CRON
|
||||
# to clean pending connection that didn't finish
|
||||
"active": False,
|
||||
"name": _("%s's outgoing email", user.name),
|
||||
"smtp_user": normalized_email,
|
||||
"smtp_pass": False,
|
||||
"from_filter": normalized_email,
|
||||
"smtp_port": 587,
|
||||
"smtp_encryption": "starttls",
|
||||
"owner_user_id": user.id,
|
||||
**self._get_mail_server_values(server_type),
|
||||
}
|
||||
smtp_server = self.env["ir.mail_server"].sudo().create(values)
|
||||
return self._get_mail_server_setup_end_action(smtp_server)
|
||||
|
||||
@api.model
|
||||
def action_test_outgoing_mail_server(self):
|
||||
user = self.env.user
|
||||
if not user.has_external_mail_server:
|
||||
raise UserError(_('You are not allowed to test personal mail servers.'))
|
||||
|
||||
if not user.has_group('base.group_user'):
|
||||
raise UserError(_('Only internal users can configure personal mail servers.'))
|
||||
|
||||
server_sudo = user.outgoing_mail_server_id.sudo()
|
||||
if not server_sudo:
|
||||
raise UserError(_('No mail server configured'))
|
||||
server_sudo.test_smtp_connection()
|
||||
return {
|
||||
'type': 'ir.actions.client',
|
||||
'tag': 'display_notification',
|
||||
'params': {
|
||||
'message': _('Connection Test Successful!'),
|
||||
'type': 'success',
|
||||
},
|
||||
}
|
||||
|
||||
@api.model
|
||||
def _get_mail_server_setup_end_action(self, smtp_server):
|
||||
raise NotImplementedError()
|
||||
|
|
|
|||
|
|
@ -1,55 +1,40 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models
|
||||
|
||||
|
||||
class ResUsersSettings(models.Model):
|
||||
_name = 'res.users.settings'
|
||||
_description = 'User Settings'
|
||||
_rec_name = 'user_id'
|
||||
_inherit = 'res.users.settings'
|
||||
|
||||
user_id = fields.Many2one('res.users', string="User", required=True, ondelete='cascade', domain=[("res_users_settings_id", "=", False)])
|
||||
is_discuss_sidebar_category_channel_open = fields.Boolean(string="Is discuss sidebar category channel open?", default=True)
|
||||
is_discuss_sidebar_category_chat_open = fields.Boolean(string="Is discuss sidebar category chat open?", default=True)
|
||||
|
||||
# RTC
|
||||
push_to_talk_key = fields.Char(string="Push-To-Talk shortcut", help="String formatted to represent a key with modifiers following this pattern: shift.ctrl.alt.key, e.g: truthy.1.true.b")
|
||||
use_push_to_talk = fields.Boolean(string="Use the push to talk feature", default=False)
|
||||
voice_active_duration = fields.Integer(string="Duration of voice activity in ms", help="How long the audio broadcast will remain active after passing the volume threshold")
|
||||
voice_active_duration = fields.Integer(string="Duration of voice activity in ms", default=200, help="How long the audio broadcast will remain active after passing the volume threshold")
|
||||
volume_settings_ids = fields.One2many('res.users.settings.volumes', 'user_setting_id', string="Volumes of other partners")
|
||||
|
||||
_sql_constraints = [
|
||||
('unique_user_id', 'UNIQUE(user_id)', 'One user should only have one mail user settings.')
|
||||
]
|
||||
# Notifications
|
||||
channel_notifications = fields.Selection(
|
||||
[("all", "All Messages"), ("no_notif", "Nothing")],
|
||||
"Channel Notifications",
|
||||
help="This setting will only be applied to channels. Mentions only if not specified.",
|
||||
)
|
||||
|
||||
@api.model
|
||||
def _find_or_create_for_user(self, user):
|
||||
settings = user.sudo().res_users_settings_ids
|
||||
if not settings:
|
||||
settings = self.sudo().create({'user_id': user.id})
|
||||
return settings
|
||||
|
||||
def _res_users_settings_format(self, fields_to_format=None):
|
||||
self.ensure_one()
|
||||
if not fields_to_format:
|
||||
fields_to_format = [name for name, field in self._fields.items() if name == 'id' or not field.automatic]
|
||||
res = self._read_format(fnames=fields_to_format)[0]
|
||||
if 'user_id' in fields_to_format:
|
||||
res['user_id'] = {'id': self.user_id.id}
|
||||
def _format_settings(self, fields_to_format):
|
||||
res = super()._format_settings(fields_to_format)
|
||||
if 'volume_settings_ids' in fields_to_format:
|
||||
volume_settings = self.volume_settings_ids._discuss_users_settings_volume_format()
|
||||
res['volume_settings_ids'] = [('insert', volume_settings)]
|
||||
res.pop('volume_settings_ids', None)
|
||||
res['volumes'] = [('ADD', volume_settings)]
|
||||
return res
|
||||
|
||||
def set_res_users_settings(self, new_settings):
|
||||
self.ensure_one()
|
||||
changed_settings = {}
|
||||
for setting in new_settings.keys():
|
||||
if setting in self._fields and new_settings[setting] != self[setting]:
|
||||
changed_settings[setting] = new_settings[setting]
|
||||
self.write(changed_settings)
|
||||
self.env['bus.bus']._sendone(self.user_id.partner_id, 'res.users.settings/insert', self._res_users_settings_format([*changed_settings.keys(), 'id']))
|
||||
formatted = super().set_res_users_settings(new_settings)
|
||||
self._bus_send("res.users.settings", formatted)
|
||||
return formatted
|
||||
|
||||
def set_volume_setting(self, partner_id, volume, guest_id=None):
|
||||
"""
|
||||
|
|
@ -72,4 +57,6 @@ class ResUsersSettings(models.Model):
|
|||
'partner_id': partner_id,
|
||||
'guest_id': guest_id,
|
||||
})
|
||||
self.env['bus.bus']._sendone(self.user_id.partner_id, 'res.users.settings.volumes/insert', volume_setting._discuss_users_settings_volume_format())
|
||||
self._bus_send(
|
||||
"res.users.settings.volumes", volume_setting._discuss_users_settings_volume_format()
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import fields, models
|
||||
from odoo import api, fields, models
|
||||
|
||||
|
||||
class ResUsersSettingsVolumes(models.Model):
|
||||
|
|
@ -14,26 +14,30 @@ class ResUsersSettingsVolumes(models.Model):
|
|||
guest_id = fields.Many2one('res.partner', ondelete='cascade', index=True)
|
||||
volume = fields.Float(default=0.5, help="Ranges between 0.0 and 1.0, scale depends on the browser implementation")
|
||||
|
||||
def init(self):
|
||||
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS res_users_settings_volumes_partner_unique ON %s (user_setting_id, partner_id) WHERE partner_id IS NOT NULL" % self._table)
|
||||
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS res_users_settings_volumes_guest_unique ON %s (user_setting_id, guest_id) WHERE guest_id IS NOT NULL" % self._table)
|
||||
_partner_unique = models.UniqueIndex("(user_setting_id, partner_id) WHERE partner_id IS NOT NULL")
|
||||
_guest_unique = models.UniqueIndex("(user_setting_id, guest_id) WHERE guest_id IS NOT NULL")
|
||||
_partner_or_guest_exists = models.Constraint(
|
||||
'CHECK((partner_id IS NOT NULL AND guest_id IS NULL) OR (partner_id IS NULL AND guest_id IS NOT NULL))',
|
||||
'A volume setting must have a partner or a guest.',
|
||||
)
|
||||
|
||||
_sql_constraints = [
|
||||
("partner_or_guest_exists", "CHECK((partner_id IS NOT NULL AND guest_id IS NULL) OR (partner_id IS NULL AND guest_id IS NOT NULL))", "A volume setting must have a partner or a guest."),
|
||||
]
|
||||
@api.depends('user_setting_id', 'partner_id', 'guest_id')
|
||||
def _compute_display_name(self):
|
||||
for rec in self:
|
||||
rec.display_name = f'{rec.user_setting_id.user_id.name} - {rec.partner_id.name or rec.guest_id.name}'
|
||||
|
||||
def _discuss_users_settings_volume_format(self):
|
||||
return [{
|
||||
'id': volume_setting.id,
|
||||
'volume': volume_setting.volume,
|
||||
'guest_id': {
|
||||
'id': volume_setting.guest_id.id,
|
||||
'name': volume_setting.guest_id.name,
|
||||
} if volume_setting.guest_id else [('clear',)],
|
||||
'partner_id': {
|
||||
'id': volume_setting.partner_id.id,
|
||||
'name': volume_setting.partner_id.name,
|
||||
} if volume_setting.partner_id else [('clear',)],
|
||||
"partner_id": {
|
||||
"id": volume_setting.partner_id.id,
|
||||
"name": volume_setting.partner_id.name,
|
||||
} if volume_setting.partner_id else None,
|
||||
"guest_id": {
|
||||
"id": volume_setting.guest_id.id,
|
||||
"name": volume_setting.guest_id.name,
|
||||
} if volume_setting.guest_id else None,
|
||||
'user_setting_id': {
|
||||
'id': volume_setting.user_setting_id.id,
|
||||
},
|
||||
|
|
|
|||
|
|
@ -7,15 +7,14 @@ from lxml import etree
|
|||
|
||||
from odoo import api, fields, models, tools, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.modules import get_module_resource
|
||||
from odoo.modules.module import get_resource_from_path, get_resource_path
|
||||
from odoo.modules.module import get_resource_from_path
|
||||
from odoo.tools.convert import xml_import
|
||||
from odoo.tools.misc import file_open
|
||||
from odoo.tools.translate import TranslationImporter
|
||||
from odoo.tools.misc import file_path
|
||||
from odoo.tools.translate import TranslationImporter, get_po_paths
|
||||
|
||||
|
||||
class TemplateResetMixin(models.AbstractModel):
|
||||
_name = "template.reset.mixin"
|
||||
_name = 'template.reset.mixin'
|
||||
_description = 'Template Reset Mixin'
|
||||
|
||||
template_fs = fields.Char(
|
||||
|
|
@ -40,10 +39,9 @@ class TemplateResetMixin(models.AbstractModel):
|
|||
def _load_records_write(self, values):
|
||||
# OVERRIDE to make the fields blank that are not present in xml record
|
||||
if self.env.context.get('reset_template'):
|
||||
# We don't want to change anything for magic columns, values present in XML record, and
|
||||
# special fields self.CONCURRENCY_CHECK_FIELD (__last_update) and 'template_fs'
|
||||
# We don't want to change anything for magic columns, values present in XML record, and 'template_fs'
|
||||
fields_in_xml_record = values.keys()
|
||||
fields_not_to_touch = set(models.MAGIC_COLUMNS) | fields_in_xml_record | {self.CONCURRENCY_CHECK_FIELD, 'template_fs'}
|
||||
fields_not_to_touch = set(models.MAGIC_COLUMNS) | fields_in_xml_record | {'template_fs'}
|
||||
fields_to_empty = self._fields.keys() - fields_not_to_touch
|
||||
# For the fields not defined in xml record, if they have default values, we should not
|
||||
# enforce empty values for them and the default values should be kept
|
||||
|
|
@ -63,23 +61,9 @@ class TemplateResetMixin(models.AbstractModel):
|
|||
def _override_translation_term(self, module_name, xml_ids):
|
||||
translation_importer = TranslationImporter(self.env.cr)
|
||||
|
||||
for code, _ in self.env['res.lang'].get_installed():
|
||||
lang_code = tools.get_iso_codes(code)
|
||||
# In case of sub languages (e.g fr_BE), load the base language first, (e.g fr.po) and
|
||||
# then load the main translation file (e.g fr_BE.po)
|
||||
|
||||
# Step 1: reset translation terms with base language file
|
||||
if '_' in lang_code:
|
||||
base_lang_code = lang_code.split('_')[0]
|
||||
base_trans_file = get_module_resource(module_name, 'i18n', base_lang_code + '.po')
|
||||
if base_trans_file:
|
||||
translation_importer.load_file(base_trans_file, code, xmlids=xml_ids)
|
||||
|
||||
# Step 2: reset translation file with main language file (can possibly override the
|
||||
# terms coming from the base language)
|
||||
trans_file = get_module_resource(module_name, 'i18n', lang_code + '.po')
|
||||
if trans_file:
|
||||
translation_importer.load_file(trans_file, code, xmlids=xml_ids)
|
||||
for lang, _ in self.env['res.lang'].get_installed():
|
||||
for po_path in get_po_paths(module_name, lang):
|
||||
translation_importer.load_file(po_path, lang, xmlids=xml_ids)
|
||||
|
||||
translation_importer.save(overwrite=True, force_overwrite=True)
|
||||
|
||||
|
|
@ -94,7 +78,7 @@ class TemplateResetMixin(models.AbstractModel):
|
|||
for template in self.filtered('template_fs'):
|
||||
external_id = template.get_external_id().get(template.id)
|
||||
module, xml_id = external_id.split('.')
|
||||
fullpath = get_resource_path(*template.template_fs.split('/'))
|
||||
fullpath = file_path(template.template_fs)
|
||||
if fullpath:
|
||||
for field_name, field in template._fields.items():
|
||||
if field.translate is True:
|
||||
|
|
@ -104,7 +88,7 @@ class TemplateResetMixin(models.AbstractModel):
|
|||
# We don't have a way to pass context while loading record from a file, so we use this hack
|
||||
# to pass the context key that is needed to reset the fields not available in data file
|
||||
rec.set('context', json.dumps({'reset_template': 'True'}))
|
||||
obj = xml_import(template.env.cr, module, {}, mode='init', xml_filename=fullpath)
|
||||
obj = xml_import(template.env, module, {}, mode='init', xml_filename=fullpath)
|
||||
obj._tag_record(rec)
|
||||
template._override_translation_term(module, [xml_id, external_id])
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -1,25 +1,22 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
import requests
|
||||
import werkzeug.urls
|
||||
|
||||
from ast import literal_eval
|
||||
|
||||
from odoo import api, release, SUPERUSER_ID
|
||||
import requests
|
||||
|
||||
from odoo import api, fields, release, SUPERUSER_ID
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.models import AbstractModel
|
||||
from odoo.tools.translate import _
|
||||
from odoo.tools import config, misc, ustr
|
||||
from odoo.tools import config
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PublisherWarrantyContract(AbstractModel):
|
||||
_name = "publisher_warranty.contract"
|
||||
class Publisher_WarrantyContract(AbstractModel):
|
||||
_name = 'publisher_warranty.contract'
|
||||
_description = 'Publisher Warranty Contract'
|
||||
|
||||
@api.model
|
||||
|
|
@ -29,16 +26,14 @@ class PublisherWarrantyContract(AbstractModel):
|
|||
|
||||
dbuuid = IrParamSudo.get_param('database.uuid')
|
||||
db_create_date = IrParamSudo.get_param('database.create_date')
|
||||
limit_date = datetime.datetime.now()
|
||||
limit_date = limit_date - datetime.timedelta(15)
|
||||
limit_date_str = limit_date.strftime(misc.DEFAULT_SERVER_DATETIME_FORMAT)
|
||||
limit_date = fields.Datetime.now() - datetime.timedelta(15)
|
||||
nbr_users = Users.search_count([('active', '=', True)])
|
||||
nbr_active_users = Users.search_count([("login_date", ">=", limit_date_str), ('active', '=', True)])
|
||||
nbr_active_users = Users.search_count([("login_date", ">=", limit_date), ('active', '=', True)])
|
||||
nbr_share_users = 0
|
||||
nbr_active_share_users = 0
|
||||
if "share" in Users._fields:
|
||||
nbr_share_users = Users.search_count([("share", "=", True), ('active', '=', True)])
|
||||
nbr_active_share_users = Users.search_count([("share", "=", True), ("login_date", ">=", limit_date_str), ('active', '=', True)])
|
||||
nbr_active_share_users = Users.search_count([("share", "=", True), ("login_date", ">=", limit_date), ('active', '=', True)])
|
||||
user = self.env.user
|
||||
domain = [('application', '=', True), ('state', 'in', ['installed', 'to upgrade', 'to remove'])]
|
||||
apps = self.env['ir.module.module'].sudo().search_read(domain, ['name'])
|
||||
|
|
@ -52,7 +47,7 @@ class PublisherWarrantyContract(AbstractModel):
|
|||
"nbr_active_users": nbr_active_users,
|
||||
"nbr_share_users": nbr_share_users,
|
||||
"nbr_active_share_users": nbr_active_share_users,
|
||||
"dbname": self._cr.dbname,
|
||||
"dbname": self.env.cr.dbname,
|
||||
"db_create_date": db_create_date,
|
||||
"version": release.version,
|
||||
"language": user.lang,
|
||||
|
|
@ -71,7 +66,7 @@ class PublisherWarrantyContract(AbstractModel):
|
|||
Utility method to send a publisher warranty get logs messages.
|
||||
"""
|
||||
msg = self._get_message()
|
||||
arguments = {'arg0': ustr(msg), "action": "update"}
|
||||
arguments = {'arg0': str(msg), "action": "update"}
|
||||
|
||||
url = config.get("publisher_warranty_url")
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue