19.0 vanilla

This commit is contained in:
Ernad Husremovic 2026-03-09 09:32:12 +01:00
parent 79f83631d5
commit 73afc09215
6267 changed files with 1534193 additions and 1130106 deletions

View file

@ -1,9 +1,11 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import itertools
import time
from odoo import api, fields, models, _
from odoo import api, fields, models
from odoo.tools.sql import SQL
class ProductProduct(models.Model):
@ -46,51 +48,81 @@ class ProductProduct(models.Model):
expected_margin_rate = fields.Float(compute='_compute_product_margin_fields_values', string='Expected Margin (%)',
help="Expected margin * 100 / Expected Sale")
# Aggregate specs ("<field>:sum") for the non-stored margin fields computed by
# _compute_product_margin_fields_values. They have no SQL representation
# (_read_group_select emits NULL for them), so the _read_group /
# _read_grouping_sets overrides below aggregate them in Python instead.
_SPECIAL_SUM_AGGREGATES = {f"{name}:sum" for name in (
'turnover', 'sale_avg_price', 'sale_num_invoiced', 'purchase_num_invoiced',
'sales_gap', 'purchase_gap', 'total_cost', 'sale_expected', 'normal_cost',
'total_margin', 'expected_margin', 'total_margin_rate', 'expected_margin_rate',
)}
def _read_group_select(self, aggregate_spec, query):
    """Neutralize the SQL term for the synthetic margin aggregates.

    The specs listed in ``_SPECIAL_SUM_AGGREGATES`` are aggregated in
    Python afterwards (see ``_read_group``); at the SQL level they only
    need to be accepted without ``_description_aggregator()`` failing.
    Every other spec is deferred to the standard implementation.
    """
    if aggregate_spec not in self._SPECIAL_SUM_AGGREGATES:
        return super()._read_group_select(aggregate_spec, query)
    # Placeholder term: the real value is summed in Python afterwards.
    return SQL("NULL")
@api.model
def _read_group(self, domain, groupby=(), aggregates=(), having=(), offset=0, limit=None, order=None):
    """
    Inherit _read_group to calculate the sum of the non-stored fields, as it is not automatically done anymore through the XML.

    The special margin aggregates (see ``_SPECIAL_SUM_AGGREGATES``) cannot be
    summed in SQL: the underlying fields are computed, non-stored.  So the
    base ``_read_group`` is run without them (but with an extra
    ``id:recordset`` aggregate), the margin fields are computed in Python for
    every record of every group, and the requested sums are spliced back into
    each result row at the position callers expect.
    """
    if self._SPECIAL_SUM_AGGREGATES.isdisjoint(aggregates):
        # No special aggregate requested: plain delegation.
        return super()._read_group(domain, groupby, aggregates, having, offset, limit, order)

    # Replace the special aggregates by the records of each group.
    base_aggregates = [*(agg for agg in aggregates if agg not in self._SPECIAL_SUM_AGGREGATES), 'id:recordset']
    base_result = super()._read_group(domain, groupby, base_aggregates, having, offset, limit, order)

    # Force the compute of all records to bypass the limit compute batching (PREFETCH_MAX)
    all_records = self.browse().union(*(item[-1] for item in base_result))
    # This line will compute all fields having _compute_product_margin_fields_values
    # as compute method.
    self._fields['turnover'].compute_value(all_records)

    # base_result = [(a1, b1, records), (a2, b2, records), ...]
    result = []
    for *other, records in base_result:
        # Insert each special sum at the index it occupies in the caller's
        # (groupby + aggregates) ordering; inserts happen in increasing
        # index order, so earlier inserts shift later positions correctly.
        for index, spec in enumerate(itertools.chain(groupby, aggregates)):
            if spec in self._SPECIAL_SUM_AGGREGATES:
                field_name = spec.split(':')[0]
                other.insert(index, sum(records.mapped(field_name)))
        result.append(tuple(other))
    return result
def _read_grouping_sets(self, domain, grouping_sets, aggregates=(), order=None):
    """Mirror of the ``_read_group`` override for grouping sets.

    The special margin aggregates (``_SPECIAL_SUM_AGGREGATES``) are removed
    from the SQL query, replaced by an ``id:recordset`` aggregate, computed
    in Python for every record of every group, and re-inserted at the
    position callers expect in each row of each grouping.
    """
    if self._SPECIAL_SUM_AGGREGATES.isdisjoint(aggregates):
        # No special aggregate requested: plain delegation.
        return super()._read_grouping_sets(domain, grouping_sets, aggregates, order)
    base_aggregates = [*(agg for agg in aggregates if agg not in self._SPECIAL_SUM_AGGREGATES), 'id:recordset']
    base_result = super()._read_grouping_sets(domain, grouping_sets, base_aggregates, order)
    # Force the compute of all records to bypass the limit compute batching (PREFETCH_MAX)
    all_records = self.concat(*(item[-1] for row in base_result for item in row))
    # This line will compute all fields having _compute_product_margin_fields_values
    # as compute method.
    all_records._compute_product_margin_fields_values()
    # base_result = [[(a1, b1, records), (a2, b2, records), ...], [(a1, b1, c1, records), (a2, b2, c2, records), ...] ...]
    result = []
    for grouping_spec, grouping in zip(grouping_sets, base_result):
        row = []
        for *other, records in grouping:
            # Insert each special sum at its index in the caller's
            # (grouping_spec + aggregates) ordering; inserts happen in
            # increasing index order, so shifts compose correctly.
            for index, spec in enumerate(itertools.chain(grouping_spec, aggregates)):
                if spec in self._SPECIAL_SUM_AGGREGATES:
                    field_name = spec.split(':')[0]
                    other.insert(index, sum(records.mapped(field_name)))
            row.append(tuple(other))
        result.append(row)
    return result
def _compute_product_margin_fields_values(self):
if not self.ids:
for field_name, field in self._fields.items():
if field.compute == '_compute_product_margin_fields_values':
self[field_name] = False
return
def _compute_product_margin_fields_values(self, field_names=None):
if field_names is None:
field_names = []
date_from = self.env.context.get('date_from', time.strftime('%Y-01-01'))
date_to = self.env.context.get('date_to', time.strftime('%Y-12-31'))
invoice_state = self.env.context.get('invoice_state', 'open_paid')
@ -128,7 +160,7 @@ class ProductProduct(models.Model):
l.quantity * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END) * ((100 - l.discount) * 0.01)
) / NULLIF(SUM(l.quantity * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)), 0) AS avg_unit_price,
SUM(l.quantity * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS num_qty,
SUM(ABS(l.balance) * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS total,
SUM(CASE WHEN i.move_type = 'out_invoice' THEN -l.balance WHEN i.move_type = 'in_invoice' THEN l.balance ELSE -ABS(l.balance) END) AS total,
SUM(l.quantity * pt.list_price * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS sale_expected
FROM account_move_line l
LEFT JOIN account_move i ON (l.move_id = i.id)
@ -165,7 +197,7 @@ class ProductProduct(models.Model):
ctx['force_company'] = company_id
invoice_types = ('in_invoice', 'in_refund')
self.env.cr.execute(sqlstr, (tuple(self.ids), states, payment_states, invoice_types, date_from, date_to, company_id))
for product_id, avg, qty, total, dummy in self.env.cr.fetchall():
for product_id, avg, qty, total, _dummy in self.env.cr.fetchall():
res[product_id]['purchase_avg_price'] = avg and avg or 0.0
res[product_id]['purchase_num_invoiced'] = qty and qty or 0.0
res[product_id]['total_cost'] = total and total or 0.0