17.0 vanilla

This commit is contained in:
Ernad Husremovic 2025-10-03 18:05:14 +02:00
parent 2e65bf056a
commit df627a6bba
328 changed files with 578149 additions and 759311 deletions

View file

@@ -14,7 +14,6 @@ import re
import threading
import time
import uuid
import warnings
from contextlib import contextmanager
from datetime import datetime, timedelta
from inspect import currentframe
@@ -22,12 +21,14 @@ from inspect import currentframe
import psycopg2
import psycopg2.extensions
import psycopg2.extras
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ
from psycopg2.extensions import ISOLATION_LEVEL_REPEATABLE_READ
from psycopg2.pool import PoolError
from psycopg2.sql import SQL, Identifier
from psycopg2.sql import Composable
from werkzeug import urls
import odoo
from . import tools
from .tools import SQL
from .tools.func import frame_codeinfo, locked
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
@@ -49,6 +50,8 @@ re_into = re.compile(r'\binto\s+"?([a-zA-Z_0-9]+)\b', re.IGNORECASE)
sql_counter = 0
MAX_IDLE_TIMEOUT = 60 * 10
class Savepoint:
""" Reifies an active breakpoint, allows :meth:`BaseCursor.savepoint` users
@@ -70,10 +73,10 @@
"""
def __init__(self, cr):
self.name = str(uuid.uuid1())
self._name = Identifier(self.name)
self._name = SQL.identifier(self.name)
self._cr = cr
self.closed = False
cr.execute(SQL('SAVEPOINT {}').format(self._name))
cr.execute(SQL('SAVEPOINT %s', self._name))
# Context-manager entry point: yields the Savepoint object itself so the
# caller can keep a handle on it inside the `with` block.
def __enter__(self):
return self
@@ -86,12 +89,12 @@
self._close(rollback)
def rollback(self):
self._cr.execute(SQL('ROLLBACK TO SAVEPOINT {}').format(self._name))
self._cr.execute(SQL('ROLLBACK TO SAVEPOINT %s', self._name))
def _close(self, rollback):
if rollback:
self.rollback()
self._cr.execute(SQL('RELEASE SAVEPOINT {}').format(self._name))
self._cr.execute(SQL('RELEASE SAVEPOINT %s', self._name))
self.closed = True
@@ -244,11 +247,8 @@ class Cursor(BaseCursor):
"""
IN_MAX = 1000 # decent limit on size of IN queries - guideline = Oracle limit
def __init__(self, pool, dbname, dsn, **kwargs):
def __init__(self, pool, dbname, dsn):
super().__init__()
if 'serialized' in kwargs:
warnings.warn("Since 16.0, 'serialized' parameter is not used anymore.", DeprecationWarning, 2)
assert kwargs.keys() <= {'serialized'}
self.sql_from_log = {}
self.sql_into_log = {}
@@ -310,10 +310,21 @@ class Cursor(BaseCursor):
def _format(self, query, params=None):
encoding = psycopg2.extensions.encodings[self.connection.encoding]
return self._obj.mogrify(query, params).decode(encoding, 'replace')
return self.mogrify(query, params).decode(encoding, 'replace')
# Render a query to the bytes that would be sent to the server, via the
# underlying psycopg2 cursor. Accepts either a plain query with separate
# `params`, or an SQL wrapper object that carries its own code and params.
def mogrify(self, query, params=None):
# An SQL object bundles its own parameters; supplying extra ones as well
# would be a caller bug, hence the assert.
if isinstance(query, SQL):
assert params is None, "Unexpected parameters for SQL query object"
query, params = query.code, query.params
return self._obj.mogrify(query, params)
def execute(self, query, params=None, log_exceptions=True):
global sql_counter
if isinstance(query, SQL):
assert params is None, "Unexpected parameters for SQL query object"
query, params = query.code, query.params
if params and not isinstance(params, (tuple, list, dict)):
# psycopg2's TypeError is not clear if you mess up the params
raise ValueError("SQL query parameters should be a tuple, list or dict; got %r" % (params,))
@@ -364,6 +375,17 @@ class Cursor(BaseCursor):
self.sql_from_log[res_from.group(1)][1] += delay
return res
def execute_values(self, query, argslist, template=None, page_size=100, fetch=False):
"""
Proxy for :func:`psycopg2.extras.execute_values` so that batched queries
pass through this cursor and are logged like :meth:`execute`.
Unlike :meth:`execute`, this method cannot take ``log_exceptions=False``.
"""
# execute_values is intentionally given `self` (this wrapping cursor), not
# the raw `self._obj`, so query logging still happens. Since `self` is not
# a real psycopg2 cursor, a Composable must be rendered to a string first,
# using the real cursor for proper quoting/encoding.
if isinstance(query, Composable):
query = query.as_string(self._obj)
return psycopg2.extras.execute_values(self, query, argslist, template=template, page_size=page_size, fetch=fetch)
def split_for_in_conditions(self, ids, size=None):
"""Split a list of identifiers into one or more smaller tuples
safe for IN conditions, after uniquifying them."""
@@ -408,8 +430,8 @@ class Cursor(BaseCursor):
@contextmanager
def _enable_table_tracking(self):
old = self._sql_table_tracking
self._sql_table_tracking = True
try:
self._sql_table_tracking = True
yield
finally:
self._sql_table_tracking = old
@@ -448,17 +470,6 @@ class Cursor(BaseCursor):
keep_in_pool = self.dbname not in ('template0', 'template1', 'postgres', chosen_template)
self.__pool.give_back(self._cnx, keep_in_pool=keep_in_pool)
# Deprecated since 16.0: emulate autocommit by switching the connection's
# isolation level. Kept only so legacy callers still work; the warning
# points them at the direct psycopg2 connection APIs instead.
def autocommit(self, on):
warnings.warn(
f"Deprecated Methods since 16.0, use {'`_cnx.autocommit = True`' if on else '`_cnx.set_isolation_level`'} instead.",
DeprecationWarning, stacklevel=2
)
if on:
isolation_level = ISOLATION_LEVEL_AUTOCOMMIT
else:
# When turning autocommit off, the level depends on the cursor's
# `_serialized` flag: REPEATABLE READ if set, READ COMMITTED otherwise.
isolation_level = ISOLATION_LEVEL_REPEATABLE_READ if self._serialized else ISOLATION_LEVEL_READ_COMMITTED
self._cnx.set_isolation_level(isolation_level)
def commit(self):
""" Perform an SQL `COMMIT` """
self.flush()
@@ -559,9 +570,6 @@ class TestCursor(BaseCursor):
_logger.warning("Found different un-closed cursor when trying to close %s: %s", self, tos)
self._lock.release()
# Deprecated no-op since 16.0: only emits a DeprecationWarning; the `on`
# argument is ignored entirely.
def autocommit(self, on):
warnings.warn("Deprecated method and does nothing since 16.0", DeprecationWarning, 2)
def commit(self):
""" Perform an SQL `COMMIT` """
self._check('commit')
@@ -624,7 +632,7 @@ class ConnectionPool(object):
self._lock = threading.Lock()
def __repr__(self):
used = len([1 for c, u in self._connections[:] if u])
used = len([1 for c, u, _ in self._connections[:] if u])
count = len(self._connections)
return "ConnectionPool(used=%d/count=%d/max=%d)" % (used, count, self._maxconn)
@@ -634,22 +642,28 @@ class ConnectionPool(object):
@locked
def borrow(self, connection_info):
"""
Borrow a PsycoConnection from the pool. If no connection is available, create a new one
as long as there are still slots available. Perform some garbage-collection in the pool:
idle, dead and leaked connections are removed.
:param dict connection_info: dict of psql connection keywords
:rtype: PsycoConnection
"""
# free dead and leaked connections
for i, (cnx, _) in tools.reverse_enumerate(self._connections):
# free idle, dead and leaked connections
for i, (cnx, used, last_used) in tools.reverse_enumerate(self._connections):
if not used and not cnx.closed and time.time() - last_used > MAX_IDLE_TIMEOUT:
self._debug('Close connection at index %d: %r', i, cnx.dsn)
cnx.close()
if cnx.closed:
self._connections.pop(i)
self._debug('Removing closed connection at index %d: %r', i, cnx.dsn)
continue
if getattr(cnx, 'leaked', False):
delattr(cnx, 'leaked')
self._connections.pop(i)
self._connections.append((cnx, False))
self._connections[i][1] = False
_logger.info('%r: Free leaked connection to %r', self, cnx.dsn)
for i, (cnx, used) in enumerate(self._connections):
for i, (cnx, used, _) in enumerate(self._connections):
if not used and self._dsn_equals(cnx.dsn, connection_info):
try:
cnx.reset()
@@ -659,15 +673,14 @@ class ConnectionPool(object):
if not cnx.closed:
cnx.close()
continue
self._connections.pop(i)
self._connections.append((cnx, True))
self._connections[i][1] = True
self._debug('Borrow existing connection to %r at index %d', cnx.dsn, i)
return cnx
if len(self._connections) >= self._maxconn:
# try to remove the oldest connection not used
for i, (cnx, used) in enumerate(self._connections):
for i, (cnx, used, _) in enumerate(self._connections):
if not used:
self._connections.pop(i)
if not cnx.closed:
@@ -685,20 +698,22 @@ class ConnectionPool(object):
except psycopg2.Error:
_logger.info('Connection to the database failed')
raise
self._connections.append((result, True))
self._connections.append([result, True, 0])
self._debug('Create new connection backend PID %d', result.get_backend_pid())
return result
@locked
def give_back(self, connection, keep_in_pool=True):
self._debug('Give back connection to %r', connection.dsn)
for i, (cnx, used) in enumerate(self._connections):
for i, (cnx, _, _) in enumerate(self._connections):
if cnx is connection:
self._connections.pop(i)
if keep_in_pool:
self._connections.append((cnx, False))
# Release the connection and record the last time used
self._connections[i][1] = False
self._connections[i][2] = time.time()
self._debug('Put connection to %r in pool', cnx.dsn)
else:
self._connections.pop(i)
self._debug('Forgot connection to %r', cnx.dsn)
cnx.close()
break
@@ -709,7 +724,7 @@ class ConnectionPool(object):
def close_all(self, dsn=None):
count = 0
last = None
for i, (cnx, used) in tools.reverse_enumerate(self._connections):
for i, (cnx, _, _) in tools.reverse_enumerate(self._connections):
if dsn is None or self._dsn_equals(cnx.dsn, dsn):
cnx.close()
last = self._connections.pop(i)[0]
@@ -747,17 +762,10 @@ class Connection(object):
# Return the database name this Connection was created for.
# NOTE(review): a decorator (presumably @property) sits just above this in
# the full file but is outside this hunk — confirm before relying on it.
def dbname(self):
return self.__dbname
def cursor(self, **kwargs):
if 'serialized' in kwargs:
warnings.warn("Since 16.0, 'serialized' parameter is deprecated", DeprecationWarning, 2)
cursor_type = kwargs.pop('serialized', True) and 'serialized ' or ''
_logger.debug('create %scursor to %r', cursor_type, self.dsn)
# Build a new Odoo Cursor bound to this connection's pool, database name
# and DSN; logs the creation at debug level.
def cursor(self):
_logger.debug('create cursor to %r', self.dsn)
return Cursor(self.__pool, self.__dbname, self.__dsn)
# Deprecated alias (since 16.0) that warns, then forwards all keyword
# arguments to cursor().
def serialized_cursor(self, **kwargs):
warnings.warn("Since 16.0, 'serialized_cursor' is deprecated, use `cursor` instead", DeprecationWarning, 2)
return self.cursor(**kwargs)
# Truth-testing a Connection is explicitly forbidden rather than being
# silently truthy.
def __bool__(self):
raise NotImplementedError()
# Python 2 name for the same truth-testing protocol method.
__nonzero__ = __bool__
@@ -802,7 +810,10 @@ _Pool = None
def db_connect(to, allow_uri=False):
global _Pool
if _Pool is None:
_Pool = ConnectionPool(int(tools.config['db_maxconn']))
_Pool = ConnectionPool(int(
odoo.evented and tools.config['db_maxconn_gevent']
or tools.config['db_maxconn']
))
db, info = connection_info_for(to)
if not allow_uri and db != to: