Skip to content

Commit 44c3584

Browse files
committed
[FIX] records: use temp table in replacing refs
For jsonb company-dependent indirect references, building the replacement query from the original id-mapping input can produce a very large query (it grows with the number of references being updated), leading to a 'stack depth limit exceeded' error. Instead, we can use the mapping already saved in the temp table _upgrade_rrr.
1 parent ed097dd commit 44c3584

File tree

3 files changed

+119
-25
lines changed

3 files changed

+119
-25
lines changed

src/base/tests/test_util.py

+59-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,12 @@
1919
from odoo.tools.safe_eval import safe_eval
2020

2121
from odoo.addons.base.maintenance.migrations import util
22-
from odoo.addons.base.maintenance.migrations.testing import UnitTestCase, parametrize
22+
from odoo.addons.base.maintenance.migrations.testing import (
23+
UnitTestCase,
24+
add_test_trigger,
25+
drop_test_trigger,
26+
parametrize,
27+
)
2328
from odoo.addons.base.maintenance.migrations.util import snippets
2429
from odoo.addons.base.maintenance.migrations.util.domains import (
2530
FALSE_LEAF,
@@ -1364,6 +1369,59 @@ def test_replace_record_references_batch__uniqueness(self):
13641369
[count] = self.env.cr.fetchone()
13651370
self.assertEqual(count, 1)
13661371

1372+
@unittest.skipUnless(util.version_gte("18.0"), "Only work on Odoo >= 18")
1373+
def test_replace_record_references_batch__company_dependent(self):
1374+
partner_model = self.env["ir.model"].search([("model", "=", "res.partner")])
1375+
self.env["ir.model.fields"].create(
1376+
{
1377+
"name": "x_test_curr",
1378+
"ttype": "many2one",
1379+
"model_id": partner_model.id,
1380+
"relation": "res.currency",
1381+
"company_dependent": True,
1382+
}
1383+
)
1384+
c1 = self.env["res.currency"].create({"name": "RC1", "symbol": "RC1"})
1385+
c2 = self.env["res.currency"].create({"name": "RC2", "symbol": "RC2"})
1386+
c3 = self.env["res.currency"].create({"name": "RC3", "symbol": "RC3"})
1387+
c4 = self.env["res.currency"].create({"name": "RC4", "symbol": "RC4"})
1388+
1389+
p1 = self.env["res.partner"].create({"name": "Captain Jack"})
1390+
p2 = self.env["res.partner"].create({"name": "River Song"})
1391+
p3 = self.env["res.partner"].create({"name": "Donna Noble"})
1392+
1393+
old = {
1394+
p1.id: f'{{"1":{c1.id}, "2":{c2.id}, "3":null}}',
1395+
p2.id: f'{{"1":{c1.id}, "2":{c2.id}, "3":{c3.id}, "4":{c4.id}}}',
1396+
p3.id: f'{{"1":{c4.id}}}',
1397+
}
1398+
for id, value in old.items():
1399+
self.env.cr.execute(
1400+
"""
1401+
UPDATE res_partner
1402+
SET x_test_curr = %s
1403+
WHERE id = %s
1404+
""",
1405+
[value, id],
1406+
)
1407+
mapping = {
1408+
c1.id: c2.id,
1409+
c2.id: c3.id,
1410+
c3.id: c1.id,
1411+
}
1412+
add_test_trigger(self.env.cr, "replace_record_references", "res_partner", "UPDATE", f"new.id = {p3.id}")
1413+
util.replace_record_references_batch(self.env.cr, mapping, "res.currency")
1414+
new = {
1415+
p1.id: {"1": c2.id, "2": c3.id, "3": None},
1416+
p2.id: {"1": c2.id, "2": c3.id, "3": c1.id, "4": c4.id},
1417+
p3.id: {"1": c4.id},
1418+
}
1419+
self.env.cr.execute("SELECT id, x_test_curr FROM res_partner WHERE id IN %s", [(p1.id, p2.id)])
1420+
for id, currencies in self.env.cr.fetchall():
1421+
expected = new[id]
1422+
self.assertEqual(currencies, expected)
1423+
drop_test_trigger(self.env.cr, "replace_record_references", "res_partner")
1424+
13671425
def _prepare_test_delete_unused(self):
13681426
def create_cat():
13691427
name = f"test_{uuid.uuid4().hex}"

src/testing.py

+36
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@
44
import os
55
import re
66

7+
from psycopg2 import sql
8+
79
import odoo
810
from odoo import api, release
911
from odoo.modules.registry import Registry
@@ -280,6 +282,40 @@ def get_previous_major(major, minor):
280282
return major, 0
281283

282284

285+
def add_test_trigger(cr, name, table, when, condition):
    """Install a row-level trigger on `table` that raises whenever it fires.

    Used by tests to assert that some rows are *not* touched by an operation:
    if the guarded statement (`when`, e.g. "UPDATE") hits a row matching
    `condition`, the transaction errors out with the offending row serialized
    (as json) in the exception message.

    Any previous trigger installed under the same `name` is dropped first,
    making repeated calls safe.  Remove it with :func:`drop_test_trigger`.

    .. warning::
        `when` and `condition` are interpolated as raw SQL (``sql.SQL``);
        callers must only pass trusted, test-controlled values.
    """
    drop_test_trigger(cr, name, table)
    # Shared helper function; CREATE OR REPLACE keeps this idempotent across
    # multiple triggers/tests using it.
    cr.execute(
        """
        CREATE OR REPLACE
            FUNCTION provoke_error() RETURNS TRIGGER AS $$
        BEGIN
            RAISE EXCEPTION USING MESSAGE='Error for injected trigger with record: ' || row_to_json(NEW);
        END
        $$ LANGUAGE PLPGSQL
        """
    )
    cr.execute(
        util.format_query(
            cr,
            """
            CREATE TRIGGER {name}
                BEFORE {when}
                    ON {table}
           FOR EACH ROW WHEN ({cond}) EXECUTE
              FUNCTION provoke_error();
            """,
            name="error_{}".format(name),
            when=sql.SQL(when),
            table=table,
            cond=sql.SQL(condition),
        )
    )
313+
314+
315+
def drop_test_trigger(cr, name, table):
    """Remove the trigger previously installed by :func:`add_test_trigger`.

    Idempotent: uses ``IF EXISTS``, so it is safe to call even when the
    trigger was never created.  The shared ``provoke_error()`` function is
    deliberately left in place, as other test triggers may still use it.
    """
    cr.execute(util.format_query(cr, "DROP TRIGGER IF EXISTS {} ON {}", "error_{}".format(name), table))
317+
318+
283319
# pylint: disable=inherit-non-class
284320
class UpgradeCase(UpgradeCommon, _create_meta(10, "upgrade_case")):
285321
"""

src/util/records.py

+24-24
Original file line numberDiff line numberDiff line change
@@ -1600,30 +1600,30 @@ def replace_record_references_batch(cr, id_mapping, model_src, model_dst=None, r
16001600
ir.res_id,
16011601
],
16021602
)
1603-
json_path = cr.mogrify(
1604-
"$.* ? ({})".format(" || ".join(["@ == %s"] * len(id_mapping))),
1605-
list(id_mapping),
1606-
).decode()
1607-
1608-
query = cr.mogrify(
1609-
format_query(
1610-
cr,
1611-
"""
1612-
UPDATE {table}
1613-
SET {column} = (
1614-
SELECT jsonb_object_agg(key, COALESCE(((jsonb_object(%s::text[]))->>value)::int, value::int))
1615-
FROM jsonb_each_text({column})
1616-
)
1617-
WHERE {column} IS NOT NULL
1618-
AND {column} @? {json_path}
1619-
""",
1620-
table=ir.table,
1621-
column=ir.res_id,
1622-
json_path=sql.Literal(json_path),
1623-
),
1624-
[list(map(list, id_mapping.items()))],
1625-
).decode()
1626-
explode_execute(cr, query, table=ir.table)
1603+
query = format_query(
1604+
cr,
1605+
"""
1606+
WITH _upg_cd AS (
1607+
SELECT t.id,
1608+
jsonb_object_agg(j.key, COALESCE(r.new, j.value::int)) as value
1609+
FROM {table} t
1610+
JOIN jsonb_each_text(t.{column}) j
1611+
ON true
1612+
LEFT JOIN _upgrade_rrr r
1613+
ON r.old = j.value::integer
1614+
WHERE {{parallel_filter}}
1615+
GROUP BY t.id
1616+
HAVING bool_or(r.new IS NOT NULL)
1617+
)
1618+
UPDATE {table} t
1619+
SET {column} = u.value
1620+
FROM _upg_cd u
1621+
WHERE u.id = t.id
1622+
""",
1623+
table=ir.table,
1624+
column=ir.res_id,
1625+
)
1626+
explode_execute(cr, query, table=ir.table, alias="t")
16271627
# ensure all new ids exist
16281628
cr.execute(
16291629
format_query(

0 commit comments

Comments
 (0)