Skip to content

fix: invalid pcv #830

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Mar 28, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,12 @@ select
'"id": ' || (seq/5) + (seq % 5) || ','
'"timestamp": "' || now() || '",'
'"postings": ['
'{'
'"destination": "sellers:' || (seq % 5) || '",'
'"source": "world",'
'"asset": "SELL",'
'"amount": 1'
'},'
'{'
'"source": "world",'
'"destination": "orders:' || seq || '",'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,21 +8,25 @@ do $$
drop table if exists moves_view;

create temp table moves_view as
select transactions_id::numeric, public.aggregate_objects(json_build_object(accounts_address, json_build_object(asset, json_build_object('input', (post_commit_volumes).inputs, 'output', (post_commit_volumes).outputs)))::jsonb) as volumes
select transactions_seq, public.aggregate_objects(jsonb_build_object(accounts_address, volumes)) as volumes
from (
SELECT DISTINCT ON (moves.transactions_id, accounts_address, asset) moves.transactions_id, accounts_address, asset,
first_value(post_commit_volumes) OVER (
PARTITION BY moves.transactions_id, accounts_address, asset
select transactions_seq::numeric, accounts_address, public.aggregate_objects(json_build_object(asset, json_build_object('input', (post_commit_volumes).inputs, 'output', (post_commit_volumes).outputs))::jsonb) as volumes
from (
SELECT DISTINCT ON (moves.transactions_seq, accounts_address, asset) moves.transactions_seq, accounts_address, asset,
first_value(post_commit_volumes) OVER (
PARTITION BY moves.transactions_seq, accounts_address, asset
ORDER BY seq DESC
) AS post_commit_volumes
FROM moves
where insertion_date < (
select tstamp from goose_db_version where version_id = 12
)
) moves
group by transactions_id;
) AS post_commit_volumes
FROM moves
where insertion_date < (
select tstamp from goose_db_version where version_id = 12
)
) moves
group by transactions_seq, accounts_address
) data
group by transactions_seq;

create index moves_view_idx on moves_view(transactions_id);
create index moves_view_idx on moves_view(transactions_seq);

if (select count(*) from moves_view) = 0 then
return;
Expand All @@ -32,15 +36,15 @@ do $$

loop
with data as (
select transactions_id, volumes
select transactions_seq, volumes
from moves_view
-- play better than offset/limit
where transactions_id >= _offset and transactions_id < _offset + _batch_size
where transactions_seq >= _offset and transactions_seq < _offset + _batch_size
)
update transactions
set post_commit_volumes = data.volumes
from data
where transactions.id = data.transactions_id;
where transactions.seq = data.transactions_seq;

exit when not found;

Expand All @@ -59,3 +63,4 @@ do $$
not valid;
end
$$;

Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
do $$
declare
expected varchar = '{"fees": {"USD": {"input": 1, "output": 0}}, "world": {"USD": {"input": 0, "output": 100}}, "orders:0": {"USD": {"input": 100, "output": 100}}, "sellers:0": {"USD": {"input": 99, "output": 0}}}';
expected varchar = '{"fees": {"USD": {"input": 1, "output": 0}}, "world": {"USD": {"input": 0, "output": 100}, "SELL": {"input": 0, "output": 1}}, "orders:0": {"USD": {"input": 100, "output": 100}}, "sellers:0": {"USD": {"input": 99, "output": 0}, "SELL": {"input": 1, "output": 0}}}';
begin
set search_path = '{{.Schema}}';
assert (select post_commit_volumes::varchar from transactions where id = 0) = expected,
'post_commit_volumes should be equals to ' || expected || ' but was ' || (select to_jsonb(post_commit_volumes) from transactions where id = 0);
end;
$$

$$
42 changes: 22 additions & 20 deletions internal/storage/bucket/migrations/27-fix-invalid-pcv/up.sql
Original file line number Diff line number Diff line change
Expand Up @@ -8,24 +8,25 @@ do $$
drop table if exists moves_view;

create temp table moves_view as
select transactions_id::numeric, public.aggregate_objects(json_build_object(accounts_address, json_build_object(asset, json_build_object('input', (post_commit_volumes).inputs, 'output', (post_commit_volumes).outputs)))::jsonb) as volumes
select transactions_seq, public.aggregate_objects(jsonb_build_object(accounts_address, volumes)) as volumes
from (
SELECT DISTINCT ON (moves.transactions_id, accounts_address, asset)
moves.transactions_id,
accounts_address,
asset,
first_value(post_commit_volumes) OVER (
PARTITION BY moves.transactions_id, accounts_address, asset
ORDER BY seq DESC
) AS post_commit_volumes
FROM moves
where insertion_date < (
select tstamp from goose_db_version where version_id = 12
)
) moves
group by transactions_id;

create index moves_view_idx on moves_view(transactions_id);
select transactions_seq::numeric, accounts_address, public.aggregate_objects(json_build_object(asset, json_build_object('input', (post_commit_volumes).inputs, 'output', (post_commit_volumes).outputs))::jsonb) as volumes
from (
SELECT DISTINCT ON (moves.transactions_seq, accounts_address, asset) moves.transactions_seq, accounts_address, asset,
first_value(post_commit_volumes) OVER (
PARTITION BY moves.transactions_seq, accounts_address, asset
ORDER BY seq DESC
) AS post_commit_volumes
FROM moves
where insertion_date < (
select tstamp from goose_db_version where version_id = 12
)
) moves
group by transactions_seq, accounts_address
) data
group by transactions_seq;

create index moves_view_idx on moves_view(transactions_seq);

if (select count(*) from moves_view) = 0 then
return;
Expand All @@ -35,15 +36,15 @@ do $$

loop
with data as (
select transactions_id, volumes
select transactions_seq, volumes
from moves_view
-- play better than offset/limit
where transactions_id >= _offset and transactions_id < _offset + _batch_size
where transactions_seq >= _offset and transactions_seq < _offset + _batch_size
)
update transactions
set post_commit_volumes = data.volumes
from data
where transactions.id = data.transactions_id;
where transactions.seq = data.transactions_seq;

exit when not found;

Expand All @@ -57,3 +58,4 @@ do $$
drop table if exists moves_view;
end
$$;

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
name: Fill invalid post_commit_volumes (missing asset)
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
do $$
    declare
        _offset integer := 0;
        _batch_size integer := 1000;
        _max_seq numeric;  -- highest transactions_seq to migrate; bounds the batch loop
    begin
        set search_path = '{{ .Schema }}';

        drop table if exists moves_view;

        -- Recompute, for each transaction touched before migration 12, the latest
        -- post-commit volumes per (transaction, account, asset) and fold them into
        -- one JSON object per transaction: {account: {asset: {input, output}}}.
        -- The inner grouping by (transactions_seq, accounts_address) merges all
        -- assets of an account FIRST, then the outer grouping merges accounts per
        -- transaction — so an account holding several assets keeps every asset
        -- entry (losing them was the "invalid pcv" bug this migration repairs).
        create temp table moves_view as
        select transactions_seq, public.aggregate_objects(jsonb_build_object(accounts_address, volumes)) as volumes
        from (
            select transactions_seq::numeric, accounts_address, public.aggregate_objects(json_build_object(asset, json_build_object('input', (post_commit_volumes).inputs, 'output', (post_commit_volumes).outputs))::jsonb) as volumes
            from (
                -- keep only the most recent move (highest seq) per
                -- (transaction, account, asset) triple
                SELECT DISTINCT ON (moves.transactions_seq, accounts_address, asset) moves.transactions_seq, accounts_address, asset,
                    first_value(post_commit_volumes) OVER (
                        PARTITION BY moves.transactions_seq, accounts_address, asset
                        ORDER BY seq DESC
                    ) AS post_commit_volumes
                FROM moves
                where insertion_date < (
                    -- only rows written before schema version 12 carry invalid volumes
                    select tstamp from goose_db_version where version_id = 12
                )
            ) moves
            group by transactions_seq, accounts_address
        ) data
        group by transactions_seq;

        create index moves_view_idx on moves_view(transactions_seq);

        -- nothing to repair: stop before emitting any progress notification
        if (select count(*) from moves_view) = 0 then
            return;
        end if;

        perform pg_notify('migrations-{{ .Schema }}', 'init: ' || (select count(*) from moves_view));

        -- Iterate up to the highest seq instead of `exit when not found`: the
        -- exit-on-empty-batch form aborted the whole migration as soon as one
        -- [_offset, _offset + _batch_size) window matched no row (seq gaps wider
        -- than a batch, or a leading gap), leaving later transactions un-fixed.
        select max(transactions_seq) into _max_seq from moves_view;

        while _offset <= _max_seq loop
            with data as (
                select transactions_seq, volumes
                from moves_view
                -- range scan on the indexed key plays better than offset/limit
                where transactions_seq >= _offset and transactions_seq < _offset + _batch_size
            )
            update transactions
            set post_commit_volumes = data.volumes
            from data
            where transactions.seq = data.transactions_seq;

            _offset = _offset + _batch_size;

            perform pg_notify('migrations-{{ .Schema }}', 'continue: ' || _batch_size);

            -- release locks batch by batch; COMMIT inside DO requires PostgreSQL 11+
            -- and that this script is not wrapped in an outer transaction
            commit;
        end loop;

        drop table if exists moves_view;
    end
$$;

Loading