-
Notifications
You must be signed in to change notification settings - Fork 884
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Add tests for unique index creation with Hypercore TAM #7491
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -1062,3 +1062,115 @@ select * from only_nulls_null; | |
---------+------+--------+----------+-------+------------ | ||
(0 rows) | ||
|
||
-------------------------------------------------- | ||
-- Test unique index creation -- | ||
-------------------------------------------------- | ||
create table uniquetable (time timestamptz not null, value int); | ||
select create_hypertable('uniquetable', 'time', create_default_indexes => false); | ||
create_hypertable | ||
-------------------------- | ||
(7,public,uniquetable,t) | ||
(1 row) | ||
|
||
insert into uniquetable values ('2024-01-01 01:00', 1), ('2024-01-01 02:00', 2); | ||
select format('%I.%I', chunk_schema, chunk_name)::regclass as unique_chunk | ||
from timescaledb_information.chunks | ||
where format('%I.%I', hypertable_schema, hypertable_name)::regclass = 'uniquetable'::regclass | ||
order by unique_chunk asc | ||
limit 1 \gset | ||
alter table uniquetable set (timescaledb.compress_orderby='time'); | ||
WARNING: there was some uncertainty picking the default segment by for the hypertable: You do not have any indexes on columns that can be used for segment_by and thus we are not using segment_by for compression. Please make sure you are not missing any indexes | ||
NOTICE: default segment by for hypertable "uniquetable" is set to "" | ||
-- Create a non-Hypercore TAM compressed chunk | ||
select * from compress_chunk(:'unique_chunk'); | ||
compress_chunk | ||
----------------------------------------- | ||
_timescaledb_internal._hyper_7_19_chunk | ||
(1 row) | ||
|
||
-- Should still be a "heap" chunk | ||
select c.relname, a.amname from pg_class c | ||
inner join pg_am a ON (c.relam = a.oid) | ||
where c.oid = :'unique_chunk'::regclass; | ||
relname | amname | ||
-------------------+-------- | ||
_hyper_7_19_chunk | heap | ||
(1 row) | ||
|
||
insert into uniquetable values ('2024-01-01 01:00', 3); | ||
-- Unique index creation on compressed chunk not supported | ||
\set ON_ERROR_STOP 0 | ||
create unique index time_key on uniquetable (time); | ||
ERROR: operation not supported on hypertables that have compression enabled | ||
\set ON_ERROR_STOP 1 | ||
-- Convert the chunk to using Hypercore TAM | ||
alter table :unique_chunk set access method hypercore; | ||
-- Should now be a chunk using Hypercore TAM | ||
select c.relname, a.amname from pg_class c | ||
inner join pg_am a ON (c.relam = a.oid) | ||
where c.oid = :'unique_chunk'::regclass; | ||
relname | amname | ||
-------------------+----------- | ||
_hyper_7_19_chunk | hypercore | ||
(1 row) | ||
|
||
select _timescaledb_debug.is_compressed_tid(ctid), * from :unique_chunk order by time; | ||
is_compressed_tid | time | value | ||
-------------------+------------------------------+------- | ||
t | Mon Jan 01 01:00:00 2024 PST | 1 | ||
f | Mon Jan 01 01:00:00 2024 PST | 3 | ||
t | Mon Jan 01 02:00:00 2024 PST | 2 | ||
(3 rows) | ||
|
||
-- Unique index creation should work but fail on uniqueness check | ||
\set ON_ERROR_STOP 0 | ||
create unique index time_key on uniquetable (time); | ||
ERROR: could not create unique index "_hyper_7_19_chunk_time_key" | ||
DETAIL: Key ("time")=(Mon Jan 01 01:00:00 2024 PST) is duplicated. | ||
\set ON_ERROR_STOP 1 | ||
-- Recompress to get all values in compressed format | ||
select compress_chunk(:'unique_chunk'); | ||
compress_chunk | ||
----------------------------------------- | ||
_timescaledb_internal._hyper_7_19_chunk | ||
(1 row) | ||
|
||
-- Everything's compressed | ||
select _timescaledb_debug.is_compressed_tid(ctid), * from :unique_chunk order by time; | ||
is_compressed_tid | time | value | ||
-------------------+------------------------------+------- | ||
t | Mon Jan 01 01:00:00 2024 PST | 3 | ||
t | Mon Jan 01 01:00:00 2024 PST | 1 | ||
t | Mon Jan 01 02:00:00 2024 PST | 2 | ||
(3 rows) | ||
|
||
-- Unique index creation should still fail | ||
\set ON_ERROR_STOP 0 | ||
create unique index time_key on uniquetable (time); | ||
ERROR: could not create unique index "_hyper_7_19_chunk_time_key" | ||
DETAIL: Key ("time")=(Mon Jan 01 01:00:00 2024 PST) is duplicated. | ||
\set ON_ERROR_STOP 1 | ||
-- Delete the conflicting value and unique index creation should succeed | ||
delete from uniquetable where value = 3; | ||
select _timescaledb_debug.is_compressed_tid(ctid), * from :unique_chunk order by time; | ||
is_compressed_tid | time | value | ||
-------------------+------------------------------+------- | ||
f | Mon Jan 01 01:00:00 2024 PST | 1 | ||
f | Mon Jan 01 02:00:00 2024 PST | 2 | ||
(2 rows) | ||
|
||
create unique index time_key on uniquetable (time); | ||
explain (costs off) | ||
select * from uniquetable where time = '2024-01-01 01:00'; | ||
QUERY PLAN | ||
----------------------------------------------------------------------------------- | ||
Index Scan using _hyper_7_19_chunk_time_key on _hyper_7_19_chunk | ||
Index Cond: ("time" = 'Mon Jan 01 01:00:00 2024 PST'::timestamp with time zone) | ||
(2 rows) | ||
|
||
select * from uniquetable where time = '2024-01-01 01:00'; | ||
time | value | ||
------------------------------+------- | ||
Mon Jan 01 01:00:00 2024 PST | 1 | ||
(1 row) | ||
|
||
Comment on lines +1171 to +1176
Reviewer: Now that you have a unique index, you should probably have tests that try to insert rows that violate this constraint and demonstrate that the insert fails.

Reply: Don't we already have those tests?

Reply: There's actually some overlap between these tests and the old tests. We might want to reconcile them.

Reply: Since it is already covered, I do not think this is necessary for this pull request.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Although it does no harm to have this check here, I am not sure what potential bug you wanted to catch here.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Just to make clear there is a difference and that this error is intact despite supporting unique indexes on Hypercore TAM.