mirror of https://github.com/citusdata/citus.git
Merge pull request #3344 from citusdata/fix-extension-already-exists-test
Fix tests when hll/topn installed
pull/3307/head
commit 11368451f4
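The hunks below all trim the same setup pattern. Its moving parts are visible in the diff's context lines (pg_available_extensions(), \gset, :create_cmd;): the test builds a CREATE EXTENSION statement only when the extension is actually available, stores it in a psql variable, and then executes that variable. A minimal sketch of the pattern follows; the CASE expression is an assumption (only the FROM/WHERE clause, \gset, :create_cmd;, and the hll_present fallback column name appear in the diff itself):

    -- Compose either CREATE EXTENSION or a harmless fallback SELECT,
    -- then run whichever was stored, via psql variable interpolation.
    SELECT CASE WHEN count(*) > 0
                THEN 'CREATE EXTENSION hll'
                ELSE 'SELECT false AS hll_present'
           END AS create_cmd
    FROM pg_available_extensions()
    WHERE name = 'hll'
    \gset
    :create_cmd;

The change removes the per-worker repetitions of this block (the \c - - - :worker_N_port / :create_cmd; lines) from the test and its expected outputs, and re-baselines the shard IDs that appear in the EXPLAIN plans.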
@@ -10,13 +10,6 @@ WHERE name = 'hll'
 \gset
 :create_cmd;
 ERROR: extension "hll" already exists
-\c - - - :worker_1_port
-:create_cmd;
-ERROR: extension "hll" already exists
-\c - - - :worker_2_port
-:create_cmd;
-ERROR: extension "hll" already exists
-\c - - - :master_port
 SET citus.shard_count TO 4;
 CREATE TABLE raw_table (day date, user_id int);
 CREATE TABLE daily_uniques(day date, unique_users hll);
@@ -126,22 +119,22 @@ GROUP BY(1);
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (25 rows)
 
 SET hll.force_groupagg to ON;
@@ -164,22 +157,22 @@ GROUP BY(1);
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (27 rows)
 
 -- Test disabling hash_agg with operator on coordinator query
@@ -201,22 +194,22 @@ GROUP BY(1);
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (25 rows)
 
 SET hll.force_groupagg to ON;
@@ -239,22 +232,22 @@ GROUP BY(1);
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (27 rows)
 
 -- Test disabling hash_agg with expression on coordinator query
@@ -276,22 +269,22 @@ GROUP BY(1);
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (25 rows)
 
 SET hll.force_groupagg to ON;
@@ -314,22 +307,22 @@ GROUP BY(1);
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (27 rows)
 
 -- Test disabling hash_agg with having
@@ -351,22 +344,22 @@ GROUP BY(1);
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> HashAggregate
 Group Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (25 rows)
 
 SET hll.force_groupagg to ON;
@@ -394,7 +387,7 @@ HAVING hll_cardinality(hll_union_agg(unique_users)) > 1;
 Filter: (hll_cardinality(hll_union_agg(unique_users)) > '1'::double precision)
 -> Sort
 Sort Key: day
--> Seq Scan on daily_uniques_360289 daily_uniques
+-> Seq Scan on daily_uniques_360615 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> GroupAggregate
@@ -402,7 +395,7 @@ HAVING hll_cardinality(hll_union_agg(unique_users)) > 1;
 Filter: (hll_cardinality(hll_union_agg(unique_users)) > '1'::double precision)
 -> Sort
 Sort Key: day
--> Seq Scan on daily_uniques_360290 daily_uniques
+-> Seq Scan on daily_uniques_360616 daily_uniques
 -> Task
 Node: host=localhost port=57637 dbname=regression
 -> GroupAggregate
@@ -410,7 +403,7 @@ HAVING hll_cardinality(hll_union_agg(unique_users)) > 1;
 Filter: (hll_cardinality(hll_union_agg(unique_users)) > '1'::double precision)
 -> Sort
 Sort Key: day
--> Seq Scan on daily_uniques_360291 daily_uniques
+-> Seq Scan on daily_uniques_360617 daily_uniques
 -> Task
 Node: host=localhost port=57638 dbname=regression
 -> GroupAggregate
@@ -418,7 +411,7 @@ HAVING hll_cardinality(hll_union_agg(unique_users)) > 1;
 Filter: (hll_cardinality(hll_union_agg(unique_users)) > '1'::double precision)
 -> Sort
 Sort Key: day
--> Seq Scan on daily_uniques_360292 daily_uniques
+-> Seq Scan on daily_uniques_360618 daily_uniques
 (40 rows)
 
 DROP TABLE raw_table;
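For orientation, the plans re-baselined above come from per-day rollups over daily_uniques(day, unique_users hll). A rough sketch of that query shape follows; the table, the hll_cardinality(hll_union_agg(...)) expression, the HAVING predicate, and the hll.force_groupagg GUC all appear in the diff, while the exact SELECT list is an assumption:

    -- Per-day distinct-count rollup over the hll column; the tests toggle
    -- hll.force_groupagg before re-running EXPLAIN on queries of this shape.
    SET hll.force_groupagg TO on;
    SELECT day, hll_cardinality(hll_union_agg(unique_users))
    FROM daily_uniques
    GROUP BY day
    HAVING hll_cardinality(hll_union_agg(unique_users)) > 1;

Only the shard suffixes in the worker Seq Scan nodes change (360289-360292 become 360615-360618); the plan shapes themselves are untouched.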
@@ -432,11 +425,6 @@ AS create_topn FROM pg_available_extensions()
 WHERE name = 'topn'
 \gset
 :create_topn;
-\c - - - :worker_1_port
-:create_topn;
-\c - - - :worker_2_port
-:create_topn;
-\c - - - :master_port
 CREATE TABLE customer_reviews (day date, user_id int, review int);
 CREATE TABLE popular_reviewer(day date, reviewers jsonb);
 SELECT create_distributed_table('customer_reviews', 'user_id');
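The topn half of the test gets the same treatment: the per-worker :create_topn; blocks are dropped while the customer_reviews and popular_reviewer(day, reviewers jsonb) setup stays. As an illustration only, not taken from this diff, the jsonb column is typically populated and read back with the topn extension's documented aggregates (topn_add_agg and topn; treat the exact calls here as an assumption):

    -- Roll each day's reviewers into a topn sketch, then read the top entries back.
    INSERT INTO popular_reviewer
    SELECT day, topn_add_agg(user_id::text)
    FROM customer_reviews
    GROUP BY day;

    SELECT day, (topn(reviewers, 10)).*
    FROM popular_reviewer;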
@@ -14,21 +14,6 @@ WHERE name = 'hll'
 f
 (1 row)
 
-\c - - - :worker_1_port
-:create_cmd;
-hll_present
--------------
-f
-(1 row)
-
-\c - - - :worker_2_port
-:create_cmd;
-hll_present
--------------
-f
-(1 row)
-
-\c - - - :master_port
 SET citus.shard_count TO 4;
 CREATE TABLE raw_table (day date, user_id int);
 CREATE TABLE daily_uniques(day date, unique_users hll);
@@ -210,21 +195,6 @@ WHERE name = 'topn'
 f
 (1 row)
 
-\c - - - :worker_1_port
-:create_topn;
-topn_present
---------------
-f
-(1 row)
-
-\c - - - :worker_2_port
-:create_topn;
-topn_present
---------------
-f
-(1 row)
-
-\c - - - :master_port
 CREATE TABLE customer_reviews (day date, user_id int, review int);
 CREATE TABLE popular_reviewer(day date, reviewers jsonb);
 SELECT create_distributed_table('customer_reviews', 'user_id');
@@ -10,13 +10,6 @@ WHERE name = 'hll'
 \gset
 :create_cmd;
 ERROR: extension "hll" already exists
-\c - - - :worker_1_port
-:create_cmd;
-ERROR: extension "hll" already exists
-\c - - - :worker_2_port
-:create_cmd;
-ERROR: extension "hll" already exists
-\c - - - :master_port
 SET citus.shard_count TO 4;
 CREATE TABLE raw_table (day date, user_id int);
 CREATE TABLE daily_uniques(day date, unique_users hll);
@@ -432,11 +425,6 @@ AS create_topn FROM pg_available_extensions()
 WHERE name = 'topn'
 \gset
 :create_topn;
-\c - - - :worker_1_port
-:create_topn;
-\c - - - :worker_2_port
-:create_topn;
-\c - - - :master_port
 CREATE TABLE customer_reviews (day date, user_id int, review int);
 CREATE TABLE popular_reviewer(day date, reviewers jsonb);
 SELECT create_distributed_table('customer_reviews', 'user_id');
@@ -9,11 +9,6 @@ AS create_cmd FROM pg_available_extensions()
 WHERE name = 'hll'
 \gset
 :create_cmd;
-\c - - - :worker_1_port
-:create_cmd;
-\c - - - :worker_2_port
-:create_cmd;
-\c - - - :master_port
 -- Try to execute count(distinct) when approximate distincts aren't enabled
 SELECT count(distinct l_orderkey) FROM lineitem;
 count
@@ -14,21 +14,6 @@ WHERE name = 'hll'
 f
 (1 row)
 
-\c - - - :worker_1_port
-:create_cmd;
-hll_present
--------------
-f
-(1 row)
-
-\c - - - :worker_2_port
-:create_cmd;
-hll_present
--------------
-f
-(1 row)
-
-\c - - - :master_port
 -- Try to execute count(distinct) when approximate distincts aren't enabled
 SELECT count(distinct l_orderkey) FROM lineitem;
 count
@@ -11,14 +11,6 @@ WHERE name = 'hll'
 
 :create_cmd;
 
-\c - - - :worker_1_port
-:create_cmd;
-
-\c - - - :worker_2_port
-:create_cmd;
-
-\c - - - :master_port
-
 SET citus.shard_count TO 4;
 
 CREATE TABLE raw_table (day date, user_id int);
@@ -160,13 +152,6 @@ WHERE name = 'topn'
 
 :create_topn;
 
-\c - - - :worker_1_port
-:create_topn;
-
-\c - - - :worker_2_port
-:create_topn;
-
-\c - - - :master_port
 CREATE TABLE customer_reviews (day date, user_id int, review int);
 CREATE TABLE popular_reviewer(day date, reviewers jsonb);
 
@@ -13,14 +13,6 @@ WHERE name = 'hll'
 
 :create_cmd;
 
-\c - - - :worker_1_port
-:create_cmd;
-
-\c - - - :worker_2_port
-:create_cmd;
-
-\c - - - :master_port
-
 -- Try to execute count(distinct) when approximate distincts aren't enabled
 
 SELECT count(distinct l_orderkey) FROM lineitem;