From 7d6c401dd314145bac303663921a629bd7132afd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=96nder=20Kalac=C4=B1?=
Date: Fri, 6 Oct 2023 13:37:37 +0300
Subject: [PATCH 01/14] Update technical readme (#7248)

Fix a wrong query, reported by @naisila
---
 src/backend/distributed/README.md | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/src/backend/distributed/README.md b/src/backend/distributed/README.md
index f64263ed2..0a3164e0f 100644
--- a/src/backend/distributed/README.md
+++ b/src/backend/distributed/README.md
@@ -447,14 +447,8 @@ WHERE orders_table.user_id = distinct_user_id;
 #### Subquery in Target List
 
 ```sql
--- Sum of max price per product category, filtered by a subquery in the target list
-SELECT
-    (SELECT MAX(price) FROM products_table p WHERE p.category = o.category),
-    COUNT(DISTINCT o.product_id)
-FROM orders_table o, users_table u
-WHERE o.user_id = u.user_id AND u.user_id IN
-(SELECT user_id FROM special_users_table)
-GROUP BY o.category;
+-- retrieves the most recent order date for each user
+SELECT (SELECT MAX(order_date) FROM orders_table o WHERE o.user_id = u.user_id) FROM users_table u;
 ```
 
 #### Subquery in WHERE Clause

From 858d99be33e0057e04dc74f687f145dad89d6b59 Mon Sep 17 00:00:00 2001
From: Onur Tirtir
Date: Mon, 9 Oct 2023 13:13:08 +0300
Subject: [PATCH 02/14] Take improvement_threshold into account in citus_add_rebalance_strategy() (#7247)

DESCRIPTION: Makes sure to take improvement_threshold into account in
`citus_add_rebalance_strategy()`.

Fixes https://github.com/citusdata/citus/issues/7188.
---
 .../distributed/sql/citus--12.1-1--12.2-1.sql |  2 ++
 .../sql/downgrades/citus--12.2-1--12.1-1.sql  |  3 +-
 .../citus_add_rebalance_strategy/12.2-1.sql   | 32 +++++++++++++++++++
 .../citus_add_rebalance_strategy/latest.sql   |  6 ++--
 .../regress/expected/shard_rebalancer.out     | 21 ++++++++++++
 .../upgrade_rebalance_strategy_after.out      |  2 +-
 src/test/regress/sql/shard_rebalancer.sql     | 14 ++++++++
 7 files changed, 76 insertions(+), 4 deletions(-)
 create mode 100644 src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/12.2-1.sql

diff --git a/src/backend/distributed/sql/citus--12.1-1--12.2-1.sql b/src/backend/distributed/sql/citus--12.1-1--12.2-1.sql
index bb9d22969..ec4cc7134 100644
--- a/src/backend/distributed/sql/citus--12.1-1--12.2-1.sql
+++ b/src/backend/distributed/sql/citus--12.1-1--12.2-1.sql
@@ -1,3 +1,5 @@
 -- citus--12.1-1--12.2-1
 
 -- bump version to 12.2-1
+
+#include "udfs/citus_add_rebalance_strategy/12.2-1.sql"
diff --git a/src/backend/distributed/sql/downgrades/citus--12.2-1--12.1-1.sql b/src/backend/distributed/sql/downgrades/citus--12.2-1--12.1-1.sql
index b26fc16bc..93d121a12 100644
--- a/src/backend/distributed/sql/downgrades/citus--12.2-1--12.1-1.sql
+++ b/src/backend/distributed/sql/downgrades/citus--12.2-1--12.1-1.sql
@@ -1,2 +1,3 @@
 -- citus--12.2-1--12.1-1
--- this is an empty downgrade path since citus--12.2-1--12.1-1.sql is empty for now
+
+#include "../udfs/citus_add_rebalance_strategy/10.1-1.sql"
diff --git a/src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/12.2-1.sql b/src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/12.2-1.sql
new file mode 100644
index 000000000..c4f157c2e
--- /dev/null
+++ b/src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/12.2-1.sql
@@ -0,0 +1,32 @@
+DROP FUNCTION pg_catalog.citus_add_rebalance_strategy;
+CREATE OR REPLACE FUNCTION pg_catalog.citus_add_rebalance_strategy(
+    name name,
+    shard_cost_function regproc,
+    node_capacity_function regproc,
+    
shard_allowed_on_node_function regproc, + default_threshold float4, + minimum_threshold float4 DEFAULT 0, + improvement_threshold float4 DEFAULT 0 +) + RETURNS VOID AS $$ + INSERT INTO + pg_catalog.pg_dist_rebalance_strategy( + name, + shard_cost_function, + node_capacity_function, + shard_allowed_on_node_function, + default_threshold, + minimum_threshold, + improvement_threshold + ) VALUES ( + name, + shard_cost_function, + node_capacity_function, + shard_allowed_on_node_function, + default_threshold, + minimum_threshold, + improvement_threshold + ); + $$ LANGUAGE sql; +COMMENT ON FUNCTION pg_catalog.citus_add_rebalance_strategy(name,regproc,regproc,regproc,float4, float4, float4) + IS 'adds a new rebalance strategy which can be used when rebalancing shards or draining nodes'; diff --git a/src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/latest.sql b/src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/latest.sql index 4c5f8ba79..c4f157c2e 100644 --- a/src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/latest.sql +++ b/src/backend/distributed/sql/udfs/citus_add_rebalance_strategy/latest.sql @@ -16,14 +16,16 @@ CREATE OR REPLACE FUNCTION pg_catalog.citus_add_rebalance_strategy( node_capacity_function, shard_allowed_on_node_function, default_threshold, - minimum_threshold + minimum_threshold, + improvement_threshold ) VALUES ( name, shard_cost_function, node_capacity_function, shard_allowed_on_node_function, default_threshold, - minimum_threshold + minimum_threshold, + improvement_threshold ); $$ LANGUAGE sql; COMMENT ON FUNCTION pg_catalog.citus_add_rebalance_strategy(name,regproc,regproc,regproc,float4, float4, float4) diff --git a/src/test/regress/expected/shard_rebalancer.out b/src/test/regress/expected/shard_rebalancer.out index 7997b5e28..f5b76c14c 100644 --- a/src/test/regress/expected/shard_rebalancer.out +++ b/src/test/regress/expected/shard_rebalancer.out @@ -2184,6 +2184,27 @@ SELECT citus_add_rebalance_strategy( 0.1 ); ERROR: default_threshold cannot be smaller than minimum_threshold +SELECT citus_add_rebalance_strategy( + 'test_improvement_threshold', + 'citus_shard_cost_1', + 'capacity_high_worker_2', + 'citus_shard_allowed_on_node_true', + 0.2, + 0.1, + 0.3 + ); + citus_add_rebalance_strategy +--------------------------------------------------------------------- + +(1 row) + +SELECT * FROM pg_dist_rebalance_strategy WHERE name='test_improvement_threshold'; + name | default_strategy | shard_cost_function | node_capacity_function | shard_allowed_on_node_function | default_threshold | minimum_threshold | improvement_threshold +--------------------------------------------------------------------- + test_improvement_threshold | f | citus_shard_cost_1 | capacity_high_worker_2 | citus_shard_allowed_on_node_true | 0.2 | 0.1 | 0.3 +(1 row) + +DELETE FROM pg_catalog.pg_dist_rebalance_strategy WHERE name='test_improvement_threshold'; -- Make it a data node again SELECT * from master_set_node_property('localhost', :worker_2_port, 'shouldhaveshards', true); master_set_node_property diff --git a/src/test/regress/expected/upgrade_rebalance_strategy_after.out b/src/test/regress/expected/upgrade_rebalance_strategy_after.out index da822fffd..4036af539 100644 --- a/src/test/regress/expected/upgrade_rebalance_strategy_after.out +++ b/src/test/regress/expected/upgrade_rebalance_strategy_after.out @@ -3,6 +3,6 @@ SELECT * FROM pg_catalog.pg_dist_rebalance_strategy ORDER BY name; --------------------------------------------------------------------- by_disk_size | f 
| citus_shard_cost_by_disk_size | citus_node_capacity_1 | citus_shard_allowed_on_node_true | 0.1 | 0.01 | 0.5
 by_shard_count | f | citus_shard_cost_1 | citus_node_capacity_1 | citus_shard_allowed_on_node_true | 0 | 0 | 0
- custom_strategy | t | upgrade_rebalance_strategy.shard_cost_2 | upgrade_rebalance_strategy.capacity_high_worker_1 | upgrade_rebalance_strategy.only_worker_2 | 0.5 | 0.2 | 0
+ custom_strategy | t | upgrade_rebalance_strategy.shard_cost_2 | upgrade_rebalance_strategy.capacity_high_worker_1 | upgrade_rebalance_strategy.only_worker_2 | 0.5 | 0.2 | 0.3
 (3 rows)

diff --git a/src/test/regress/sql/shard_rebalancer.sql b/src/test/regress/sql/shard_rebalancer.sql
index 07efa8617..5d8e89b36 100644
--- a/src/test/regress/sql/shard_rebalancer.sql
+++ b/src/test/regress/sql/shard_rebalancer.sql
@@ -1229,6 +1229,20 @@ SELECT citus_add_rebalance_strategy(
         0.1
     );
 
+SELECT citus_add_rebalance_strategy(
+    'test_improvement_threshold',
+    'citus_shard_cost_1',
+    'capacity_high_worker_2',
+    'citus_shard_allowed_on_node_true',
+    0.2,
+    0.1,
+    0.3
+    );
+
+SELECT * FROM pg_dist_rebalance_strategy WHERE name='test_improvement_threshold';
+
+DELETE FROM pg_catalog.pg_dist_rebalance_strategy WHERE name='test_improvement_threshold';
+
 -- Make it a data node again
 SELECT * from master_set_node_property('localhost', :worker_2_port, 'shouldhaveshards', true);
 DROP TABLE tab;

From 6d8725efb03b23f0a9a08a94cc56ca42e11e6690 Mon Sep 17 00:00:00 2001
From: Nils Dijk
Date: Mon, 9 Oct 2023 13:05:51 +0200
Subject: [PATCH 03/14] Fix leaking of memory and memory contexts in Foreign Constraint Graphs (#7236)

DESCRIPTION: Fix leaking of memory and memory contexts in Foreign
Constraint Graphs

Previously, every time we (re)created the Foreign Constraint
Relationship Graph, we created a new Memory Context while losing the
reference to the previous context. That old context could still hold
leftover memory, causing a memory leak.

With this patch we statically have one memory context that we lazily
initialize the first time we create our foreign constraint relationship
graph. On every subsequent creation, besides destroying our previous
hashmap we also reset our memory context to remove any leftover
references.
---
 .../utils/foreign_key_relationship.c | 57 ++++++++++---------
 .../distributed/foreign_key_relationship.h | 1 -
 2 files changed, 30 insertions(+), 28 deletions(-)

diff --git a/src/backend/distributed/utils/foreign_key_relationship.c b/src/backend/distributed/utils/foreign_key_relationship.c
index 2858e6ed3..d30c767df 100644
--- a/src/backend/distributed/utils/foreign_key_relationship.c
+++ b/src/backend/distributed/utils/foreign_key_relationship.c
@@ -28,6 +28,7 @@
 #include "distributed/version_compat.h"
 #include "nodes/pg_list.h"
 #include "storage/lockdefs.h"
+#include "utils/catcache.h"
 #include "utils/fmgroids.h"
 #include "utils/hsearch.h"
 #include "common/hashfn.h"
@@ -96,6 +97,8 @@ static List * GetConnectedListHelper(ForeignConstraintRelationshipNode *node,
                                      bool isReferencing);
 static List * GetForeignConstraintRelationshipHelper(Oid relationId, bool isReferencing);
 
+MemoryContext ForeignConstraintRelationshipMemoryContext = NULL;
+
 
 /*
  * GetForeignKeyConnectedRelationIdList returns a list of relation id's for
@@ -321,17 +324,36 @@ CreateForeignConstraintRelationshipGraph()
         return;
     }
 
-    ClearForeignConstraintRelationshipGraphContext();
+    /*
+     * Lazily create our memory context once and reset it on every reuse.
+     * Since fConstraintRelationshipGraph was cleared and invalidated right
+     * before this point, we can simply reset the context if it already exists.
+     */
+    if (ForeignConstraintRelationshipMemoryContext == NULL)
+    {
+        /* make sure we've initialized CacheMemoryContext */
+        if (CacheMemoryContext == NULL)
+        {
+            CreateCacheMemoryContext();
+        }
 
-    MemoryContext fConstraintRelationshipMemoryContext = AllocSetContextCreateInternal(
-        CacheMemoryContext,
-        "Forign Constraint Relationship Graph Context",
-        ALLOCSET_DEFAULT_MINSIZE,
-        ALLOCSET_DEFAULT_INITSIZE,
-        ALLOCSET_DEFAULT_MAXSIZE);
+        ForeignConstraintRelationshipMemoryContext = AllocSetContextCreate(
+            CacheMemoryContext,
+            "Foreign Constraint Relationship Graph Context",
+            ALLOCSET_DEFAULT_MINSIZE,
+            ALLOCSET_DEFAULT_INITSIZE,
+            ALLOCSET_DEFAULT_MAXSIZE);
+    }
+    else
+    {
+        fConstraintRelationshipGraph = NULL;
+        MemoryContextReset(ForeignConstraintRelationshipMemoryContext);
+    }
+
+    Assert(fConstraintRelationshipGraph == NULL);
 
     MemoryContext oldContext = MemoryContextSwitchTo(
-        fConstraintRelationshipMemoryContext);
+        ForeignConstraintRelationshipMemoryContext);
 
     fConstraintRelationshipGraph = (ForeignConstraintRelationshipGraph *) palloc(
         sizeof(ForeignConstraintRelationshipGraph));
@@ -631,22 +653,3 @@ CreateOrFindNode(HTAB *adjacencyLists, Oid relid)
 
     return node;
 }
-
-
-/*
- * ClearForeignConstraintRelationshipGraphContext clear all the allocated memory obtained
- * for foreign constraint relationship graph. Since all the variables of relationship
- * graph was obtained within the same context, destroying hash map is enough as
- * it deletes the context.
- */
-void
-ClearForeignConstraintRelationshipGraphContext()
-{
-    if (fConstraintRelationshipGraph == NULL)
-    {
-        return;
-    }
-
-    hash_destroy(fConstraintRelationshipGraph->nodeMap);
-    fConstraintRelationshipGraph = NULL;
-}
diff --git a/src/include/distributed/foreign_key_relationship.h b/src/include/distributed/foreign_key_relationship.h
index ef2c5be33..fbbee831e 100644
--- a/src/include/distributed/foreign_key_relationship.h
+++ b/src/include/distributed/foreign_key_relationship.h
@@ -20,7 +20,6 @@ extern bool ShouldUndistributeCitusLocalTable(Oid relationId);
 extern List * ReferencedRelationIdList(Oid relationId);
 extern List * ReferencingRelationIdList(Oid relationId);
 extern void SetForeignConstraintRelationshipGraphInvalid(void);
-extern void ClearForeignConstraintRelationshipGraphContext(void);
 extern bool OidVisited(HTAB *oidVisitedMap, Oid oid);
 extern void VisitOid(HTAB *oidVisitedMap, Oid oid);
 

From 76fdfa3c0f15ae0ced716f0f0cc2373fc90dcbb5 Mon Sep 17 00:00:00 2001
From: Nils Dijk
Date: Mon, 9 Oct 2023 15:37:21 +0200
Subject: [PATCH 04/14] Add devcontainer for development purposes (#7102)

This change adds a devcontainer configuration to the Citus project. This
devcontainer allows for quick generation of isolated development
environments, either locally on a developer's machine or in a cloud,
like GitHub Codespaces.

The devcontainer is updated automatically by GitHub Actions when its
configuration changes.
For more detailed instructions on how to quickstart the development in a container see CONTRIBUTING.md --- .devcontainer/.gdbinit | 5 + .devcontainer/.gitignore | 1 + .devcontainer/.psqlrc | 7 + .devcontainer/.vscode/Pipfile | 12 + .devcontainer/.vscode/Pipfile.lock | 28 + .../.vscode/generate_c_cpp_properties-json.py | 84 ++ .devcontainer/.vscode/launch.json | 20 + .devcontainer/Dockerfile | 212 ++++ .devcontainer/Makefile | 11 + .devcontainer/devcontainer.json | 32 + .devcontainer/pgenv/config/default.conf | 15 + .devcontainer/src/test/regress/Pipfile | 27 + .devcontainer/src/test/regress/Pipfile.lock | 964 ++++++++++++++++++ .github/workflows/devcontainer.yml | 49 + CONTRIBUTING.md | 24 + 15 files changed, 1491 insertions(+) create mode 100644 .devcontainer/.gdbinit create mode 100644 .devcontainer/.gitignore create mode 100644 .devcontainer/.psqlrc create mode 100644 .devcontainer/.vscode/Pipfile create mode 100644 .devcontainer/.vscode/Pipfile.lock create mode 100755 .devcontainer/.vscode/generate_c_cpp_properties-json.py create mode 100644 .devcontainer/.vscode/launch.json create mode 100644 .devcontainer/Dockerfile create mode 100644 .devcontainer/Makefile create mode 100644 .devcontainer/devcontainer.json create mode 100644 .devcontainer/pgenv/config/default.conf create mode 100644 .devcontainer/src/test/regress/Pipfile create mode 100644 .devcontainer/src/test/regress/Pipfile.lock create mode 100644 .github/workflows/devcontainer.yml diff --git a/.devcontainer/.gdbinit b/.devcontainer/.gdbinit new file mode 100644 index 000000000..9c710923f --- /dev/null +++ b/.devcontainer/.gdbinit @@ -0,0 +1,5 @@ +# gdbpg.py contains scripts to nicely print the postgres datastructures +# while in a gdb session. Since the vscode debugger is based on gdb this +# actually also works when debugging with vscode. Providing nice tools +# to understand the internal datastructures we are working with. 
+source /root/gdbpg.py diff --git a/.devcontainer/.gitignore b/.devcontainer/.gitignore new file mode 100644 index 000000000..3a7f553fc --- /dev/null +++ b/.devcontainer/.gitignore @@ -0,0 +1 @@ +postgresql-*.tar.bz2 diff --git a/.devcontainer/.psqlrc b/.devcontainer/.psqlrc new file mode 100644 index 000000000..7642a9714 --- /dev/null +++ b/.devcontainer/.psqlrc @@ -0,0 +1,7 @@ +\timing on +\pset linestyle unicode +\pset border 2 +\setenv PAGER 'pspg --no-mouse -bX --no-commandbar --no-topbar' +\set HISTSIZE 100000 +\set PROMPT1 '\n%[%033[1m%]%M %n@%/:%>-%p%R%[%033[0m%]%# ' +\set PROMPT2 ' ' diff --git a/.devcontainer/.vscode/Pipfile b/.devcontainer/.vscode/Pipfile new file mode 100644 index 000000000..57909c897 --- /dev/null +++ b/.devcontainer/.vscode/Pipfile @@ -0,0 +1,12 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +docopt = "*" + +[dev-packages] + +[requires] +python_version = "3.9" diff --git a/.devcontainer/.vscode/Pipfile.lock b/.devcontainer/.vscode/Pipfile.lock new file mode 100644 index 000000000..52ee8663c --- /dev/null +++ b/.devcontainer/.vscode/Pipfile.lock @@ -0,0 +1,28 @@ +{ + "_meta": { + "hash": { + "sha256": "6956a6700ead5804aa56bd597c93bb4a13f208d2d49d3b5399365fd240ca0797" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "docopt": { + "hashes": [ + "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" + ], + "index": "pypi", + "version": "==0.6.2" + } + }, + "develop": {} +} diff --git a/.devcontainer/.vscode/generate_c_cpp_properties-json.py b/.devcontainer/.vscode/generate_c_cpp_properties-json.py new file mode 100755 index 000000000..6f49a1818 --- /dev/null +++ b/.devcontainer/.vscode/generate_c_cpp_properties-json.py @@ -0,0 +1,84 @@ +#! /usr/bin/env pipenv-shebang +"""Generate C/C++ properties file for VSCode. + +Uses pgenv to iterate postgres versions and generate +a C/C++ properties file for VSCode containing the +include paths for the postgres headers. + +Usage: + generate_c_cpp_properties-json.py + generate_c_cpp_properties-json.py (-h | --help) + generate_c_cpp_properties-json.py --version + +Options: + -h --help Show this screen. + --version Show version. + +""" +import json +import subprocess + +from docopt import docopt + + +def main(args): + target_path = args[''] + + output = subprocess.check_output(['pgenv', 'versions']) + # typical output is: + # 14.8 pgsql-14.8 + # * 15.3 pgsql-15.3 + # 16beta2 pgsql-16beta2 + # where the line marked with a * is the currently active version + # + # we are only interested in the first word of each line, which is the version number + # thus we strip the whitespace and the * from the line and split it into words + # and take the first word + versions = [line.strip('* ').split()[0] for line in output.decode('utf-8').splitlines()] + + # create the list of configurations per version + configurations = [] + for version in versions: + configurations.append(generate_configuration(version)) + + # create the json file + c_cpp_properties = { + "configurations": configurations, + "version": 4 + } + + # write the c_cpp_properties.json file + with open(target_path, 'w') as f: + json.dump(c_cpp_properties, f, indent=4) + + +def generate_configuration(version): + """Returns a configuration for the given postgres version. 
+ + >>> generate_configuration('14.8') + { + "name": "Citus Development Configuration - Postgres 14.8", + "includePath": [ + "/usr/local/include", + "/home/citus/.pgenv/src/postgresql-14.8/src/**", + "${workspaceFolder}/**", + "${workspaceFolder}/src/include/", + ], + "configurationProvider": "ms-vscode.makefile-tools" + } + """ + return { + "name": f"Citus Development Configuration - Postgres {version}", + "includePath": [ + "/usr/local/include", + f"/home/citus/.pgenv/src/postgresql-{version}/src/**", + "${workspaceFolder}/**", + "${workspaceFolder}/src/include/", + ], + "configurationProvider": "ms-vscode.makefile-tools" + } + + +if __name__ == '__main__': + arguments = docopt(__doc__, version='0.1.0') + main(arguments) diff --git a/.devcontainer/.vscode/launch.json b/.devcontainer/.vscode/launch.json new file mode 100644 index 000000000..290f6573a --- /dev/null +++ b/.devcontainer/.vscode/launch.json @@ -0,0 +1,20 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Attach Citus (devcontainer)", + "type": "cppdbg", + "request": "attach", + "processId": "${command:pickProcess}", + "program": "/home/citus/.pgenv/pgsql/bin/postgres", + "additionalSOLibSearchPath": "/home/citus/.pgenv/pgsql/lib", + "setupCommands": [ + { + "text": "handle SIGUSR1 noprint nostop pass", + "description": "let gdb not stop when SIGUSR1 is sent to process", + "ignoreFailures": true + } + ], + }, + ] +} diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000..6012dc851 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,212 @@ +FROM ubuntu:22.04 AS base + +# environment is to make python pass an interactive shell, probably not the best timezone given a wide variety of colleagues +ENV TZ=UTC +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +# install build tools +RUN apt update && apt install -y \ + bzip2 \ + cpanminus \ + curl \ + flex \ + gcc \ + git \ + libcurl4-gnutls-dev \ + libicu-dev \ + libkrb5-dev \ + liblz4-dev \ + libpam0g-dev \ + libreadline-dev \ + libselinux1-dev \ + libssl-dev \ + libxslt-dev \ + libzstd-dev \ + locales \ + make \ + perl \ + pkg-config \ + python3 \ + python3-pip \ + software-properties-common \ + sudo \ + uuid-dev \ + valgrind \ + zlib1g-dev \ + && add-apt-repository ppa:deadsnakes/ppa -y \ + && apt install -y \ + python3.9-full \ + && apt clean + +RUN sudo pip3 install pipenv pipenv-shebang + +RUN cpanm install IPC::Run + +RUN locale-gen en_US.UTF-8 + +# add the citus user to sudoers and allow all sudoers to login without a password prompt +RUN useradd -ms /bin/bash citus \ + && usermod -aG sudo citus \ + && echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers + +WORKDIR /home/citus +USER citus + +# run all make commands with the number of cores available +RUN echo "export MAKEFLAGS=\"-j \$(nproc)\"" >> "/home/citus/.bashrc" + +RUN git clone --branch v1.3.2 --depth 1 https://github.com/theory/pgenv.git .pgenv +COPY --chown=citus:citus pgenv/config/ .pgenv/config/ +ENV PATH="/home/citus/.pgenv/bin:${PATH}" +ENV PATH="/home/citus/.pgenv/pgsql/bin:${PATH}" + +USER citus + +# build postgres versions separately for effective parrallelism and caching of already built versions when changing only certain versions +FROM base AS pg14 +RUN MAKEFLAGS="-j $(nproc)" pgenv build 14.9 +RUN rm .pgenv/src/*.tar* +RUN make -C .pgenv/src/postgresql-*/ clean +RUN make -C .pgenv/src/postgresql-*/src/include install + +# create a staging directory with all files we want to copy from our pgenv build +# we will 
copy the contents of the staged folder into the final image at once +RUN mkdir .pgenv-staging/ +RUN cp -r .pgenv/src .pgenv/pgsql-* .pgenv/config .pgenv-staging/ +RUN rm .pgenv-staging/config/default.conf + +FROM base AS pg15 +RUN MAKEFLAGS="-j $(nproc)" pgenv build 15.4 +RUN rm .pgenv/src/*.tar* +RUN make -C .pgenv/src/postgresql-*/ clean +RUN make -C .pgenv/src/postgresql-*/src/include install + +# create a staging directory with all files we want to copy from our pgenv build +# we will copy the contents of the staged folder into the final image at once +RUN mkdir .pgenv-staging/ +RUN cp -r .pgenv/src .pgenv/pgsql-* .pgenv/config .pgenv-staging/ +RUN rm .pgenv-staging/config/default.conf + +FROM base AS pg16 +RUN MAKEFLAGS="-j $(nproc)" pgenv build 16.0 +RUN rm .pgenv/src/*.tar* +RUN make -C .pgenv/src/postgresql-*/ clean +RUN make -C .pgenv/src/postgresql-*/src/include install + +# create a staging directory with all files we want to copy from our pgenv build +# we will copy the contents of the staged folder into the final image at once +RUN mkdir .pgenv-staging/ +RUN cp -r .pgenv/src .pgenv/pgsql-* .pgenv/config .pgenv-staging/ +RUN rm .pgenv-staging/config/default.conf + +FROM base AS uncrustify-builder + +RUN sudo apt update && sudo apt install -y cmake tree + +WORKDIR /uncrustify +RUN curl -L https://github.com/uncrustify/uncrustify/archive/uncrustify-0.68.1.tar.gz | tar xz +WORKDIR /uncrustify/uncrustify-uncrustify-0.68.1/ +RUN mkdir build +WORKDIR /uncrustify/uncrustify-uncrustify-0.68.1/build/ +RUN cmake .. +RUN make -sj8 + +RUN make install DESTDIR=/uncrustify + +# builder for all pipenv's to get them contained in a single layer +FROM base AS pipenv + +WORKDIR /workspaces/citus/ + +# tools to sync pgenv with vscode +COPY --chown=citus:citus .vscode/Pipfile .vscode/Pipfile.lock .devcontainer/.vscode/ +RUN ( cd .devcontainer/.vscode && pipenv install ) + +# environment to run our failure tests +COPY --chown=citus:citus src/ src/ +RUN ( cd src/test/regress && pipenv install ) + +# assemble the final container by copying over the artifacts from separately build containers +FROM base AS devcontainer + +LABEL org.opencontainers.image.source=https://github.com/citusdata/citus +LABEL org.opencontainers.image.description="Development container for the Citus project" +LABEL org.opencontainers.image.licenses=AGPL-3.0-only + +RUN yes | sudo unminimize + +# install developer productivity tools +RUN sudo apt update \ + && sudo apt install -y \ + autoconf2.69 \ + bash-completion \ + fswatch \ + gdb \ + htop \ + libdbd-pg-perl \ + libdbi-perl \ + lsof \ + man \ + net-tools \ + pspg \ + tree \ + vim \ + && sudo apt clean + +# Since gdb will run in the context of the root user when debugging citus we will need to both +# download the gdbpg.py script as the root user, into their home directory, as well as add .gdbinit +# as a file owned by root +# This will make that as soon as the debugger attaches to a postgres backend (or frankly any other process) +# the gdbpg.py script will be sourced and the developer can direcly use it. 
+RUN sudo curl -o /root/gdbpg.py https://raw.githubusercontent.com/tvesely/gdbpg/6065eee7872457785f830925eac665aa535caf62/gdbpg.py +COPY --chown=root:root .gdbinit /root/ + +# add some common tools to the final container +# bin directory for user tools +RUN mkdir .bin +ENV PATH="/home/citus/.bin:${PATH}" + +# for persistent bash history across devcontainers we need to have +# a) a directory to store the history in +# b) a prompt command to append the history to the file +# c) specify the history file to store the history in +# b and c are done in the .bashrc to make it persistent across shells only +RUN sudo install -d -o citus -g citus /commandhistory \ + && echo "export PROMPT_COMMAND='history -a' && export HISTFILE=/commandhistory/.bash_history" >> "/home/citus/.bashrc" + +# install citus-dev +RUN git clone --branch develop https://github.com/citusdata/tools.git citus-tools \ + && ( cd citus-tools/citus_dev && pipenv install ) \ + && ln -s /home/citus/citus-tools/citus_dev/citus_dev-pipenv .bin/citus_dev \ + && sudo make -C citus-tools/uncrustify install bindir=/usr/local/bin pkgsysconfdir=/usr/local/etc/ \ + && mkdir -p ~/.local/share/bash-completion/completions/ \ + && ln -s ~/citus-tools/citus_dev/bash_completion ~/.local/share/bash-completion/completions/citus_dev + +# TODO some LC_ALL errors, possibly solved by locale-gen +RUN git clone https://github.com/so-fancy/diff-so-fancy.git \ + && ln -s /home/citus/diff-so-fancy/diff-so-fancy .bin/ + +COPY --link --from=uncrustify-builder /uncrustify/usr/ /usr/ + +COPY --link --from=pg14 /home/citus/.pgenv-staging/ /home/citus/.pgenv/ +COPY --link --from=pg15 /home/citus/.pgenv-staging/ /home/citus/.pgenv/ +COPY --link --from=pg16 /home/citus/.pgenv-staging/ /home/citus/.pgenv/ + +COPY --link --from=pipenv /home/citus/.local/share/virtualenvs/ /home/citus/.local/share/virtualenvs/ + +# place to run your cluster with citus_dev +VOLUME /data +RUN sudo mkdir /data \ + && sudo chown citus:citus /data + +COPY --chown=citus:citus .psqlrc . 
+ +# with the copy linking of layers github actions seem to misbehave with the ownership of the +# directories leading upto the link, hence a small patch layer to have to right ownerships set +RUN sudo chown --from=root:root citus:citus -R ~ + +# sets default pg version +RUN pgenv switch 16.0 + +# make connecting to the coordinator easy +ENV PGPORT=9700 diff --git a/.devcontainer/Makefile b/.devcontainer/Makefile new file mode 100644 index 000000000..8f4174104 --- /dev/null +++ b/.devcontainer/Makefile @@ -0,0 +1,11 @@ + +init: ../.vscode/c_cpp_properties.json ../.vscode/launch.json + +../.vscode: + mkdir -p ../.vscode + +../.vscode/launch.json: ../.vscode .vscode/launch.json + cp .vscode/launch.json ../.vscode/launch.json + +../.vscode/c_cpp_properties.json: ../.vscode + ./.vscode/generate_c_cpp_properties-json.py ../.vscode/c_cpp_properties.json diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..58c9e07a8 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,32 @@ +{ + "image": "ghcr.io/citusdata/citus-devcontainer:main", + "runArgs": [ + "--cap-add=SYS_PTRACE", + ], + "forwardPorts": [9700], + "customizations": { + "vscode": { + "extensions": [ + "eamodio.gitlens", + "GitHub.copilot-chat", + "GitHub.copilot", + "github.vscode-github-actions", + "github.vscode-pull-request-github", + "ms-vscode.cpptools-extension-pack", + "ms-vsliveshare.vsliveshare", + ], + "settings": { + "files.exclude": { + "**/*.o": true, + "**/.deps/": true, + } + }, + } + }, + "mounts": [ + "type=volume,target=/data", + "source=citus-bashhistory,target=/commandhistory,type=volume", + ], + "updateContentCommand": "./configure", + "postCreateCommand": "make -C .devcontainer/", +} diff --git a/.devcontainer/pgenv/config/default.conf b/.devcontainer/pgenv/config/default.conf new file mode 100644 index 000000000..ab55493f9 --- /dev/null +++ b/.devcontainer/pgenv/config/default.conf @@ -0,0 +1,15 @@ +PGENV_MAKE_OPTIONS=(-s) + +PGENV_CONFIGURE_OPTIONS=( + --enable-debug + --enable-depend + --enable-cassert + --enable-tap-tests + 'CFLAGS=-ggdb -Og -g3 -fno-omit-frame-pointer -DUSE_VALGRIND' + --with-openssl + --with-libxml + --with-libxslt + --with-uuid=e2fs + --with-icu + --with-lz4 +) diff --git a/.devcontainer/src/test/regress/Pipfile b/.devcontainer/src/test/regress/Pipfile new file mode 100644 index 000000000..d4b2cc39f --- /dev/null +++ b/.devcontainer/src/test/regress/Pipfile @@ -0,0 +1,27 @@ +[[source]] +name = "pypi" +url = "https://pypi.python.org/simple" +verify_ssl = true + +[packages] +mitmproxy = {editable = true, ref = "main", git = "https://github.com/citusdata/mitmproxy.git"} +construct = "==2.9.45" +docopt = "==0.6.2" +cryptography = ">=41.0.4" +pytest = "*" +psycopg = "*" +filelock = "*" +pytest-asyncio = "*" +pytest-timeout = "*" +pytest-xdist = "*" +pytest-repeat = "*" +pyyaml = "*" + +[dev-packages] +black = "*" +isort = "*" +flake8 = "*" +flake8-bugbear = "*" + +[requires] +python_version = "3.9" diff --git a/.devcontainer/src/test/regress/Pipfile.lock b/.devcontainer/src/test/regress/Pipfile.lock new file mode 100644 index 000000000..15cb7ecda --- /dev/null +++ b/.devcontainer/src/test/regress/Pipfile.lock @@ -0,0 +1,964 @@ +{ + "_meta": { + "hash": { + "sha256": "b92bf682aeeea1a66a16beaf78584a5318fd0ae908ce85c7e2a4807aa2bee532" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.python.org/simple", + "verify_ssl": true + } + ] + }, + "default": 
{ + "asgiref": { + "hashes": [ + "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9", + "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214" + ], + "markers": "python_version >= '3.6'", + "version": "==3.4.1" + }, + "blinker": { + "hashes": [ + "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6" + ], + "version": "==1.4" + }, + "brotli": { + "hashes": [ + "sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019", + "sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df", + "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d", + "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8", + "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b", + "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c", + "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c", + "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70", + "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f", + "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181", + "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130", + "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19", + "sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be", + "sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be", + "sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a", + "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa", + "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429", + "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126", + "sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7", + "sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad", + "sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679", + "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4", + "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0", + "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b", + "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6", + "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438", + "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f", + "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389", + "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6", + "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26", + "sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337", + "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7", + "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14", + "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2", + "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430", + "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296", + "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12", + "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f", + "sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7", + "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d", + 
"sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a", + "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452", + "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c", + "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761", + "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649", + "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b", + "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea", + "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c", + "sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f", + "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a", + "sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031", + "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267", + "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5", + "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7", + "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d", + "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c", + "sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43", + "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa", + "sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde", + "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17", + "sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f", + "sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8", + "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb", + "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb", + "sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d", + "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b", + "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4", + "sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755", + "sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a", + "sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d", + "sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a", + "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3", + "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7", + "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1", + "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb", + "sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a", + "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91", + "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b", + "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1", + "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806", + "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3", + "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1" + ], + "version": "==1.0.9" + }, + "certifi": { + "hashes": [ + "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082", + "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9" + ], + "markers": "python_version >= '3.6'", + "version": "==2023.7.22" + }, + "cffi": { + "hashes": [ + 
"sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5", + "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef", + "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104", + "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426", + "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405", + "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375", + "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a", + "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e", + "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc", + "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf", + "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185", + "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497", + "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3", + "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35", + "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c", + "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83", + "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21", + "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca", + "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984", + "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac", + "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd", + "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee", + "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a", + "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2", + "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192", + "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7", + "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585", + "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f", + "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e", + "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27", + "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b", + "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e", + "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e", + "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d", + "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c", + "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415", + "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82", + "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02", + "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314", + "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325", + "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c", + "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3", + "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914", + "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045", + "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d", + "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9", + 
"sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5", + "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2", + "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c", + "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3", + "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2", + "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8", + "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d", + "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d", + "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9", + "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162", + "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76", + "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4", + "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e", + "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9", + "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6", + "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b", + "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01", + "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0" + ], + "version": "==1.15.1" + }, + "click": { + "hashes": [ + "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1", + "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb" + ], + "markers": "python_version >= '3.6'", + "version": "==8.0.4" + }, + "construct": { + "hashes": [ + "sha256:2271a0efd0798679dea825ff47e22a4c550456a5db0ba8baa82f7eae0af0118c" + ], + "index": "pypi", + "version": "==2.9.45" + }, + "cryptography": { + "hashes": [ + "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67", + "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311", + "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8", + "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13", + "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143", + "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f", + "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829", + "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd", + "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397", + "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac", + "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d", + "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a", + "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839", + "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e", + "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6", + "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9", + "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860", + "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca", + "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91", + "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d", + "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714", + 
"sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb", + "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==41.0.4" + }, + "docopt": { + "hashes": [ + "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" + ], + "index": "pypi", + "version": "==0.6.2" + }, + "exceptiongroup": { + "hashes": [ + "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9", + "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3" + ], + "markers": "python_version < '3.11'", + "version": "==1.1.3" + }, + "execnet": { + "hashes": [ + "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41", + "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.2" + }, + "filelock": { + "hashes": [ + "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4", + "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==3.12.4" + }, + "flask": { + "hashes": [ + "sha256:59da8a3170004800a2837844bfa84d49b022550616070f7cb1a659682b2e7c9f", + "sha256:e1120c228ca2f553b470df4a5fa927ab66258467526069981b3eb0a91902687d" + ], + "markers": "python_version >= '3.6'", + "version": "==2.0.3" + }, + "h11": { + "hashes": [ + "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6", + "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042" + ], + "markers": "python_version >= '3.6'", + "version": "==0.12.0" + }, + "h2": { + "hashes": [ + "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d", + "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb" + ], + "markers": "python_full_version >= '3.6.1'", + "version": "==4.1.0" + }, + "hpack": { + "hashes": [ + "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c", + "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095" + ], + "markers": "python_full_version >= '3.6.1'", + "version": "==4.0.0" + }, + "hyperframe": { + "hashes": [ + "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15", + "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914" + ], + "markers": "python_full_version >= '3.6.1'", + "version": "==6.0.1" + }, + "iniconfig": { + "hashes": [ + "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.0" + }, + "itsdangerous": { + "hashes": [ + "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44", + "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.2" + }, + "jinja2": { + "hashes": [ + "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", + "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" + ], + "markers": "python_version >= '3.7'", + "version": "==3.1.2" + }, + "kaitaistruct": { + "hashes": [ + "sha256:3d5845817ec8a4d5504379cc11bd570b038850ee49c4580bc0998c8fb1d327ad" + ], + "version": "==0.9" + }, + "ldap3": { + "hashes": [ + "sha256:2bc966556fc4d4fa9f445a1c31dc484ee81d44a51ab0e2d0fd05b62cac75daa6", + 
"sha256:5630d1383e09ba94839e253e013f1aa1a2cf7a547628ba1265cb7b9a844b5687", + "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70", + "sha256:5ab7febc00689181375de40c396dcad4f2659cd260fc5e94c508b6d77c17e9d5", + "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f" + ], + "version": "==2.9.1" + }, + "markupsafe": { + "hashes": [ + "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e", + "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e", + "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431", + "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686", + "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c", + "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559", + "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc", + "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb", + "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939", + "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c", + "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0", + "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4", + "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9", + "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575", + "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba", + "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d", + "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd", + "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3", + "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00", + "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155", + "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac", + "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52", + "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f", + "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8", + "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b", + "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007", + "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24", + "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea", + "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198", + "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0", + "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee", + "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be", + "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2", + "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1", + "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707", + "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6", + "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c", + "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58", + "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823", + "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779", + "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636", + 
"sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c", + "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad", + "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee", + "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc", + "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2", + "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48", + "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7", + "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e", + "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b", + "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa", + "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5", + "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e", + "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb", + "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9", + "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57", + "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc", + "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc", + "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2", + "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.3" + }, + "mitmproxy": { + "editable": true, + "git": "https://github.com/citusdata/mitmproxy.git", + "markers": "python_version >= '3.10'", + "ref": "2fd18ef051b987925a36337ab1d61aa674353b44" + }, + "msgpack": { + "hashes": [ + "sha256:00ce5f827d4f26fc094043e6f08b6069c1b148efa2631c47615ae14fb6cafc89", + "sha256:04450e4b5e1e662e7c86b6aafb7c230af9334fd0becf5e6b80459a507884241c", + "sha256:099c3d8a027367e1a6fc55d15336f04ff65c60c4f737b5739f7db4525c65fe9e", + "sha256:102cfb54eaefa73e8ca1e784b9352c623524185c98e057e519545131a56fb0af", + "sha256:14db7e1b7a7ed362b2f94897bf2486c899c8bb50f6e34b2db92fe534cdab306f", + "sha256:159cfec18a6e125dd4723e2b1de6f202b34b87c850fb9d509acfd054c01135e9", + "sha256:1dc67b40fe81217b308ab12651adba05e7300b3a2ccf84d6b35a878e308dd8d4", + "sha256:1f0e36a5fa7a182cde391a128a64f437657d2b9371dfa42eda3436245adccbf5", + "sha256:229ccb6713c8b941eaa5cf13dc7478eba117f21513b5893c35e44483e2f0c9c8", + "sha256:25d3746da40f3c8c59c3b1d001e49fd2aa17904438f980d9a391370366df001e", + "sha256:32c0aff31f33033f4961abc01f78497e5e07bac02a508632aef394b384d27428", + "sha256:33bbf47ea5a6ff20c23426106e81863cdbb5402de1825493026ce615039cc99d", + "sha256:35ad5aed9b52217d4cea739d0ea3a492a18dd86fecb4b132668a69f27fb0363b", + "sha256:3910211b0ab20be3a38e0bb944ed45bd4265d8d9f11a3d1674b95b298e08dd5c", + "sha256:3b5658b1f9e486a2eec4c0c688f213a90085b9cf2fec76ef08f98fdf6c62f4b9", + "sha256:40b801b768f5a765e33c68f30665d3c6ee1c8623a2d2bb78e6e59f2db4e4ceb7", + "sha256:47275ff73005a3e5e146e50baa2378e1730cba6e292f0222bc496a8e4c4adfc8", + "sha256:55bb4a1bf94e39447bc08238a2fb8a767460388a8192f67c103442eb36920887", + "sha256:5b08676a17e3f791daad34d5fcb18479e9c85e7200d5a17cbe8de798643a7e37", + "sha256:5b16344032a27b2ccfd341f89dadf3e4ef6407d91e4b93563c14644a8abb3ad7", + "sha256:5c5e05e4f5756758c58a8088aa10dc70d851c89f842b611fdccfc0581c1846bc", + "sha256:5cd67674db3c73026e0a2c729b909780e88bd9cbc8184256f9567640a5d299a8", + "sha256:5e7fae9ca93258a956551708cf60dc6c8145574e32ce8c8c4d894e63bcb04341", + 
"sha256:61213482b5a387ead9e250e9e3cb290292feca39dc83b41c3b1b7b8ffc8d8ecb", + "sha256:619a63753ba9e792fe3c6c0fc2b9ee2cfbd92153dd91bee029a89a71eb2942cd", + "sha256:652e4b7497825b0af6259e2c54700e6dc33d2fc4ed92b8839435090d4c9cc911", + "sha256:68569509dd015fcdd1e6b2b3ccc8c51fd27d9a97f461ccc909270e220ee09685", + "sha256:6a01a072b2219b65a6ff74df208f20b2cac9401c60adb676ee34e53b4c651077", + "sha256:70843788c85ca385846a2d2f836efebe7bb2687ca0734648bf5c9dc6c55602d2", + "sha256:76820f2ece3b0a7c948bbb6a599020e29574626d23a649476def023cbb026787", + "sha256:7a006c300e82402c0c8f1ded11352a3ba2a61b87e7abb3054c845af2ca8d553c", + "sha256:7baf16fd8908a025c4a8d7b699103e72d41f967e2aee5a2065432bcdbd9fd06e", + "sha256:7ecf431786019a7bfedc28281531d706627f603e3691d64eccdbce3ecd353823", + "sha256:885de1ed5ea01c1bfe0a34c901152a264c3c1f8f1d382042b92ea354bd14bb0e", + "sha256:88cdb1da7fdb121dbb3116910722f5acab4d6e8bfcacab8fafe27e2e7744dc6a", + "sha256:95ade0bd4cf69e04e8b8f8ec2d197d9c9c4a9b6902e048dc7456bf6d82e12a80", + "sha256:9b88dc97ba86c96b964c3745a445d9a65f76fe21955a953064fe04adb63e9367", + "sha256:9c780d992f5d734432726b92a0c87bf1857c3d85082a8dea29cbf56e44a132b3", + "sha256:9f85200ea102276afdd3749ca94747f057bbb868d1c52921ee2446730b508d0f", + "sha256:a1cf98afa7ad5e7012454ca3fde254499a13f9d92fd50cb46118118a249a1355", + "sha256:a635aecf1047255576dbb0927cbf9a7aa4a68e9d54110cc3c926652d18f144e0", + "sha256:ae97504958d0bc58c1152045c170815d5c4f8af906561ce044b6358b43d0c97e", + "sha256:b06a5095a79384760625b5de3f83f40b3053a385fb893be8a106fbbd84c14980", + "sha256:b5c8dd9a386a66e50bd7fa22b7a49fb8ead2b3574d6bd69eb1caced6caea0803", + "sha256:bae6c561f11b444b258b1b4be2bdd1e1cf93cd1d80766b7e869a79db4543a8a8", + "sha256:bbb4448a05d261fae423d5c0b0974ad899f60825bc77eabad5a0c518e78448c2", + "sha256:bd6af61388be65a8701f5787362cb54adae20007e0cc67ca9221a4b95115583b", + "sha256:bf652839d16de91fe1cfb253e0a88db9a548796939533894e07f45d4bdf90a5f", + "sha256:d6d25b8a5c70e2334ed61a8da4c11cd9b97c6fbd980c406033f06e4463fda006", + "sha256:da057d3652e698b00746e47f06dbb513314f847421e857e32e1dc61c46f6c052", + "sha256:e0ed35d6d6122d0baa9a1b59ebca4ee302139f4cfb57dab85e4c73ab793ae7ed", + "sha256:e36560d001d4ba469d469b02037f2dd404421fd72277d9474efe9f03f83fced5", + "sha256:f4321692e7f299277e55f322329b2c972d93bb612d85f3fda8741bec5c6285ce", + "sha256:f75114c05ec56566da6b55122791cf5bb53d5aada96a98c016d6231e03132f76", + "sha256:fb4571efe86545b772a4630fee578c213c91cbcfd20347806e47fd4e782a18fe", + "sha256:fc97aa4b4fb928ff4d3b74da7c30b360d0cb3ede49a5a6e1fd9705f49aea1deb" + ], + "markers": "python_version >= '3.8'", + "version": "==1.0.6" + }, + "packaging": { + "hashes": [ + "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", + "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" + ], + "markers": "python_version >= '3.7'", + "version": "==23.1" + }, + "passlib": { + "hashes": [ + "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", + "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04" + ], + "version": "==1.7.4" + }, + "pluggy": { + "hashes": [ + "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12", + "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7" + ], + "markers": "python_version >= '3.8'", + "version": "==1.3.0" + }, + "protobuf": { + "hashes": [ + "sha256:0c44e01f74109decea196b5b313b08edb5316df77313995594a6981e95674259", + "sha256:15cdecb0d192ab5f17cdc21a9c0ae7b5c6c4451e42c8a888a4f3344c190e369c", + 
"sha256:196a153e487c0e20d62259872bbf2e1c4fa18e2ce97e20984fcbf9d8b151058d", + "sha256:3149c373e9b7ce296bb24d42a3eb677d620185b5dff2c390b2cf57baf79afdc1", + "sha256:370a6b885e94adda021d4cbe43accdfbf6a02af651a0be337a28906a3fa77f3d", + "sha256:474247630834f93214fafce49d2ee6ff4c036c8c5382b88432b7eae6f08f131b", + "sha256:6380aae2683d0d1b41199e591c8ba06f867e8a778d44309af87073c1b34a9f3a", + "sha256:6741d7d1cfcbdd6cf610f38b7976cf8c0b41022203555298925e4061b6616608", + "sha256:700787cb56b4cb7b8ed5f7d197b9d8f30080f257f3c7431eec1fdd8060660929", + "sha256:8117b52c2531e4033f7d02b9be5a78564da41a8b02c255e1b731ad4bd75e7dc0", + "sha256:850da2072d98c6e576b7eb29734cdde6fd9f5d157e43d7818d79f4b373ef5d51", + "sha256:85d1fb5ff1d638a0045bbe4f01a8f287023aa4f2b29011445b1be0edc74a2103", + "sha256:93bca9aaeee8008e15696c2a6b5e56b992da03f9d237ff54310e397d635f8305", + "sha256:98d414513ec44bb3ba77ebdeffcbbe6ebbf3630c767d37a285890c2414fdd4e2", + "sha256:a7f91a4e5bf3cc58b2830c9cb01b04ac5e211c288048e9296cd407ec0455fb89", + "sha256:abbcb8ecd19cfb729b9b71f9a453e37c0c1c017be4bff47804ff25150685386d", + "sha256:b03966ca4d1aa7850f5bf0d841c22a8eeb6ce091f77e585ffeb8b95a6b0a96c4", + "sha256:cde2a73b03049b904dbc5d0f500b97e11abb4109dbe2940e6a1595e2eef4e8a9", + "sha256:d52a687e2c74c40f45abd6906f833d4e40f0f8cfa4226a80e4695fedafe6c57e", + "sha256:e68ad00695547d9397dd14abd3efba23cb31cef67228f4512d41396971889812", + "sha256:e9bffd52d6ee039a1cafb72475b2900c6fd0f0dca667fb7a09af0a3e119e78cb" + ], + "markers": "python_version >= '3.5'", + "version": "==3.18.3" + }, + "psycopg": { + "hashes": [ + "sha256:7542c45810ea16356e5126c9b4291cbc3802aa326fcbba09ff154fe380de29be", + "sha256:cd711edb64b07d7f8a233c365806caf7e55bbe7cbbd8d5c680f672bb5353c8d5" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==3.1.11" + }, + "publicsuffix2": { + "hashes": [ + "sha256:00f8cc31aa8d0d5592a5ced19cccba7de428ebca985db26ac852d920ddd6fe7b", + "sha256:786b5e36205b88758bd3518725ec8cfe7a8173f5269354641f581c6b80a99893" + ], + "version": "==2.20191221" + }, + "pyasn1": { + "hashes": [ + "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", + "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==0.5.0" + }, + "pycparser": { + "hashes": [ + "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9", + "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206" + ], + "version": "==2.21" + }, + "pyopenssl": { + "hashes": [ + "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2", + "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac" + ], + "markers": "python_version >= '3.6'", + "version": "==23.2.0" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.7" + }, + "pyperclip": { + "hashes": [ + "sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57" + ], + "version": "==1.8.2" + }, + "pytest": { + "hashes": [ + "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002", + "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==7.4.2" + }, + 
"pytest-asyncio": { + "hashes": [ + "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d", + "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==0.21.1" + }, + "pytest-repeat": { + "hashes": [ + "sha256:4474a7d9e9137f6d8cc8ae297f8c4168d33c56dd740aa78cfffe562557e6b96e", + "sha256:5cd3289745ab3156d43eb9c8e7f7d00a926f3ae5c9cf425bec649b2fe15bad5b" + ], + "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==0.9.1" + }, + "pytest-timeout": { + "hashes": [ + "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9", + "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6" + ], + "index": "pypi", + "markers": "python_version >= '3.6'", + "version": "==2.1.0" + }, + "pytest-xdist": { + "hashes": [ + "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93", + "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==3.3.1" + }, + "pyyaml": { + "hashes": [ + "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", + "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", + "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", + "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", + "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206", + "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27", + "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595", + "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62", + "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98", + "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696", + "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290", + "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", + "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", + "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6", + "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867", + "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47", + "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", + "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6", + "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3", + "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", + "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938", + "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", + "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c", + "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735", + "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d", + "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", + "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", + "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba", + "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", + "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5", + 
"sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd", + "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3", + "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0", + "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515", + "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c", + "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c", + "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924", + "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", + "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", + "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", + "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", + "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", + "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a", + "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", + "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", + "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa", + "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", + "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585", + "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d", + "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f" + ], + "index": "pypi", + "markers": "python_version >= '3.6'", + "version": "==6.0.1" + }, + "ruamel.yaml": { + "hashes": [ + "sha256:1a771fc92d3823682b7f0893ad56cb5a5c87c48e62b5399d6f42c8759a583b33", + "sha256:ea21da1198c4b41b8e7a259301cc9710d3b972bf8ba52f06218478e6802dd1f1" + ], + "markers": "python_version >= '3'", + "version": "==0.17.16" + }, + "sortedcontainers": { + "hashes": [ + "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", + "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0" + ], + "version": "==2.4.0" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.1" + }, + "tornado": { + "hashes": [ + "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f", + "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5", + "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d", + "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3", + "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2", + "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a", + "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16", + "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a", + "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17", + "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0", + "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe" + ], + "markers": "python_version >= '3.8'", + "version": "==6.3.3" + }, + "typing-extensions": { + "hashes": [ + "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0", + "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef" + ], + "markers": "python_version >= '3.8'", + 
"version": "==4.8.0" + }, + "urwid": { + "hashes": [ + "sha256:588bee9c1cb208d0906a9f73c613d2bd32c3ed3702012f51efe318a3f2127eae" + ], + "version": "==2.1.2" + }, + "werkzeug": { + "hashes": [ + "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8", + "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528" + ], + "markers": "python_version >= '3.8'", + "version": "==2.3.7" + }, + "wsproto": { + "hashes": [ + "sha256:868776f8456997ad0d9720f7322b746bbe9193751b5b290b7f924659377c8c38", + "sha256:d8345d1808dd599b5ffb352c25a367adb6157e664e140dbecba3f9bc007edb9f" + ], + "markers": "python_full_version >= '3.6.1'", + "version": "==1.0.0" + }, + "zstandard": { + "hashes": [ + "sha256:1c5ef399f81204fbd9f0df3debf80389fd8aa9660fe1746d37c80b0d45f809e9", + "sha256:1faefe33e3d6870a4dce637bcb41f7abb46a1872a595ecc7b034016081c37543", + "sha256:1fb23b1754ce834a3a1a1e148cc2faad76eeadf9d889efe5e8199d3fb839d3c6", + "sha256:22f127ff5da052ffba73af146d7d61db874f5edb468b36c9cb0b857316a21b3d", + "sha256:2353b61f249a5fc243aae3caa1207c80c7e6919a58b1f9992758fa496f61f839", + "sha256:24cdcc6f297f7c978a40fb7706877ad33d8e28acc1786992a52199502d6da2a4", + "sha256:31e35790434da54c106f05fa93ab4d0fab2798a6350e8a73928ec602e8505836", + "sha256:3547ff4eee7175d944a865bbdf5529b0969c253e8a148c287f0668fe4eb9c935", + "sha256:378ac053c0cfc74d115cbb6ee181540f3e793c7cca8ed8cd3893e338af9e942c", + "sha256:3e1cd2db25117c5b7c7e86a17cde6104a93719a9df7cb099d7498e4c1d13ee5c", + "sha256:3fe469a887f6142cc108e44c7f42c036e43620ebaf500747be2317c9f4615d4f", + "sha256:4800ab8ec94cbf1ed09c2b4686288750cab0642cb4d6fba2a56db66b923aeb92", + "sha256:52de08355fd5cfb3ef4533891092bb96229d43c2069703d4aff04fdbedf9c92f", + "sha256:5752f44795b943c99be367fee5edf3122a1690b0d1ecd1bd5ec94c7fd2c39c94", + "sha256:5d53f02aeb8fdd48b88bc80bece82542d084fb1a7ba03bf241fd53b63aee4f22", + "sha256:69b7a5720b8dfab9005a43c7ddb2e3ccacbb9a2442908ae4ed49dd51ab19698a", + "sha256:6cc162b5b6e3c40b223163a9ea86cd332bd352ddadb5fd142fc0706e5e4eaaff", + "sha256:6f5d0330bc992b1e267a1b69fbdbb5ebe8c3a6af107d67e14c7a5b1ede2c5945", + "sha256:6ffadd48e6fe85f27ca3ca10cfd3ef3d0f933bef7316870285ffeb58d791ca9c", + "sha256:72a011678c654df8323aa7b687e3147749034fdbe994d346f139ab9702b59cea", + "sha256:77d26452676f471223571efd73131fd4a626622c7960458aab2763e025836fc5", + "sha256:7a88cc773ffe55992ff7259a8df5fb3570168d7138c69aadba40142d0e5ce39a", + "sha256:7b16bd74ae7bfbaca407a127e11058b287a4267caad13bd41305a5e630472549", + "sha256:855d95ec78b6f0ff66e076d5461bf12d09d8e8f7e2b3fc9de7236d1464fd730e", + "sha256:8baf7991547441458325ca8fafeae79ef1501cb4354022724f3edd62279c5b2b", + "sha256:8fb77dd152054c6685639d855693579a92f276b38b8003be5942de31d241ebfb", + "sha256:92d49cc3b49372cfea2d42f43a2c16a98a32a6bc2f42abcde121132dbfc2f023", + "sha256:94d0de65e37f5677165725f1fc7fb1616b9542d42a9832a9a0bdcba0ed68b63b", + "sha256:9867206093d7283d7de01bd2bf60389eb4d19b67306a0a763d1a8a4dbe2fb7c3", + "sha256:9ee3c992b93e26c2ae827404a626138588e30bdabaaf7aa3aa25082a4e718790", + "sha256:a4f8af277bb527fa3d56b216bda4da931b36b2d3fe416b6fc1744072b2c1dbd9", + "sha256:ab9f19460dfa4c5dd25431b75bee28b5f018bf43476858d64b1aa1046196a2a0", + "sha256:ac43c1821ba81e9344d818c5feed574a17f51fca27976ff7d022645c378fbbf5", + "sha256:af5a011609206e390b44847da32463437505bf55fd8985e7a91c52d9da338d4b", + "sha256:b0975748bb6ec55b6d0f6665313c2cf7af6f536221dccd5879b967d76f6e7899", + "sha256:b4963dad6cf28bfe0b61c3265d1c74a26a7605df3445bfcd3ba25de012330b2d", + 
"sha256:b7d3a484ace91ed827aa2ef3b44895e2ec106031012f14d28bd11a55f24fa734", + "sha256:bd3c478a4a574f412efc58ba7e09ab4cd83484c545746a01601636e87e3dbf23", + "sha256:c9e2dcb7f851f020232b991c226c5678dc07090256e929e45a89538d82f71d2e", + "sha256:d25c8eeb4720da41e7afbc404891e3a945b8bb6d5230e4c53d23ac4f4f9fc52c", + "sha256:dc8c03d0c5c10c200441ffb4cce46d869d9e5c4ef007f55856751dc288a2dffd", + "sha256:ec58e84d625553d191a23d5988a19c3ebfed519fff2a8b844223e3f074152163", + "sha256:eda0719b29792f0fea04a853377cfff934660cb6cd72a0a0eeba7a1f0df4a16e", + "sha256:edde82ce3007a64e8434ccaf1b53271da4f255224d77b880b59e7d6d73df90c8", + "sha256:f36722144bc0a5068934e51dca5a38a5b4daac1be84f4423244277e4baf24e7a", + "sha256:f8bb00ced04a8feff05989996db47906673ed45b11d86ad5ce892b5741e5f9dd", + "sha256:f98fc5750aac2d63d482909184aac72a979bfd123b112ec53fd365104ea15b1c", + "sha256:ff5b75f94101beaa373f1511319580a010f6e03458ee51b1a386d7de5331440a" + ], + "markers": "python_version >= '3.5'", + "version": "==0.15.2" + } + }, + "develop": { + "attrs": { + "hashes": [ + "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", + "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" + ], + "markers": "python_version >= '3.7'", + "version": "==23.1.0" + }, + "black": { + "hashes": [ + "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f", + "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7", + "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100", + "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573", + "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d", + "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f", + "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9", + "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300", + "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948", + "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325", + "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9", + "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71", + "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186", + "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f", + "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe", + "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855", + "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80", + "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393", + "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c", + "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204", + "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377", + "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==23.9.1" + }, + "click": { + "hashes": [ + "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1", + "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb" + ], + "markers": "python_version >= '3.6'", + "version": "==8.0.4" + }, + "flake8": { + "hashes": [ + "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23", + "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5" + ], + "index": "pypi", + "markers": 
"python_full_version >= '3.8.1'", + "version": "==6.1.0" + }, + "flake8-bugbear": { + "hashes": [ + "sha256:90cf04b19ca02a682feb5aac67cae8de742af70538590509941ab10ae8351f71", + "sha256:b182cf96ea8f7a8595b2f87321d7d9b28728f4d9c3318012d896543d19742cb5" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.1'", + "version": "==23.9.16" + }, + "isort": { + "hashes": [ + "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504", + "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.0'", + "version": "==5.12.0" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "mypy-extensions": { + "hashes": [ + "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", + "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" + ], + "markers": "python_version >= '3.5'", + "version": "==1.0.0" + }, + "packaging": { + "hashes": [ + "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", + "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" + ], + "markers": "python_version >= '3.7'", + "version": "==23.1" + }, + "pathspec": { + "hashes": [ + "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20", + "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3" + ], + "markers": "python_version >= '3.7'", + "version": "==0.11.2" + }, + "platformdirs": { + "hashes": [ + "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d", + "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d" + ], + "markers": "python_version >= '3.7'", + "version": "==3.10.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0", + "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8" + ], + "markers": "python_version >= '3.8'", + "version": "==2.11.0" + }, + "pyflakes": { + "hashes": [ + "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774", + "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc" + ], + "markers": "python_version >= '3.8'", + "version": "==3.1.0" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.1" + }, + "typing-extensions": { + "hashes": [ + "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0", + "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef" + ], + "markers": "python_version >= '3.8'", + "version": "==4.8.0" + } + } +} diff --git a/.github/workflows/devcontainer.yml b/.github/workflows/devcontainer.yml new file mode 100644 index 000000000..c62fe11df --- /dev/null +++ b/.github/workflows/devcontainer.yml @@ -0,0 +1,49 @@ +name: "Build devcontainer" + +# Since building of containers can be quite time consuming, and take up some storage, +# there is no need to finish a build for a tag if new changes are concurrently being made. +# This cancels any previous builds for the same tag, and only the latest one will be kept. 
+concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +on: + push: + paths: + - ".devcontainer/**" + workflow_dispatch: + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - + name: Docker meta + id: meta + uses: docker/metadata-action@v4 + with: + images: | + ghcr.io/citusdata/citus-devcontainer + tags: | + type=ref,event=branch + type=sha + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - + name: 'Login to GitHub Container Registry' + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{github.actor}} + password: ${{secrets.GITHUB_TOKEN}} + - + name: Build and push + uses: docker/build-push-action@v4 + with: + context: "{{defaultContext}}:.devcontainer" + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 34bbf780c..eaec55c3e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,6 +11,30 @@ sign a Contributor License Agreement (CLA). For an explanation of why we ask this as well as instructions for how to proceed, see the [Microsoft CLA](https://cla.opensource.microsoft.com/). +### Devcontainer / Github Codespaces + +The easiest way to start contributing is via our devcontainer. This container works both locally in visual studio code with docker-desktop/docker-for-mac as well as [Github Codespaces](https://github.com/features/codespaces). To open the project in vscode you will need the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers). For codespaces you will need to [create a new codespace](https://codespace.new/citusdata/citus). + +With the extension installed you can run the following from the command pallet to get started + +``` +> Dev Containers: Clone Repository in Container Volume... +``` + +In the subsequent popup paste the url to the repo and hit enter. + +``` +https://github.com/citusdata/citus +``` + +This will create an isolated Workspace in vscode, complete with all tools required to build, test and run the Citus extension. We keep this container up to date with the supported postgres versions as well as the exact versions of tooling we use. + +To quickly start we suggest splitting your terminal once to have two shells. The left one in the `/workspaces/citus`, the second one changed to `/data`. The left terminal will be used to interact with the project, the right one with a testing cluster. + +To get citus installed from source we run `make install -s` in the first terminal. Once installed you can start a Citus cluster in the second terminal via `citus_dev make citus`. The cluster will run in the background, and can be interacted with via `citus_dev`. To get an overview of the available commands. + +With the Citus cluster running you can connect to the coordinator in the first terminal via `psql -p9700`. Because the coordinator is the most common entrypoint the `PGPORT` environment is set accordingly, so a simple `psql` will connect directly to the coordinator. + ### Getting and building [PostgreSQL documentation](https://www.postgresql.org/support/versioning/) has a From e9035f6d32ae58405a3ecd64cd77cebe34ab7286 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emel=20=C5=9Eim=C5=9Fek?= Date: Mon, 9 Oct 2023 22:33:08 +0300 Subject: [PATCH 05/14] Send keepalive messages in split decoder periodically to avoid wal receiver timeouts during large shard splits. 
(#7229)

DESCRIPTION: Send keepalive messages during the logical replication phase
of large shard splits to avoid timeouts.

During the logical replication part of the shard split process, the split
decoder filters out the wal records produced by the initial copy. If the
number of wal records is large, the split decoder ends up processing for a
long time before sending out any wal records through pgoutput. Hence the
wal receiver may time out and restart repeatedly, causing the catch-up
logic in our split driver code to fail.

Notes:

1. If the wal_receiver_timeout is set to a very small value, e.g. 600ms,
it may still time out before receiving the keepalives. My tests show that
this code works best when `wal_receiver_timeout` is set to 1 minute, which
is the default value.

2. Once a logical replication worker times out, a new one gets launched.
The new logical replication worker sets the pg_stat_subscription columns
to initial values. E.g. the latest_end_lsn is set to 0. Our driver logic
in `WaitForGroupedLogicalRepTargetsToCatchUp` cannot handle the LSN value
going backwards, which is the main reason it gets stuck in an infinite loop.
---
 .../shardsplit/shardsplit_decoder.c           | 46 +++++++++++++++++++
 1 file changed, 46 insertions(+)

diff --git a/src/backend/distributed/shardsplit/shardsplit_decoder.c b/src/backend/distributed/shardsplit/shardsplit_decoder.c
index 1386a21b0..7145b4dfa 100644
--- a/src/backend/distributed/shardsplit/shardsplit_decoder.c
+++ b/src/backend/distributed/shardsplit/shardsplit_decoder.c
@@ -90,6 +90,46 @@ replication_origin_filter_cb(LogicalDecodingContext *ctx, RepOriginId origin_id)
 }


+/*
+ * update_replication_progress is copied from Postgres 15. We use it to send keepalive
+ * messages when we are filtering out the wal changes resulting from the initial copy.
+ * If we do not send out messages long enough, wal reciever will time out.
+ * Postgres 16 has refactored this code such that keepalive messages are sent during
+ * reordering phase which is above change_cb. So we do not need to send keepalive in
+ * change_cb.
+ */
+#if (PG_VERSION_NUM < PG_VERSION_16)
+static void
+update_replication_progress(LogicalDecodingContext *ctx, bool skipped_xact)
+{
+	static int changes_count = 0;
+
+	/*
+	 * We don't want to try sending a keepalive message after processing each
+	 * change as that can have overhead. Tests revealed that there is no
+	 * noticeable overhead in doing it after continuously processing 100 or so
+	 * changes.
+	 */
+#define CHANGES_THRESHOLD 100
+
+	/*
+	 * After continuously processing CHANGES_THRESHOLD changes, we
+	 * try to send a keepalive message if required.
+	 */
+	if (ctx->end_xact || ++changes_count >= CHANGES_THRESHOLD)
+	{
+#if (PG_VERSION_NUM >= PG_VERSION_15)
+		OutputPluginUpdateProgress(ctx, skipped_xact);
+#else
+		OutputPluginUpdateProgress(ctx);
+#endif
+		changes_count = 0;
+	}
+}
+
+
+#endif
+
 /*
  * shard_split_change_cb function emits the incoming tuple change
  * to the appropriate destination shard.
@@ -108,6 +148,12 @@ shard_split_change_cb(LogicalDecodingContext *ctx, ReorderBufferTXN *txn,
 		return;
 	}

+#if (PG_VERSION_NUM < PG_VERSION_16)
+
+	/* Send replication keepalive.
*/ + update_replication_progress(ctx, false); +#endif + /* check if the relation is publishable.*/ if (!is_publishable_relation(relation)) { From e0b0cdbb87de7aad7e00d2858b4a19ac496377d5 Mon Sep 17 00:00:00 2001 From: Gokhan Gulbiz Date: Tue, 10 Oct 2023 16:58:50 +0300 Subject: [PATCH 06/14] CircleCI to GHA migration (#7154) Co-authored-by: Hanefi Onaldi --- .circleci/config.yml | 3 - .github/actions/parallelization/action.yml | 23 + .../actions/save_logs_and_results/action.yml | 38 ++ .github/actions/setup_extension/action.yml | 35 ++ .github/actions/upload_coverage/action.yml | 27 + .github/workflows/build_and_test.yml | 474 ++++++++++++++++++ .github/workflows/flaky_test_debugging.yml | 79 +++ ci/build-citus.sh | 5 +- 8 files changed, 677 insertions(+), 7 deletions(-) create mode 100644 .github/actions/parallelization/action.yml create mode 100644 .github/actions/save_logs_and_results/action.yml create mode 100644 .github/actions/setup_extension/action.yml create mode 100644 .github/actions/upload_coverage/action.yml create mode 100644 .github/workflows/build_and_test.yml create mode 100644 .github/workflows/flaky_test_debugging.yml diff --git a/.circleci/config.yml b/.circleci/config.yml index 797a47cef..376c44331 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -698,7 +698,6 @@ jobs: workflows: version: 2 flaky_test_debugging: - when: << pipeline.parameters.flaky_test >> jobs: - build: name: build-flaky-15 @@ -714,8 +713,6 @@ workflows: runs: << pipeline.parameters.flaky_test_runs_per_job >> build_and_test: - when: - not: << pipeline.parameters.flaky_test >> jobs: - build: name: build-14 diff --git a/.github/actions/parallelization/action.yml b/.github/actions/parallelization/action.yml new file mode 100644 index 000000000..1f7d00202 --- /dev/null +++ b/.github/actions/parallelization/action.yml @@ -0,0 +1,23 @@ +name: 'Parallelization matrix' +inputs: + count: + required: false + default: 32 +outputs: + json: + value: ${{ steps.generate_matrix.outputs.json }} +runs: + using: "composite" + steps: + - name: Generate parallelization matrix + id: generate_matrix + shell: bash + run: |- + json_array="{\"include\": [" + for ((i = 1; i <= ${{ inputs.count }}; i++)); do + json_array+="{\"id\":\"$i\"}," + done + json_array=${json_array%,} + json_array+=" ]}" + echo "json=$json_array" >> "$GITHUB_OUTPUT" + echo "json=$json_array" diff --git a/.github/actions/save_logs_and_results/action.yml b/.github/actions/save_logs_and_results/action.yml new file mode 100644 index 000000000..0f238835d --- /dev/null +++ b/.github/actions/save_logs_and_results/action.yml @@ -0,0 +1,38 @@ +name: save_logs_and_results +inputs: + folder: + required: false + default: "log" +runs: + using: composite + steps: + - uses: actions/upload-artifact@v3.1.1 + name: Upload logs + with: + name: ${{ inputs.folder }} + if-no-files-found: ignore + path: | + src/test/**/proxy.output + src/test/**/results/ + src/test/**/tmp_check/master/log + src/test/**/tmp_check/worker.57638/log + src/test/**/tmp_check/worker.57637/log + src/test/**/*.diffs + src/test/**/out/ddls.sql + src/test/**/out/queries.sql + src/test/**/logfile_* + /tmp/pg_upgrade_newData_logs + - name: Publish regression.diffs + run: |- + diffs="$(find src/test/regress -name "*.diffs" -exec cat {} \;)" + if ! 
[ -z "$diffs" ]; then + echo '```diff' >> $GITHUB_STEP_SUMMARY + echo -E "$diffs" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + echo -E $diffs + fi + shell: bash + - name: Print stack traces + run: "./ci/print_stack_trace.sh" + if: failure() + shell: bash diff --git a/.github/actions/setup_extension/action.yml b/.github/actions/setup_extension/action.yml new file mode 100644 index 000000000..96b408e7e --- /dev/null +++ b/.github/actions/setup_extension/action.yml @@ -0,0 +1,35 @@ +name: setup_extension +inputs: + pg_major: + required: false + skip_installation: + required: false + default: false + type: boolean +runs: + using: composite + steps: + - name: Expose $PG_MAJOR to Github Env + run: |- + if [ -z "${{ inputs.pg_major }}" ]; then + echo "PG_MAJOR=${PG_MAJOR}" >> $GITHUB_ENV + else + echo "PG_MAJOR=${{ inputs.pg_major }}" >> $GITHUB_ENV + fi + shell: bash + - uses: actions/download-artifact@v3.0.1 + with: + name: build-${{ env.PG_MAJOR }} + - name: Install Extension + if: ${{ inputs.skip_installation == 'false' }} + run: tar xfv "install-$PG_MAJOR.tar" --directory / + shell: bash + - name: Configure + run: |- + chown -R circleci . + git config --global --add safe.directory ${GITHUB_WORKSPACE} + gosu circleci ./configure --without-pg-version-check + shell: bash + - name: Enable core dumps + run: ulimit -c unlimited + shell: bash diff --git a/.github/actions/upload_coverage/action.yml b/.github/actions/upload_coverage/action.yml new file mode 100644 index 000000000..0b5f581a6 --- /dev/null +++ b/.github/actions/upload_coverage/action.yml @@ -0,0 +1,27 @@ +name: coverage +inputs: + flags: + required: false + codecov_token: + required: true +runs: + using: composite + steps: + - uses: codecov/codecov-action@v3 + with: + flags: ${{ inputs.flags }} + token: ${{ inputs.codecov_token }} + verbose: true + gcov: true + - name: Create codeclimate coverage + run: |- + lcov --directory . 
--capture --output-file lcov.info + lcov --remove lcov.info -o lcov.info '/usr/*' + sed "s=^SF:$PWD/=SF:=g" -i lcov.info # relative pats are required by codeclimate + mkdir -p /tmp/codeclimate + cc-test-reporter format-coverage -t lcov -o /tmp/codeclimate/${{ inputs.flags }}.json lcov.info + shell: bash + - uses: actions/upload-artifact@v3.1.1 + with: + path: "/tmp/codeclimate/*.json" + name: codeclimate diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml new file mode 100644 index 000000000..90a4b1432 --- /dev/null +++ b/.github/workflows/build_and_test.yml @@ -0,0 +1,474 @@ +name: Build & Test +run-name: Build & Test - ${{ github.event.pull_request.title || github.ref_name }} +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true +on: + workflow_dispatch: + inputs: + skip_test_flakyness: + required: false + default: false + type: boolean + pull_request: + types: [opened, reopened,synchronize] +jobs: + check-sql-snapshots: + runs-on: ubuntu-20.04 + container: + image: ${{ vars.build_image_name }}:latest + options: --user root + steps: + - uses: actions/checkout@v3.5.0 + - name: Check Snapshots + run: | + git config --global --add safe.directory ${GITHUB_WORKSPACE} + ci/check_sql_snapshots.sh + check-style: + runs-on: ubuntu-20.04 + container: + image: ${{ vars.style_checker_image_name }}:${{ vars.style_checker_tools_version }}${{ vars.image_suffix }} + steps: + - name: Check Snapshots + run: | + git config --global --add safe.directory ${GITHUB_WORKSPACE} + - uses: actions/checkout@v3.5.0 + with: + fetch-depth: 0 + - name: Check C Style + run: citus_indent --check + - name: Check Python style + run: black --check . + - name: Check Python import order + run: isort --check . + - name: Check Python lints + run: flake8 . 
+ - name: Fix whitespace + run: ci/editorconfig.sh && git diff --exit-code + - name: Remove useless declarations + run: ci/remove_useless_declarations.sh && git diff --cached --exit-code + - name: Normalize test output + run: ci/normalize_expected.sh && git diff --exit-code + - name: Check for C-style comments in migration files + run: ci/disallow_c_comments_in_migrations.sh && git diff --exit-code + - name: 'Check for comment--cached ns that start with # character in spec files' + run: ci/disallow_hash_comments_in_spec_files.sh && git diff --exit-code + - name: Check for gitignore entries .for source files + run: ci/fix_gitignore.sh && git diff --exit-code + - name: Check for lengths of changelog entries + run: ci/disallow_long_changelog_entries.sh + - name: Check for banned C API usage + run: ci/banned.h.sh + - name: Check for tests missing in schedules + run: ci/check_all_tests_are_run.sh + - name: Check if all CI scripts are actually run + run: ci/check_all_ci_scripts_are_run.sh + - name: Check if all GUCs are sorted alphabetically + run: ci/check_gucs_are_alphabetically_sorted.sh + - name: Check for missing downgrade scripts + run: ci/check_migration_files.sh + build: + name: Build for PG ${{ matrix.pg_version}} + strategy: + fail-fast: false + matrix: + image_name: + - ${{ vars.build_image_name }} + image_suffix: + - ${{ vars.image_suffix}} + pg_version: + - ${{ vars.pg14_version }} + - ${{ vars.pg15_version }} + - ${{ vars.pg16_version }} + runs-on: ubuntu-20.04 + container: + image: "${{ matrix.image_name }}:${{ matrix.pg_version }}${{ matrix.image_suffix }}" + options: --user root + steps: + - uses: actions/checkout@v3.5.0 + - name: Expose $PG_MAJOR to Github Env + run: echo "PG_MAJOR=${PG_MAJOR}" >> $GITHUB_ENV + shell: bash + - name: Build + run: "./ci/build-citus.sh" + shell: bash + - uses: actions/upload-artifact@v3.1.1 + with: + name: build-${{ env.PG_MAJOR }} + path: |- + ./build-${{ env.PG_MAJOR }}/* + ./install-${{ env.PG_MAJOR }}.tar + test-citus: + name: PG${{ matrix.pg_version }} - ${{ matrix.make }} + strategy: + fail-fast: false + matrix: + suite: + - regress + image_name: + - ${{ vars.test_image_name }} + pg_version: + - ${{ vars.pg14_version }} + - ${{ vars.pg15_version }} + - ${{ vars.pg16_version }} + make: + - check-split + - check-multi + - check-multi-1 + - check-multi-mx + - check-vanilla + - check-isolation + - check-operations + - check-follower-cluster + - check-columnar + - check-columnar-isolation + - check-enterprise + - check-enterprise-isolation + - check-enterprise-isolation-logicalrep-1 + - check-enterprise-isolation-logicalrep-2 + - check-enterprise-isolation-logicalrep-3 + include: + - make: check-failure + pg_version: ${{ vars.pg14_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-failure + pg_version: ${{ vars.pg15_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-failure + pg_version: ${{ vars.pg16_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-enterprise-failure + pg_version: ${{ vars.pg14_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-enterprise-failure + pg_version: ${{ vars.pg15_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-enterprise-failure + pg_version: ${{ vars.pg16_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-pytest + pg_version: ${{ vars.pg14_version }} + suite: regress + 
image_name: ${{ vars.fail_test_image_name }} + - make: check-pytest + pg_version: ${{ vars.pg15_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-pytest + pg_version: ${{ vars.pg16_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: installcheck + suite: cdc + image_name: ${{ vars.test_image_name }} + pg_version: ${{ vars.pg15_version }} + - make: installcheck + suite: cdc + image_name: ${{ vars.test_image_name }} + pg_version: ${{ vars.pg16_version }} + - make: check-query-generator + pg_version: ${{ vars.pg14_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-query-generator + pg_version: ${{ vars.pg15_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + - make: check-query-generator + pg_version: ${{ vars.pg16_version }} + suite: regress + image_name: ${{ vars.fail_test_image_name }} + runs-on: ubuntu-20.04 + container: + image: "${{ matrix.image_name }}:${{ matrix.pg_version }}${{ vars.image_suffix }}" + options: --user root --dns=8.8.8.8 + # Due to Github creates a default network for each job, we need to use + # --dns= to have similar DNS settings as our other CI systems or local + # machines. Otherwise, we may see different results. + needs: + - build + steps: + - uses: actions/checkout@v3.5.0 + - uses: "./.github/actions/setup_extension" + - name: Run Test + run: gosu circleci make -C src/test/${{ matrix.suite }} ${{ matrix.make }} + timeout-minutes: 20 + - uses: "./.github/actions/save_logs_and_results" + if: always() + with: + folder: ${{ matrix.pg_version }}_${{ matrix.make }} + - uses: "./.github/actions/upload_coverage" + if: always() + with: + flags: ${{ env.PG_MAJOR }}_${{ matrix.suite }}_${{ matrix.make }} + codecov_token: ${{ secrets.CODECOV_TOKEN }} + test-arbitrary-configs: + name: PG${{ matrix.pg_version }} - check-arbitrary-configs-${{ matrix.parallel }} + runs-on: ["self-hosted", "1ES.Pool=1es-gha-citusdata-pool"] + container: + image: "${{ matrix.image_name }}:${{ matrix.pg_version }}${{ vars.image_suffix }}" + options: --user root + needs: + - build + strategy: + fail-fast: false + matrix: + image_name: + - ${{ vars.fail_test_image_name }} + pg_version: + - ${{ vars.pg14_version }} + - ${{ vars.pg15_version }} + - ${{ vars.pg16_version }} + parallel: [0,1,2,3,4,5] # workaround for running 6 parallel jobs + steps: + - uses: actions/checkout@v3.5.0 + - uses: "./.github/actions/setup_extension" + - name: Test arbitrary configs + run: |- + # we use parallel jobs to split the tests into 6 parts and run them in parallel + # the script below extracts the tests for the current job + N=6 # Total number of jobs (see matrix.parallel) + X=${{ matrix.parallel }} # Current job number + TESTS=$(src/test/regress/citus_tests/print_test_names.py | + tr '\n' ',' | awk -v N="$N" -v X="$X" -F, '{ + split("", parts) + for (i = 1; i <= NF; i++) { + parts[i % N] = parts[i % N] $i "," + } + print substr(parts[X], 1, length(parts[X])-1) + }') + echo $TESTS + gosu circleci \ + make -C src/test/regress \ + check-arbitrary-configs parallel=4 CONFIGS=$TESTS + - uses: "./.github/actions/save_logs_and_results" + if: always() + - uses: "./.github/actions/upload_coverage" + if: always() + with: + flags: ${{ env.pg_major }}_upgrade + codecov_token: ${{ secrets.CODECOV_TOKEN }} + test-pg-upgrade: + name: PG${{ matrix.old_pg_major }}-PG${{ matrix.new_pg_major }} - check-pg-upgrade + runs-on: ubuntu-20.04 + container: + image: "${{ vars.pgupgrade_image_name }}:${{ 
vars.upgrade_pg_versions }}${{ vars.image_suffix }}" + options: --user root + needs: + - build + strategy: + fail-fast: false + matrix: + include: + - old_pg_major: 14 + new_pg_major: 15 + - old_pg_major: 15 + new_pg_major: 16 + - old_pg_major: 14 + new_pg_major: 16 + env: + old_pg_major: ${{ matrix.old_pg_major }} + new_pg_major: ${{ matrix.new_pg_major }} + steps: + - uses: actions/checkout@v3.5.0 + - uses: "./.github/actions/setup_extension" + with: + pg_major: "${{ env.old_pg_major }}" + - uses: "./.github/actions/setup_extension" + with: + pg_major: "${{ env.new_pg_major }}" + - name: Install and test postgres upgrade + run: |- + gosu circleci \ + make -C src/test/regress \ + check-pg-upgrade \ + old-bindir=/usr/lib/postgresql/${{ env.old_pg_major }}/bin \ + new-bindir=/usr/lib/postgresql/${{ env.new_pg_major }}/bin + - name: Copy pg_upgrade logs for newData dir + run: |- + mkdir -p /tmp/pg_upgrade_newData_logs + if ls src/test/regress/tmp_upgrade/newData/*.log 1> /dev/null 2>&1; then + cp src/test/regress/tmp_upgrade/newData/*.log /tmp/pg_upgrade_newData_logs + fi + if: failure() + - uses: "./.github/actions/save_logs_and_results" + if: always() + - uses: "./.github/actions/upload_coverage" + if: always() + with: + flags: ${{ env.old_pg_major }}_${{ env.new_pg_major }}_upgrade + codecov_token: ${{ secrets.CODECOV_TOKEN }} + test-citus-upgrade: + name: PG${{ vars.pg14_version }} - check-citus-upgrade + runs-on: ubuntu-20.04 + container: + image: "${{ vars.citusupgrade_image_name }}:${{ vars.pg14_version }}${{ vars.image_suffix }}" + options: --user root + needs: + - build + steps: + - uses: actions/checkout@v3.5.0 + - uses: "./.github/actions/setup_extension" + with: + skip_installation: true + - name: Install and test citus upgrade + run: |- + # run make check-citus-upgrade for all citus versions + # the image has ${CITUS_VERSIONS} set with all verions it contains the binaries of + for citus_version in ${CITUS_VERSIONS}; do \ + gosu circleci \ + make -C src/test/regress \ + check-citus-upgrade \ + bindir=/usr/lib/postgresql/${PG_MAJOR}/bin \ + citus-old-version=${citus_version} \ + citus-pre-tar=/install-pg${PG_MAJOR}-citus${citus_version}.tar \ + citus-post-tar=${GITHUB_WORKSPACE}/install-$PG_MAJOR.tar; \ + done; + # run make check-citus-upgrade-mixed for all citus versions + # the image has ${CITUS_VERSIONS} set with all verions it contains the binaries of + for citus_version in ${CITUS_VERSIONS}; do \ + gosu circleci \ + make -C src/test/regress \ + check-citus-upgrade-mixed \ + citus-old-version=${citus_version} \ + bindir=/usr/lib/postgresql/${PG_MAJOR}/bin \ + citus-pre-tar=/install-pg${PG_MAJOR}-citus${citus_version}.tar \ + citus-post-tar=${GITHUB_WORKSPACE}/install-$PG_MAJOR.tar; \ + done; + - uses: "./.github/actions/save_logs_and_results" + if: always() + - uses: "./.github/actions/upload_coverage" + if: always() + with: + flags: ${{ env.pg_major }}_upgrade + codecov_token: ${{ secrets.CODECOV_TOKEN }} + upload-coverage: + if: always() + env: + CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} + runs-on: ubuntu-20.04 + container: + image: ${{ vars.test_image_name }}:${{ vars.pg16_version }}${{ vars.image_suffix }} + needs: + - test-citus + - test-arbitrary-configs + - test-citus-upgrade + - test-pg-upgrade + steps: + - uses: actions/download-artifact@v3.0.1 + with: + name: "codeclimate" + path: "codeclimate" + - name: Upload coverage results to Code Climate + run: |- + cc-test-reporter sum-coverage codeclimate/*.json -o total.json + cc-test-reporter upload-coverage 
-i total.json + ch_benchmark: + name: CH Benchmark + if: startsWith(github.ref, 'refs/heads/ch_benchmark/') + runs-on: ubuntu-20.04 + needs: + - build + steps: + - uses: actions/checkout@v3.5.0 + - uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + - name: install dependencies and run ch_benchmark tests + uses: azure/CLI@v1 + with: + inlineScript: | + cd ./src/test/hammerdb + chmod +x run_hammerdb.sh + run_hammerdb.sh citusbot_ch_benchmark_rg + tpcc_benchmark: + name: TPCC Benchmark + if: startsWith(github.ref, 'refs/heads/tpcc_benchmark/') + runs-on: ubuntu-20.04 + needs: + - build + steps: + - uses: actions/checkout@v3.5.0 + - uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + - name: install dependencies and run tpcc_benchmark tests + uses: azure/CLI@v1 + with: + inlineScript: | + cd ./src/test/hammerdb + chmod +x run_hammerdb.sh + run_hammerdb.sh citusbot_tpcc_benchmark_rg + prepare_parallelization_matrix_32: + name: Parallel 32 + if: ${{ needs.test-flakyness-pre.outputs.tests != ''}} + needs: test-flakyness-pre + runs-on: ubuntu-20.04 + outputs: + json: ${{ steps.parallelization.outputs.json }} + steps: + - uses: actions/checkout@v3.5.0 + - uses: "./.github/actions/parallelization" + id: parallelization + with: + count: 32 + test-flakyness-pre: + name: Detect regression tests need to be ran + if: ${{ !inputs.skip_test_flakyness }}} + runs-on: ubuntu-20.04 + needs: build + outputs: + tests: ${{ steps.detect-regression-tests.outputs.tests }} + steps: + - uses: actions/checkout@v3.5.0 + with: + fetch-depth: 0 + - name: Detect regression tests need to be ran + id: detect-regression-tests + run: |- + detected_changes=$(git diff origin/main... --name-only --diff-filter=AM | (grep 'src/test/regress/sql/.*\.sql\|src/test/regress/spec/.*\.spec\|src/test/regress/citus_tests/test/test_.*\.py' || true)) + tests=${detected_changes} + if [ -z "$tests" ]; then + echo "No test found." 
+ else + echo "Detected tests " $tests + fi + echo tests="$tests" >> "$GITHUB_OUTPUT" + test-flakyness: + if: ${{ needs.test-flakyness-pre.outputs.tests != ''}} + name: Test flakyness + runs-on: ubuntu-20.04 + container: + image: ${{ vars.fail_test_image_name }}:${{ vars.pg16_version }}${{ vars.image_suffix }} + options: --user root + env: + runs: 8 + needs: + - build + - test-flakyness-pre + - prepare_parallelization_matrix_32 + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.prepare_parallelization_matrix_32.outputs.json) }} + steps: + - uses: actions/checkout@v3.5.0 + - uses: actions/download-artifact@v3.0.1 + - uses: "./.github/actions/setup_extension" + - name: Run minimal tests + run: |- + tests="${{ needs.test-flakyness-pre.outputs.tests }}" + tests_array=($tests) + for test in "${tests_array[@]}" + do + test_name=$(echo "$test" | sed -r "s/.+\/(.+)\..+/\1/") + gosu circleci src/test/regress/citus_tests/run_test.py $test_name --repeat ${{ env.runs }} --use-base-schedule --use-whole-schedule-line + done + shell: bash + - uses: "./.github/actions/save_logs_and_results" + if: always() diff --git a/.github/workflows/flaky_test_debugging.yml b/.github/workflows/flaky_test_debugging.yml new file mode 100644 index 000000000..a666c1cd5 --- /dev/null +++ b/.github/workflows/flaky_test_debugging.yml @@ -0,0 +1,79 @@ +name: Flaky test debugging +run-name: Flaky test debugging - ${{ inputs.flaky_test }} (${{ inputs.flaky_test_runs_per_job }}x${{ inputs.flaky_test_parallel_jobs }}) +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true +on: + workflow_dispatch: + inputs: + flaky_test: + required: true + type: string + description: Test to run + flaky_test_runs_per_job: + required: false + default: 8 + type: number + description: Number of times to run the test + flaky_test_parallel_jobs: + required: false + default: 32 + type: number + description: Number of parallel jobs to run +jobs: + build: + name: Build Citus + runs-on: ubuntu-latest + container: + image: ${{ vars.build_image_name }}:${{ vars.pg15_version }}${{ vars.image_suffix }} + options: --user root + steps: + - uses: actions/checkout@v3.5.0 + - name: Configure, Build, and Install + run: | + echo "PG_MAJOR=${PG_MAJOR}" >> $GITHUB_ENV + ./ci/build-citus.sh + shell: bash + - uses: actions/upload-artifact@v3.1.1 + with: + name: build-${{ env.PG_MAJOR }} + path: |- + ./build-${{ env.PG_MAJOR }}/* + ./install-${{ env.PG_MAJOR }}.tar + prepare_parallelization_matrix: + name: Prepare parallelization matrix + runs-on: ubuntu-latest + outputs: + json: ${{ steps.parallelization.outputs.json }} + steps: + - uses: actions/checkout@v3.5.0 + - uses: "./.github/actions/parallelization" + id: parallelization + with: + count: ${{ inputs.flaky_test_parallel_jobs }} + test_flakyness: + name: Test flakyness + runs-on: ubuntu-latest + container: + image: ${{ vars.fail_test_image_name }}:${{ vars.pg15_version }}${{ vars.image_suffix }} + options: --user root + needs: + [build, prepare_parallelization_matrix] + env: + test: "${{ inputs.flaky_test }}" + runs: "${{ inputs.flaky_test_runs_per_job }}" + skip: false + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.prepare_parallelization_matrix.outputs.json) }} + steps: + - uses: actions/checkout@v3.5.0 + - uses: "./.github/actions/setup_extension" + - name: Run minimal tests + run: |- + gosu circleci src/test/regress/citus_tests/run_test.py ${{ env.test }} --repeat ${{ env.runs }} --use-base-schedule 
--use-whole-schedule-line
+        shell: bash
+      - uses: "./.github/actions/save_logs_and_results"
+        if: always()
+        with:
+          folder: ${{ matrix.id }}
diff --git a/ci/build-citus.sh b/ci/build-citus.sh
index 49f92e691..678fd515c 100755
--- a/ci/build-citus.sh
+++ b/ci/build-citus.sh
@@ -15,9 +15,6 @@ PG_MAJOR=${PG_MAJOR:?please provide the postgres major version}
 codename=${VERSION#*(}
 codename=${codename%)*}

-# get project from argument
-project="${CIRCLE_PROJECT_REPONAME}"
-
 # we'll do everything with absolute paths
 basedir="$(pwd)"

@@ -28,7 +25,7 @@ build_ext() {
   pg_major="$1"

   builddir="${basedir}/build-${pg_major}"
-  echo "Beginning build of ${project} for PostgreSQL ${pg_major}..." >&2
+  echo "Beginning build for PostgreSQL ${pg_major}..." >&2

   # do everything in a subdirectory to avoid clutter in current directory
   mkdir -p "${builddir}" && cd "${builddir}"

From fb08f9b1987aeedc42cb3e60ff57c71e70ad7dcf Mon Sep 17 00:00:00 2001
From: Nils Dijk
Date: Thu, 12 Oct 2023 17:47:44 +0200
Subject: [PATCH 07/14] Remove software-properties-common from dev container
 after use (#7255)

During the creation of the devcontainer we need to add a ppa repository,
which is easiest done via software-properties-common. As it turns out, this
installs pkexec into the container as a side effect. When vscode tries to
attach a debugger it first checks whether pkexec is installed, since pkexec
gives a nicer popup asking for elevation of rights to attach to the process.
However, since dev containers don't have a windowing system running, pkexec
doesn't work as expected and thus prevents the debugger from attaching.
Without pkexec in the container vscode 'falls back' to plain old sudo, which
we can run passwordless in the container.

For pkexec to be removed we need to first purge software-properties-common
as well as autoremove all packages that were installed due to the
installation of said package. By performing all of this in one step we
minimize the size of the layer we are creating.
---
 .devcontainer/Dockerfile | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 6012dc851..1c1a2f083 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -36,6 +36,10 @@ RUN apt update && apt install -y \
     && add-apt-repository ppa:deadsnakes/ppa -y \
     && apt install -y \
         python3.9-full \
+    # software properties pulls in pkexec, which makes the debugger unusable in vscode
+    && apt purge -y \
+        software-properties-common \
+    && apt autoremove -y \
     && apt clean

 RUN sudo pip3 install pipenv pipenv-shebang
@@ -109,7 +113,7 @@ WORKDIR /uncrustify/uncrustify-uncrustify-0.68.1/
 RUN mkdir build
 WORKDIR /uncrustify/uncrustify-uncrustify-0.68.1/build/
 RUN cmake ..
-RUN make -sj8
+RUN MAKEFLAGS="-j $(nproc)" make -s
 RUN make install DESTDIR=/uncrustify

From 788e09a39aba40367241d92af4e2ab0d2950f3fd Mon Sep 17 00:00:00 2001
From: Jelte Fennema-Nio
Date: Mon, 16 Oct 2023 11:38:24 +0200
Subject: [PATCH 08/14] Add a test for citus_shards where table names have
 spaces (#7224)

There was a bug reported for previous versions of Citus where shard\_size
was returning NULL for tables with spaces in them. It works fine on the
main branch, but I'm still adding a test for this to the main branch
because it seems a good test to have.
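
For reference, the reported symptom can be checked with a short snippet along
these lines (just a sketch; the table name is illustrative and mirrors the test
added below):

```sql
-- Sketch: on the affected older versions, shard_size reportedly came back as
-- NULL for shards of tables whose names contain spaces; recent versions report
-- the actual shard size.
CREATE TABLE "t with space" (i int);
SELECT create_distributed_table('"t with space"', 'i');
INSERT INTO "t with space" SELECT generate_series(1, 1000);
SELECT table_name, shard_name, shard_size FROM citus_shards;
```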
--- src/test/regress/citus_tests/run_test.py | 1 + src/test/regress/expected/citus_shards.out | 37 ++++++++++++++++++++++ src/test/regress/multi_1_schedule | 2 ++ src/test/regress/sql/citus_shards.sql | 17 ++++++++++ 4 files changed, 57 insertions(+) create mode 100644 src/test/regress/expected/citus_shards.out create mode 100644 src/test/regress/sql/citus_shards.sql diff --git a/src/test/regress/citus_tests/run_test.py b/src/test/regress/citus_tests/run_test.py index f1e1ec827..6528834ae 100755 --- a/src/test/regress/citus_tests/run_test.py +++ b/src/test/regress/citus_tests/run_test.py @@ -152,6 +152,7 @@ DEPS = { worker_count=6, ), "function_propagation": TestDeps("minimal_schedule"), + "citus_shards": TestDeps("minimal_schedule"), "grant_on_foreign_server_propagation": TestDeps("minimal_schedule"), "multi_modifying_xacts": TestDeps("minimal_schedule"), "multi_mx_modifying_xacts": TestDeps(None, ["multi_mx_create_table"]), diff --git a/src/test/regress/expected/citus_shards.out b/src/test/regress/expected/citus_shards.out new file mode 100644 index 000000000..b434a984b --- /dev/null +++ b/src/test/regress/expected/citus_shards.out @@ -0,0 +1,37 @@ +CREATE SCHEMA citus_shards; +SET search_path TO citus_shards; +SET citus.shard_count TO 4; +SET citus.shard_replication_factor TO 1; +SET citus.next_shard_id TO 99456900; +ALTER SEQUENCE pg_catalog.pg_dist_colocationid_seq RESTART 456900; +CREATE TABLE t1 (i int); +SELECT create_distributed_table('t1', 'i'); + create_distributed_table +--------------------------------------------------------------------- + +(1 row) + +CREATE TABLE "t with space" (i int); +SELECT create_distributed_table('"t with space"', 'i'); + create_distributed_table +--------------------------------------------------------------------- + +(1 row) + +INSERT INTO t1 SELECT generate_series(1, 100); +INSERT INTO "t with space" SELECT generate_series(1, 1000); +SELECT * FROM citus_shards; + table_name | shardid | shard_name | citus_table_type | colocation_id | nodename | nodeport | shard_size +--------------------------------------------------------------------- + "t with space" | 99456904 | citus_shards."t with space_99456904" | distributed | 456900 | localhost | 57637 | 40960 + "t with space" | 99456905 | citus_shards."t with space_99456905" | distributed | 456900 | localhost | 57638 | 40960 + "t with space" | 99456906 | citus_shards."t with space_99456906" | distributed | 456900 | localhost | 57637 | 40960 + "t with space" | 99456907 | citus_shards."t with space_99456907" | distributed | 456900 | localhost | 57638 | 40960 + t1 | 99456900 | citus_shards.t1_99456900 | distributed | 456900 | localhost | 57637 | 8192 + t1 | 99456901 | citus_shards.t1_99456901 | distributed | 456900 | localhost | 57638 | 8192 + t1 | 99456902 | citus_shards.t1_99456902 | distributed | 456900 | localhost | 57637 | 8192 + t1 | 99456903 | citus_shards.t1_99456903 | distributed | 456900 | localhost | 57638 | 8192 +(8 rows) + +SET client_min_messages TO WARNING; +DROP SCHEMA citus_shards CASCADE; diff --git a/src/test/regress/multi_1_schedule b/src/test/regress/multi_1_schedule index 4dead5be3..ad70f136e 100644 --- a/src/test/regress/multi_1_schedule +++ b/src/test/regress/multi_1_schedule @@ -53,6 +53,8 @@ test: multi_read_from_secondaries test: grant_on_database_propagation test: alter_database_propagation +test: citus_shards + # ---------- # multi_citus_tools tests utility functions written for citus tools # ---------- diff --git a/src/test/regress/sql/citus_shards.sql 
b/src/test/regress/sql/citus_shards.sql new file mode 100644 index 000000000..9234ffd2e --- /dev/null +++ b/src/test/regress/sql/citus_shards.sql @@ -0,0 +1,17 @@ +CREATE SCHEMA citus_shards; +SET search_path TO citus_shards; +SET citus.shard_count TO 4; +SET citus.shard_replication_factor TO 1; +SET citus.next_shard_id TO 99456900; +ALTER SEQUENCE pg_catalog.pg_dist_colocationid_seq RESTART 456900; + +CREATE TABLE t1 (i int); +SELECT create_distributed_table('t1', 'i'); +CREATE TABLE "t with space" (i int); +SELECT create_distributed_table('"t with space"', 'i'); +INSERT INTO t1 SELECT generate_series(1, 100); +INSERT INTO "t with space" SELECT generate_series(1, 1000); +SELECT * FROM citus_shards; + +SET client_min_messages TO WARNING; +DROP SCHEMA citus_shards CASCADE; From 5eaf6c221e6c13b89c7e85c8d65ca7cd793dda92 Mon Sep 17 00:00:00 2001 From: Onur Tirtir Date: Mon, 16 Oct 2023 14:20:55 +0300 Subject: [PATCH 09/14] Fix flaky test detection job (#7256) We were getting such errors in flaky-test detection job: ``` Unable to process file command 'output' successfully ``` Even though we don't seem to be writing multiple lines to $GITHUB_OUTPUT, this seems to be the right fix. https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings --- .github/workflows/build_and_test.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 90a4b1432..5944c38db 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -439,7 +439,10 @@ jobs: else echo "Detected tests " $tests fi - echo tests="$tests" >> "$GITHUB_OUTPUT" + + echo 'tests<> $GITHUB_OUTPUT + echo "$tests" >> "$GITHUB_OUTPUT" + echo 'EOF' >> $GITHUB_OUTPUT test-flakyness: if: ${{ needs.test-flakyness-pre.outputs.tests != ''}} name: Test flakyness From 71a4633dad767154668975e4482c08d493b685cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=BCrkan=20=C4=B0ndibay?= Date: Tue, 17 Oct 2023 16:39:37 +0300 Subject: [PATCH 10/14] Fixes typo and renames multi_process_utility (#7259) --- src/backend/distributed/commands/index.c | 6 +-- .../distributed/commands/utility_hook.c | 40 +++++++++---------- src/backend/distributed/commands/vacuum.c | 4 +- .../executor/executor_util_tasks.c | 4 +- src/backend/distributed/shared_library_init.c | 2 +- .../distributed/utils/citus_copyfuncs.c | 2 +- .../distributed/utils/citus_outfuncs.c | 2 +- .../distributed/commands/utility_hook.h | 2 +- .../distributed/multi_physical_planner.h | 2 +- 9 files changed, 32 insertions(+), 32 deletions(-) diff --git a/src/backend/distributed/commands/index.c b/src/backend/distributed/commands/index.c index 8271cc4f4..275f253b3 100644 --- a/src/backend/distributed/commands/index.c +++ b/src/backend/distributed/commands/index.c @@ -938,7 +938,7 @@ CreateIndexTaskList(IndexStmt *indexStmt) task->dependentTaskList = NULL; task->anchorShardId = shardId; task->taskPlacementList = ActiveShardPlacementList(shardId); - task->cannotBeExecutedInTransction = indexStmt->concurrent; + task->cannotBeExecutedInTransaction = indexStmt->concurrent; taskList = lappend(taskList, task); @@ -983,7 +983,7 @@ CreateReindexTaskList(Oid relationId, ReindexStmt *reindexStmt) task->dependentTaskList = NULL; task->anchorShardId = shardId; task->taskPlacementList = ActiveShardPlacementList(shardId); - task->cannotBeExecutedInTransction = + task->cannotBeExecutedInTransaction = IsReindexWithParam_compat(reindexStmt, "concurrently"); 
taskList = lappend(taskList, task); @@ -1309,7 +1309,7 @@ DropIndexTaskList(Oid relationId, Oid indexId, DropStmt *dropStmt) task->dependentTaskList = NULL; task->anchorShardId = shardId; task->taskPlacementList = ActiveShardPlacementList(shardId); - task->cannotBeExecutedInTransction = dropStmt->concurrent; + task->cannotBeExecutedInTransaction = dropStmt->concurrent; taskList = lappend(taskList, task); diff --git a/src/backend/distributed/commands/utility_hook.c b/src/backend/distributed/commands/utility_hook.c index cf8e0644e..579b6979e 100644 --- a/src/backend/distributed/commands/utility_hook.c +++ b/src/backend/distributed/commands/utility_hook.c @@ -95,13 +95,13 @@ int UtilityHookLevel = 0; /* Local functions forward declarations for helper functions */ -static void ProcessUtilityInternal(PlannedStmt *pstmt, - const char *queryString, - ProcessUtilityContext context, - ParamListInfo params, - struct QueryEnvironment *queryEnv, - DestReceiver *dest, - QueryCompletion *completionTag); +static void citus_ProcessUtilityInternal(PlannedStmt *pstmt, + const char *queryString, + ProcessUtilityContext context, + ParamListInfo params, + struct QueryEnvironment *queryEnv, + DestReceiver *dest, + QueryCompletion *completionTag); static void set_indexsafe_procflags(void); static char * CurrentSearchPath(void); static void IncrementUtilityHookCountersIfNecessary(Node *parsetree); @@ -130,7 +130,7 @@ ProcessUtilityParseTree(Node *node, const char *queryString, ProcessUtilityConte /* - * multi_ProcessUtility is the main entry hook for implementing Citus-specific + * citus_ProcessUtility is the main entry hook for implementing Citus-specific * utility behavior. Its primary responsibilities are intercepting COPY and DDL * commands and augmenting the coordinator's command with corresponding tasks * to be run on worker nodes, after suitably ensuring said commands' options @@ -139,7 +139,7 @@ ProcessUtilityParseTree(Node *node, const char *queryString, ProcessUtilityConte * TRUNCATE and VACUUM are also supported. */ void -multi_ProcessUtility(PlannedStmt *pstmt, +citus_ProcessUtility(PlannedStmt *pstmt, const char *queryString, bool readOnlyTree, ProcessUtilityContext context, @@ -329,8 +329,8 @@ multi_ProcessUtility(PlannedStmt *pstmt, PG_TRY(); { - ProcessUtilityInternal(pstmt, queryString, context, params, queryEnv, dest, - completionTag); + citus_ProcessUtilityInternal(pstmt, queryString, context, params, queryEnv, dest, + completionTag); if (UtilityHookLevel == 1) { @@ -404,7 +404,7 @@ multi_ProcessUtility(PlannedStmt *pstmt, /* - * ProcessUtilityInternal is a helper function for multi_ProcessUtility where majority + * citus_ProcessUtilityInternal is a helper function for citus_ProcessUtility where majority * of the Citus specific utility statements are handled here. The distinction between * both functions is that Citus_ProcessUtility does not handle CALL and DO statements. * The reason for the distinction is implemented to be able to find the "top-level" DDL @@ -412,13 +412,13 @@ multi_ProcessUtility(PlannedStmt *pstmt, * this goal. 
*/ static void -ProcessUtilityInternal(PlannedStmt *pstmt, - const char *queryString, - ProcessUtilityContext context, - ParamListInfo params, - struct QueryEnvironment *queryEnv, - DestReceiver *dest, - QueryCompletion *completionTag) +citus_ProcessUtilityInternal(PlannedStmt *pstmt, + const char *queryString, + ProcessUtilityContext context, + ParamListInfo params, + struct QueryEnvironment *queryEnv, + DestReceiver *dest, + QueryCompletion *completionTag) { Node *parsetree = pstmt->utilityStmt; List *ddlJobs = NIL; @@ -1386,7 +1386,7 @@ PostStandardProcessUtility(Node *parsetree) * on the local table first. However, in order to decide whether the * command leads to an invalidation, we need to check before the command * is being executed since we read pg_constraint table. Thus, we maintain a - * local flag and do the invalidation after multi_ProcessUtility, + * local flag and do the invalidation after citus_ProcessUtility, * before ExecuteDistributedDDLJob(). */ InvalidateForeignKeyGraphForDDL(); diff --git a/src/backend/distributed/commands/vacuum.c b/src/backend/distributed/commands/vacuum.c index ee03aeae1..21638ba7f 100644 --- a/src/backend/distributed/commands/vacuum.c +++ b/src/backend/distributed/commands/vacuum.c @@ -279,7 +279,7 @@ VacuumTaskList(Oid relationId, CitusVacuumParams vacuumParams, List *vacuumColum task->replicationModel = REPLICATION_MODEL_INVALID; task->anchorShardId = shardId; task->taskPlacementList = ActiveShardPlacementList(shardId); - task->cannotBeExecutedInTransction = ((vacuumParams.options) & VACOPT_VACUUM); + task->cannotBeExecutedInTransaction = ((vacuumParams.options) & VACOPT_VACUUM); taskList = lappend(taskList, task); } @@ -719,7 +719,7 @@ ExecuteUnqualifiedVacuumTasks(VacuumStmt *vacuumStmt, CitusVacuumParams vacuumPa SetTaskQueryStringList(task, unqualifiedVacuumCommands); task->dependentTaskList = NULL; task->replicationModel = REPLICATION_MODEL_INVALID; - task->cannotBeExecutedInTransction = ((vacuumParams.options) & VACOPT_VACUUM); + task->cannotBeExecutedInTransaction = ((vacuumParams.options) & VACOPT_VACUUM); bool hasPeerWorker = false; diff --git a/src/backend/distributed/executor/executor_util_tasks.c b/src/backend/distributed/executor/executor_util_tasks.c index abf721196..483fd55a7 100644 --- a/src/backend/distributed/executor/executor_util_tasks.c +++ b/src/backend/distributed/executor/executor_util_tasks.c @@ -61,7 +61,7 @@ TaskListRequiresRollback(List *taskList) } Task *task = (Task *) linitial(taskList); - if (task->cannotBeExecutedInTransction) + if (task->cannotBeExecutedInTransaction) { /* vacuum, create index concurrently etc. */ return false; @@ -164,7 +164,7 @@ TaskListCannotBeExecutedInTransaction(List *taskList) Task *task = NULL; foreach_ptr(task, taskList) { - if (task->cannotBeExecutedInTransction) + if (task->cannotBeExecutedInTransaction) { return true; } diff --git a/src/backend/distributed/shared_library_init.c b/src/backend/distributed/shared_library_init.c index e5d593295..1ac20c8bc 100644 --- a/src/backend/distributed/shared_library_init.c +++ b/src/backend/distributed/shared_library_init.c @@ -543,7 +543,7 @@ _PG_init(void) */ PrevProcessUtility = (ProcessUtility_hook != NULL) ? ProcessUtility_hook : standard_ProcessUtility; - ProcessUtility_hook = multi_ProcessUtility; + ProcessUtility_hook = citus_ProcessUtility; /* * Acquire symbols for columnar functions that citus calls. 
diff --git a/src/backend/distributed/utils/citus_copyfuncs.c b/src/backend/distributed/utils/citus_copyfuncs.c index 7e1379ef3..fe4429f04 100644 --- a/src/backend/distributed/utils/citus_copyfuncs.c +++ b/src/backend/distributed/utils/citus_copyfuncs.c @@ -326,7 +326,7 @@ CopyNodeTask(COPYFUNC_ARGS) COPY_STRING_FIELD(fetchedExplainAnalyzePlan); COPY_SCALAR_FIELD(fetchedExplainAnalyzeExecutionDuration); COPY_SCALAR_FIELD(isLocalTableModification); - COPY_SCALAR_FIELD(cannotBeExecutedInTransction); + COPY_SCALAR_FIELD(cannotBeExecutedInTransaction); } diff --git a/src/backend/distributed/utils/citus_outfuncs.c b/src/backend/distributed/utils/citus_outfuncs.c index b4062751a..9b4ac809c 100644 --- a/src/backend/distributed/utils/citus_outfuncs.c +++ b/src/backend/distributed/utils/citus_outfuncs.c @@ -535,7 +535,7 @@ OutTask(OUTFUNC_ARGS) WRITE_STRING_FIELD(fetchedExplainAnalyzePlan); WRITE_FLOAT_FIELD(fetchedExplainAnalyzeExecutionDuration, "%.2f"); WRITE_BOOL_FIELD(isLocalTableModification); - WRITE_BOOL_FIELD(cannotBeExecutedInTransction); + WRITE_BOOL_FIELD(cannotBeExecutedInTransaction); } diff --git a/src/include/distributed/commands/utility_hook.h b/src/include/distributed/commands/utility_hook.h index f02f83fe3..34b2945ac 100644 --- a/src/include/distributed/commands/utility_hook.h +++ b/src/include/distributed/commands/utility_hook.h @@ -78,7 +78,7 @@ typedef struct DDLJob extern ProcessUtility_hook_type PrevProcessUtility; -extern void multi_ProcessUtility(PlannedStmt *pstmt, const char *queryString, +extern void citus_ProcessUtility(PlannedStmt *pstmt, const char *queryString, bool readOnlyTree, ProcessUtilityContext context, ParamListInfo params, struct QueryEnvironment *queryEnv, DestReceiver *dest, diff --git a/src/include/distributed/multi_physical_planner.h b/src/include/distributed/multi_physical_planner.h index b7acc0574..35d83eb33 100644 --- a/src/include/distributed/multi_physical_planner.h +++ b/src/include/distributed/multi_physical_planner.h @@ -329,7 +329,7 @@ typedef struct Task /* * Vacuum, create/drop/reindex concurrently cannot be executed in a transaction. 
*/ - bool cannotBeExecutedInTransction; + bool cannotBeExecutedInTransaction; Const *partitionKeyValue; int colocationId; From db13afaa7b77777003b7a724a01fdb0f5df481d1 Mon Sep 17 00:00:00 2001 From: Onur Tirtir Date: Tue, 17 Oct 2023 16:58:17 +0300 Subject: [PATCH 11/14] Fix flaky columnar_create.sql test (#7266) --- src/test/regress/expected/columnar_create.out | 43 +++++++++---------- src/test/regress/sql/columnar_create.sql | 32 +++++++++----- 2 files changed, 43 insertions(+), 32 deletions(-) diff --git a/src/test/regress/expected/columnar_create.out b/src/test/regress/expected/columnar_create.out index 73b891177..a134fd063 100644 --- a/src/test/regress/expected/columnar_create.out +++ b/src/test/regress/expected/columnar_create.out @@ -178,32 +178,31 @@ SELECT columnar_test_helpers.columnar_metadata_has_storage_id(:columnar_table_1_ CREATE TEMPORARY TABLE columnar_temp(i int) USING columnar; -- reserve some chunks and a stripe INSERT INTO columnar_temp SELECT i FROM generate_series(1,5) i; -SELECT columnar.get_storage_id(oid) AS columnar_temp_storage_id -FROM pg_class WHERE relname='columnar_temp' \gset -SELECT pg_backend_pid() AS val INTO old_backend_pid; +SELECT columnar.get_storage_id(oid) as oid INTO columnar_temp_storage_id +FROM pg_class WHERE relname='columnar_temp'; \c - - - :master_port SET search_path TO columnar_create; --- wait until old backend to expire to make sure that temp table cleanup is complete -SELECT columnar_test_helpers.pg_waitpid(val) FROM old_backend_pid; - pg_waitpid ---------------------------------------------------------------------- +-- wait until temporary table and its metadata is removed +DO $$ +DECLARE + loop_wait_count integer := 0; +BEGIN + WHILE ( + (SELECT COUNT(*) > 0 FROM pg_class WHERE relname='columnar_temp') OR + (SELECT columnar_test_helpers.columnar_metadata_has_storage_id(oid) FROM columnar_temp_storage_id) + ) + LOOP + IF loop_wait_count > 1000 THEN + RAISE EXCEPTION 'Timeout while waiting for temporary table to be dropped'; + END IF; -(1 row) - -DROP TABLE old_backend_pid; --- show that temporary table itself and its metadata is removed -SELECT COUNT(*)=0 FROM pg_class WHERE relname='columnar_temp'; - ?column? 
---------------------------------------------------------------------- - t -(1 row) - -SELECT columnar_test_helpers.columnar_metadata_has_storage_id(:columnar_temp_storage_id); - columnar_metadata_has_storage_id ---------------------------------------------------------------------- - f -(1 row) + PERFORM pg_sleep(0.001); + loop_wait_count := loop_wait_count + 1; + END LOOP; +END; +$$ language plpgsql; +DROP TABLE columnar_temp_storage_id; -- connect to another session and create a temp table with same name CREATE TEMPORARY TABLE columnar_temp(i int) USING columnar; -- reserve some chunks and a stripe diff --git a/src/test/regress/sql/columnar_create.sql b/src/test/regress/sql/columnar_create.sql index 408ce126e..a0708aeac 100644 --- a/src/test/regress/sql/columnar_create.sql +++ b/src/test/regress/sql/columnar_create.sql @@ -136,22 +136,34 @@ CREATE TEMPORARY TABLE columnar_temp(i int) USING columnar; -- reserve some chunks and a stripe INSERT INTO columnar_temp SELECT i FROM generate_series(1,5) i; -SELECT columnar.get_storage_id(oid) AS columnar_temp_storage_id -FROM pg_class WHERE relname='columnar_temp' \gset - -SELECT pg_backend_pid() AS val INTO old_backend_pid; +SELECT columnar.get_storage_id(oid) as oid INTO columnar_temp_storage_id +FROM pg_class WHERE relname='columnar_temp'; \c - - - :master_port SET search_path TO columnar_create; --- wait until old backend to expire to make sure that temp table cleanup is complete -SELECT columnar_test_helpers.pg_waitpid(val) FROM old_backend_pid; +-- wait until temporary table and its metadata is removed +DO $$ +DECLARE + loop_wait_count integer := 0; +BEGIN + WHILE ( + (SELECT COUNT(*) > 0 FROM pg_class WHERE relname='columnar_temp') OR + (SELECT columnar_test_helpers.columnar_metadata_has_storage_id(oid) FROM columnar_temp_storage_id) + ) + LOOP + IF loop_wait_count > 1000 THEN + RAISE EXCEPTION 'Timeout while waiting for temporary table to be dropped'; + END IF; -DROP TABLE old_backend_pid; + PERFORM pg_sleep(0.001); --- show that temporary table itself and its metadata is removed -SELECT COUNT(*)=0 FROM pg_class WHERE relname='columnar_temp'; -SELECT columnar_test_helpers.columnar_metadata_has_storage_id(:columnar_temp_storage_id); + loop_wait_count := loop_wait_count + 1; + END LOOP; +END; +$$ language plpgsql; + +DROP TABLE columnar_temp_storage_id; -- connect to another session and create a temp table with same name CREATE TEMPORARY TABLE columnar_temp(i int) USING columnar; From 2d1444188c9021d8a424b638e9aa77343f28cf9c Mon Sep 17 00:00:00 2001 From: zhjwpku Date: Wed, 18 Oct 2023 16:53:00 +0800 Subject: [PATCH 12/14] Fix wrong comments around HasDistributionKey() (#7223) HasDistributionKey & HasDistributionKeyCacheEntry returns true when the corresponding table has a distribution key, the comments state the opposite, which should be fixed. Signed-off-by: Zhao Junwang Co-authored-by: Onur Tirtir --- src/backend/distributed/metadata/metadata_cache.c | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/backend/distributed/metadata/metadata_cache.c b/src/backend/distributed/metadata/metadata_cache.c index 55d0f11c5..85a945308 100644 --- a/src/backend/distributed/metadata/metadata_cache.c +++ b/src/backend/distributed/metadata/metadata_cache.c @@ -521,8 +521,7 @@ IsCitusTableTypeCacheEntry(CitusTableCacheEntry *tableEntry, CitusTableType tabl /* - * HasDistributionKey returs true if given Citus table doesn't have a - * distribution key. + * HasDistributionKey returns true if given Citus table has a distribution key. 
*/ bool HasDistributionKey(Oid relationId) @@ -538,8 +537,8 @@ HasDistributionKey(Oid relationId) /* - * HasDistributionKey returs true if given cache entry identifies a Citus - * table that doesn't have a distribution key. + * HasDistributionKeyCacheEntry returns true if given cache entry identifies a + * Citus table that has a distribution key. */ bool HasDistributionKeyCacheEntry(CitusTableCacheEntry *tableEntry) From 1fe16fa7464033b16064b87be067ed30ccd90d02 Mon Sep 17 00:00:00 2001 From: Naisila Puka <37271756+naisila@users.noreply.github.com> Date: Mon, 23 Oct 2023 13:01:48 +0300 Subject: [PATCH 13/14] Remove unnecessary pre-fastpath code (#7262) This code was here because we first implemented `fast path planner` via [#2606](https://github.com/citusdata/citus/pull/2606) and then later `deferred pruning` [#3369](https://github.com/citusdata/citus/pull/3369) So, for some years, this code was useful. --- .../planner/multi_router_planner.c | 22 +++---------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/src/backend/distributed/planner/multi_router_planner.c b/src/backend/distributed/planner/multi_router_planner.c index 0d7a0de78..e70de5bbd 100644 --- a/src/backend/distributed/planner/multi_router_planner.c +++ b/src/backend/distributed/planner/multi_router_planner.c @@ -2324,27 +2324,11 @@ PlanRouterQuery(Query *originalQuery, TargetShardIntervalForFastPathQuery(originalQuery, &isMultiShardQuery, distributionKeyValue, partitionValueConst); - - /* - * This could only happen when there is a parameter on the distribution key. - * We defer error here, later the planner is forced to use a generic plan - * by assigning arbitrarily high cost to the plan. - */ - if (UpdateOrDeleteOrMergeQuery(originalQuery) && isMultiShardQuery) - { - planningError = DeferredError(ERRCODE_FEATURE_NOT_SUPPORTED, - "Router planner cannot handle multi-shard " - "modify queries", NULL, NULL); - return planningError; - } + Assert(!isMultiShardQuery); *prunedShardIntervalListList = shardIntervalList; - - if (!isMultiShardQuery) - { - ereport(DEBUG2, (errmsg("Distributed planning for a fast-path router " - "query"))); - } + ereport(DEBUG2, (errmsg("Distributed planning for a fast-path router " + "query"))); } else { From 10198b18e84e78478b009c3da8ecbb89aaedb474 Mon Sep 17 00:00:00 2001 From: Naisila Puka <37271756+naisila@users.noreply.github.com> Date: Mon, 23 Oct 2023 13:43:43 +0300 Subject: [PATCH 14/14] Technical readme small fixes (#7261) --- src/backend/distributed/README.md | 33 ++++++++++--------- .../planner/fast_path_router_planner.c | 2 +- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/src/backend/distributed/README.md b/src/backend/distributed/README.md index 0a3164e0f..7c4f43add 100644 --- a/src/backend/distributed/README.md +++ b/src/backend/distributed/README.md @@ -245,6 +245,7 @@ CREATE TABLE country_codes ( country_code VARCHAR(3) PRIMARY KEY, country_name VARCHAR(50) ); +SELECT create_reference_table('country_codes'); -- Reference Table: Order Status CREATE TABLE order_status ( @@ -269,14 +270,17 @@ The aim of this planner is to avoid relying on PostgreSQL's standard_planner() f ### Main C Functions Involved: -- `FastPathRouterPlan()`: The primary function for creating the fast-path query plan. +- `FastPathPlanner()`: The primary function for creating the fast-path query plan. - `FastPathRouterQuery()`: Validates if a query is eligible for fast-path routing by checking its structure and the WHERE clause. 
With set client_min_messages to debug4; you should see the following in the DEBUG messages: "DEBUG: Distributed planning for a fast-path router query" ```sql -- Fetches the count of users born in the same year, but only --- for a single country +-- for a single country, with a filter on the distribution column +-- Normally we have a single user with id = 15 because it's a PRIMARY KEY +-- this is just to demonstrate that fast-path can handle complex queries +-- with EXTRACT(), COUNT(), GROUP BY, HAVING, etc. SELECT EXTRACT(YEAR FROM date_of_birth) as birth_year, COUNT(*) FROM users_table WHERE country_code = 'USA' AND user_id = 15 @@ -382,11 +386,10 @@ FROM users_table u, orders_table o WHERE u.user_id = o.user_id AND u.user_id = 42; -- With Subqueries: - -- Fetch the username and their total order amount -- for a specific user SELECT u.username, - (SELECT MAX(o.product_id) FROM orders_table o + (SELECT COUNT(*) FROM orders_table o WHERE o.user_id = 42 AND o.user_id = u.user_id) FROM users_table u @@ -692,7 +695,7 @@ Assume that there are two subqueries; each subquery is individually joined on th -- The join condition between them is: sub1.user_id != sub2.user_id, which does not preserve distribution key equality. -- Citus qualifies sub1 as the anchor subquery and checks whether all other subqueries are joined on the distribution key. -- In this case, sub2 is not joined on the distribution key, so Citus decides to recursively plan the whole sub2. -SELECT a.user_id, b.user_id +SELECT sub1.user_id, sub2.user_id FROM ( SELECT u.user_id FROM users_table u @@ -884,7 +887,7 @@ Citus has a rules-based optimizer. The core function `MultiLogicalPlanCreate()` For instance, one simple optimization pushes the "filter" operation below the "MultiCollect." Such rules are defined in the function `Commutative()` in `multi_logical_optimizer.c`. -The most interesting part of the optimizer is usually in the final stage, when handling the more complex operators (GROUP BY, DISTINCT window functions, ORDER BY, aggregates). These operators are conjoined in a `MultiExtendedOpNode`. In many cases, they can only partially be pushed down into the worker nodes, which results in one `MultiExtendedOpNode` above the `MultiCollection` (which will run on the coordinator and aggregates across worker nodes), and another `MultiExtendedOpNode` below the `MultiCollect` (which will be pushed down to worker nodes). The bulk of the logic for generating the two nodes lives in `MasterExtendedOpNode()` and `WorkerExtendedOpNode()`, respectively. +The most interesting part of the optimizer is usually in the final stage, when handling the more complex operators (GROUP BY, DISTINCT window functions, ORDER BY, aggregates). These operators are conjoined in a `MultiExtendedOpNode`. In many cases, they can only partially be pushed down into the worker nodes, which results in one `MultiExtendedOpNode` above the `MultiCollect` (which will run on the coordinator and aggregates across worker nodes), and another `MultiExtendedOpNode` below the `MultiCollect` (which will be pushed down to worker nodes). The bulk of the logic for generating the two nodes lives in `MasterExtendedOpNode()` and `WorkerExtendedOpNode()`, respectively. 
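As a rough sketch of that split for a single aggregate (the shard name is illustrative and the rewritten SQL is simplified relative to what the planner actually emits):

```sql
-- Original query, as issued on the coordinator:
SELECT country_code, avg(EXTRACT(YEAR FROM date_of_birth)) AS avg_birth_year
FROM users_table
GROUP BY country_code;

-- Worker-side MultiExtendedOpNode: partial aggregation per shard, e.g.
--   SELECT country_code,
--          sum(EXTRACT(YEAR FROM date_of_birth)) AS s,
--          count(EXTRACT(YEAR FROM date_of_birth)) AS c
--   FROM users_table_102008
--   GROUP BY country_code;

-- Coordinator-side MultiExtendedOpNode: combines the per-shard partials,
--   effectively computing sum(s) / sum(c) per country_code across all shards.
```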
##### Aggregate functions @@ -1034,8 +1037,8 @@ SELECT * FROM cte_1; -- but as the same cte used twice -- Citus converts the CTE to intermediate result WITH cte_1 AS (SELECT DISTINCT user_id FROM orders_table) -SELECT * FROM cte_1 as c1 JOIN - cte_1 as c2 USING (user_id); +SELECT * FROM cte_1 as c1 + JOIN cte_1 as c2 USING (user_id); ``` - **Citus Specific Materialization**: @@ -1051,8 +1054,7 @@ As of writing this document, Citus does NOT support ```sql WITH users_that_have_orders AS (SELECT users_table.* FROM users_table JOIN orders_table USING (user_id)) -SELECT - max(date_of_birth) +SELECT max(date_of_birth) FROM users_that_have_orders GROUP BY GROUPING SETS (user_id, email); ... @@ -1099,7 +1101,7 @@ INSERT INTO orders_table (order_id, user_id) VALUES ``` **Debug Info**: - Debug information shows how the query is rebuilt for different user_ids. + Debug information shows how the query is rebuilt for different user_ids. Here, the shard_count is 4. ```sql -- for user_id: 1 DEBUG: query after rebuilding: INSERT INTO public.orders_table_102041 AS citus_table_alias (order_id, user_id) VALUES ('1'::bigint,'1'::bigint), ('3'::bigint,'1'::bigint) @@ -1133,7 +1135,7 @@ DEBUG: query after rebuilding: INSERT INTO public.orders_table_102064 AS citus **Examples**: The following section will delve into examples, starting with simple ones and moving to more complex scenarios. -### INSERT.. SELECT Advanced Scenarios +### INSERT.. SELECT Query Planning **Overview**: The `INSERT .. SELECT` pushdown logic builds upon the pushdown planning for `SELECT` commands. The key requirements include colocated tables and matching distribution columns. Relevant C functions are `CreateDistributedInsertSelectPlan`, `DistributedInsertSelectSupported()`, and `AllDistributionKeysInQueryAreEqual`. @@ -1267,7 +1269,7 @@ WHERE user_id IN (SELECT user_id FROM high_value_users); Used for more complex queries, like those with subqueries or joins that can't be pushed down. The queries are planned recursively. ```sql DELETE FROM users_table WHERE user_id -IN (SELECT user_id FROM orders_table WHERE total > 100 ORDER BY total DESC LIMIT 5); +IN (SELECT user_id FROM orders_table WHERE order_date < '2023-01-01' ORDER BY order_date LIMIT 5); ``` ### Correlated/Lateral Subqueries in Planning @@ -1279,8 +1281,7 @@ Correlated or LATERAL subqueries have special behavior in Citus. They can often **Key Code Details**: For more information on the code, check the following functions: `DeferErrorIfCannotPushdownSubquery()` -> - `ContainsReferencesToOuterQuery()` -> - `DeferErrorIfSubqueryRequiresMerge()`. + `ContainsReferencesToOuterQuery()`, `DeferErrorIfSubqueryRequiresMerge()`, `DeferredErrorIfUnsupportedLateralSubquery()`. LATERAL queries are different/unique: even if the subquery requires a merge step such as a `LIMIT`, if the correlation is on the distribution column, we can push it down. See [#4385](https://github.com/citusdata/citus/pull/4385). 
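For example, a correlated LATERAL subquery of the following shape stays pushdownable (a sketch using the example schema from this README, assuming `users_table` and `orders_table` are colocated on `user_id`):

```sql
-- The LIMIT would normally require a merge step, but the subquery is
-- correlated on the distribution column, so each shard can compute its own
-- "three most recent orders per user" independently.
SELECT u.user_id, recent.order_date
FROM users_table u,
     LATERAL (
         SELECT o.order_date
         FROM orders_table o
         WHERE o.user_id = u.user_id
         ORDER BY o.order_date DESC
         LIMIT 3
     ) AS recent;
```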
@@ -1409,7 +1410,7 @@ WITH recent_orders AS ( ) SELECT u.* FROM users_table u -JOIN recent_orders o ON u.user_id = o.product_id; +JOIN recent_orders o ON u.user_id = o.product_id JOIN orders_table o2 ON o2.product_id = o.product_id; ERROR: complex joins are only supported when all distributed tables are co-located and joined on their distribution columns ``` diff --git a/src/backend/distributed/planner/fast_path_router_planner.c b/src/backend/distributed/planner/fast_path_router_planner.c index 933ee7425..ed256296c 100644 --- a/src/backend/distributed/planner/fast_path_router_planner.c +++ b/src/backend/distributed/planner/fast_path_router_planner.c @@ -154,7 +154,7 @@ GeneratePlaceHolderPlannedStmt(Query *parse) * being a fast path router query. * The requirements for the fast path query can be listed below: * - * - SELECT query without CTES, sublinks-subqueries, set operations + * - SELECT/UPDATE/DELETE query without CTES, sublinks-subqueries, set operations * - The query should touch only a single hash distributed or reference table * - The distribution with equality operator should be in the WHERE clause * and it should be ANDed with any other filters. Also, the distribution