mirror of https://github.com/citusdata/citus.git
Compare commits
8 Commits
Author | SHA1 | Date
---|---|---
 | 2c630eca50 |
 | b421479d46 |
 | 2502e7e754 |
 | a4fe969947 |
 | e59ffbf549 |
 | 3b908eec2a |
 | 9b6ffece5e |
 | 1b4d7a51f8 |
.circleci/config.yml (1131 lines changed)
File diff suppressed because it is too large.
.github/actions/parallelization/action.yml (new file)

@@ -0,0 +1,23 @@
name: 'Parallelization matrix'
inputs:
  count:
    required: false
    default: 32
outputs:
  json:
    value: ${{ steps.generate_matrix.outputs.json }}
runs:
  using: "composite"
  steps:
    - name: Generate parallelization matrix
      id: generate_matrix
      shell: bash
      run: |-
        json_array="{\"include\": ["
        for ((i = 1; i <= ${{ inputs.count }}; i++)); do
          json_array+="{\"id\":\"$i\"},"
        done
        json_array=${json_array%,}
        json_array+=" ]}"
        echo "json=$json_array" >> "$GITHUB_OUTPUT"
        echo "json=$json_array"
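For illustration, here is a minimal standalone sketch (not from the repo; a count of 3 is assumed) of the JSON this action emits. Consumers feed the json output to strategy.matrix via fromJson, as the flaky-test jobs later in this changeset do.

count=3
json_array="{\"include\": ["
for ((i = 1; i <= count; i++)); do
  json_array+="{\"id\":\"$i\"},"
done
json_array=${json_array%,}   # strip the trailing comma before closing the array
json_array+=" ]}"
echo "$json_array"
# prints: {"include": [{"id":"1"},{"id":"2"},{"id":"3"} ]}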
.github/actions/save_logs_and_results/action.yml (new file)

@@ -0,0 +1,38 @@
name: save_logs_and_results
inputs:
  folder:
    required: false
    default: "log"
runs:
  using: composite
  steps:
  - uses: actions/upload-artifact@v3.1.1
    name: Upload logs
    with:
      name: ${{ inputs.folder }}
      if-no-files-found: ignore
      path: |
        src/test/**/proxy.output
        src/test/**/results/
        src/test/**/tmp_check/master/log
        src/test/**/tmp_check/worker.57638/log
        src/test/**/tmp_check/worker.57637/log
        src/test/**/*.diffs
        src/test/**/out/ddls.sql
        src/test/**/out/queries.sql
        src/test/**/logfile_*
        /tmp/pg_upgrade_newData_logs
  - name: Publish regression.diffs
    run: |-
      diffs="$(find src/test/regress -name "*.diffs" -exec cat {} \;)"
      if ! [ -z "$diffs" ]; then
        echo '```diff' >> $GITHUB_STEP_SUMMARY
        echo -E "$diffs" >> $GITHUB_STEP_SUMMARY
        echo '```' >> $GITHUB_STEP_SUMMARY
        echo -E $diffs
      fi
    shell: bash
  - name: Print stack traces
    run: "./ci/print_stack_trace.sh"
    if: failure()
    shell: bash
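The "Publish regression.diffs" step works because $GITHUB_STEP_SUMMARY is simply a file path: Markdown appended to it is rendered on the run's summary page. A hedged sketch of the same idea that can be tried outside Actions by pointing the variable at a scratch file (the fallback path below is made up):

export GITHUB_STEP_SUMMARY="${GITHUB_STEP_SUMMARY:-/tmp/step_summary.md}"
diffs="$(find src/test/regress -name "*.diffs" -exec cat {} \; 2>/dev/null)"
if [ -n "$diffs" ]; then   # equivalent to the action's `! [ -z "$diffs" ]`
  {
    echo '```diff'
    echo "$diffs"
    echo '```'
  } >> "$GITHUB_STEP_SUMMARY"
fi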
.github/actions/setup_extension/action.yml (new file)

@@ -0,0 +1,35 @@
name: setup_extension
inputs:
  pg_major:
    required: false
  skip_installation:
    required: false
    default: false
    type: boolean
runs:
  using: composite
  steps:
  - name: Expose $PG_MAJOR to Github Env
    run: |-
      if [ -z "${{ inputs.pg_major }}" ]; then
        echo "PG_MAJOR=${PG_MAJOR}" >> $GITHUB_ENV
      else
        echo "PG_MAJOR=${{ inputs.pg_major }}" >> $GITHUB_ENV
      fi
    shell: bash
  - uses: actions/download-artifact@v3.0.1
    with:
      name: build-${{ env.PG_MAJOR }}
  - name: Install Extension
    if: ${{ inputs.skip_installation == 'false' }}
    run: tar xfv "install-$PG_MAJOR.tar" --directory /
    shell: bash
  - name: Configure
    run: |-
      chown -R circleci .
      git config --global --add safe.directory ${GITHUB_WORKSPACE}
      gosu circleci ./configure --without-pg-version-check
    shell: bash
  - name: Enable core dumps
    run: ulimit -c unlimited
    shell: bash
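The "Install Extension" step assumes the build artifact is a tarball rooted so that extracting it at / drops files straight into the PostgreSQL installation. A sketch of how such a tarball can be produced with a DESTDIR staging install; the staging path is made up here, and the repo's ci/build-citus.sh may differ in detail:

make -sj8                                  # build the extension
make install DESTDIR=/tmp/citus-staging    # stage into a throwaway root
tar cf "install-${PG_MAJOR}.tar" -C /tmp/citus-staging .
# extracting at / then lands the files in their real locations:
# tar xfv "install-${PG_MAJOR}.tar" --directory /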
.github/actions/upload_coverage/action.yml (new file)

@@ -0,0 +1,27 @@
name: coverage
inputs:
  flags:
    required: false
  codecov_token:
    required: true
runs:
  using: composite
  steps:
  - uses: codecov/codecov-action@v3
    with:
      flags: ${{ inputs.flags }}
      token: ${{ inputs.codecov_token }}
      verbose: true
      gcov: true
  - name: Create codeclimate coverage
    run: |-
      lcov --directory . --capture --output-file lcov.info
      lcov --remove lcov.info -o lcov.info '/usr/*'
      sed "s=^SF:$PWD/=SF:=g" -i lcov.info # relative paths are required by codeclimate
      mkdir -p /tmp/codeclimate
      cc-test-reporter format-coverage -t lcov -o /tmp/codeclimate/${{ inputs.flags }}.json lcov.info
    shell: bash
  - uses: actions/upload-artifact@v3.1.1
    with:
      path: "/tmp/codeclimate/*.json"
      name: codeclimate
.github/workflows/build_and_test.yml (new file)

@@ -0,0 +1,505 @@
name: Build & Test
run-name: Build & Test - ${{ github.event.pull_request.title || github.ref_name }}
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
on:
  workflow_dispatch:
    inputs:
      skip_test_flakyness:
        required: false
        default: false
        type: boolean
  pull_request:
    types: [opened, reopened, synchronize]
jobs:
  # Since GHA does not interpolate env variables in matrix context, we need to
  # define them in a separate job and use them in other jobs.
  params:
    runs-on: ubuntu-latest
    name: Initialize parameters
    outputs:
      build_image_name: "citus/extbuilder"
      test_image_name: "citus/exttester"
      citusupgrade_image_name: "citus/citusupgradetester"
      fail_test_image_name: "citus/failtester"
      pgupgrade_image_name: "citus/pgupgradetester"
      style_checker_image_name: "citus/stylechecker"
      style_checker_tools_version: "0.8.18"
      image_suffix: "-v9d71045"
      pg14_version: '{ "major": "14", "full": "14.9" }'
      pg15_version: '{ "major": "15", "full": "15.4" }'
      pg16_version: '{ "major": "16", "full": "16.0" }'
      upgrade_pg_versions: "14.9-15.4-16.0"
    steps:
      # Since a GHA job needs at least one step, we use a noop step here.
      - name: Set up parameters
        run: echo 'noop'
  check-sql-snapshots:
    needs: params
    runs-on: ubuntu-20.04
    container:
      image: ${{ needs.params.outputs.build_image_name }}:latest
      options: --user root
    steps:
      - uses: actions/checkout@v3.5.0
      - name: Check Snapshots
        run: |
          git config --global --add safe.directory ${GITHUB_WORKSPACE}
          ci/check_sql_snapshots.sh
  check-style:
    needs: params
    runs-on: ubuntu-20.04
    container:
      image: ${{ needs.params.outputs.style_checker_image_name }}:${{ needs.params.outputs.style_checker_tools_version }}${{ needs.params.outputs.image_suffix }}
    steps:
      - name: Check Snapshots
        run: |
          git config --global --add safe.directory ${GITHUB_WORKSPACE}
      - uses: actions/checkout@v3.5.0
        with:
          fetch-depth: 0
      - name: Check C Style
        run: citus_indent --check
      - name: Check Python style
        run: black --check .
      - name: Check Python import order
        run: isort --check .
      - name: Check Python lints
        run: flake8 .
      - name: Fix whitespace
        run: ci/editorconfig.sh && git diff --exit-code
      - name: Remove useless declarations
        run: ci/remove_useless_declarations.sh && git diff --cached --exit-code
      - name: Normalize test output
        run: ci/normalize_expected.sh && git diff --exit-code
      - name: Check for C-style comments in migration files
        run: ci/disallow_c_comments_in_migrations.sh && git diff --exit-code
      - name: 'Check for comments that start with # character in spec files'
        run: ci/disallow_hash_comments_in_spec_files.sh && git diff --exit-code
      - name: Check for gitignore entries for source files
        run: ci/fix_gitignore.sh && git diff --exit-code
      - name: Check for lengths of changelog entries
        run: ci/disallow_long_changelog_entries.sh
      - name: Check for banned C API usage
        run: ci/banned.h.sh
      - name: Check for tests missing in schedules
        run: ci/check_all_tests_are_run.sh
      - name: Check if all CI scripts are actually run
        run: ci/check_all_ci_scripts_are_run.sh
      - name: Check if all GUCs are sorted alphabetically
        run: ci/check_gucs_are_alphabetically_sorted.sh
      - name: Check for missing downgrade scripts
        run: ci/check_migration_files.sh
  build:
    needs: params
    name: Build for PG${{ fromJson(matrix.pg_version).major }}
    strategy:
      fail-fast: false
      matrix:
        image_name:
          - ${{ needs.params.outputs.build_image_name }}
        image_suffix:
          - ${{ needs.params.outputs.image_suffix }}
        pg_version:
          - ${{ needs.params.outputs.pg14_version }}
          - ${{ needs.params.outputs.pg15_version }}
          - ${{ needs.params.outputs.pg16_version }}
    runs-on: ubuntu-20.04
    container:
      image: "${{ matrix.image_name }}:${{ fromJson(matrix.pg_version).full }}${{ matrix.image_suffix }}"
      options: --user root
    steps:
      - uses: actions/checkout@v3.5.0
      - name: Expose $PG_MAJOR to Github Env
        run: echo "PG_MAJOR=${PG_MAJOR}" >> $GITHUB_ENV
        shell: bash
      - name: Build
        run: "./ci/build-citus.sh"
        shell: bash
      - uses: actions/upload-artifact@v3.1.1
        with:
          name: build-${{ env.PG_MAJOR }}
          path: |-
            ./build-${{ env.PG_MAJOR }}/*
            ./install-${{ env.PG_MAJOR }}.tar
  test-citus:
    name: PG${{ fromJson(matrix.pg_version).major }} - ${{ matrix.make }}
    strategy:
      fail-fast: false
      matrix:
        suite:
          - regress
        image_name:
          - ${{ needs.params.outputs.test_image_name }}
        pg_version:
          - ${{ needs.params.outputs.pg14_version }}
          - ${{ needs.params.outputs.pg15_version }}
          - ${{ needs.params.outputs.pg16_version }}
        make:
          - check-split
          - check-multi
          - check-multi-1
          - check-multi-mx
          - check-vanilla
          - check-isolation
          - check-operations
          - check-follower-cluster
          - check-columnar
          - check-columnar-isolation
          - check-enterprise
          - check-enterprise-isolation
          - check-enterprise-isolation-logicalrep-1
          - check-enterprise-isolation-logicalrep-2
          - check-enterprise-isolation-logicalrep-3
        include:
          - make: check-failure
            pg_version: ${{ needs.params.outputs.pg14_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-failure
            pg_version: ${{ needs.params.outputs.pg15_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-failure
            pg_version: ${{ needs.params.outputs.pg16_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-enterprise-failure
            pg_version: ${{ needs.params.outputs.pg14_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-enterprise-failure
            pg_version: ${{ needs.params.outputs.pg15_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-enterprise-failure
            pg_version: ${{ needs.params.outputs.pg16_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-pytest
            pg_version: ${{ needs.params.outputs.pg14_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-pytest
            pg_version: ${{ needs.params.outputs.pg15_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-pytest
            pg_version: ${{ needs.params.outputs.pg16_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: installcheck
            suite: cdc
            image_name: ${{ needs.params.outputs.test_image_name }}
            pg_version: ${{ needs.params.outputs.pg15_version }}
          - make: installcheck
            suite: cdc
            image_name: ${{ needs.params.outputs.test_image_name }}
            pg_version: ${{ needs.params.outputs.pg16_version }}
          - make: check-query-generator
            pg_version: ${{ needs.params.outputs.pg14_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-query-generator
            pg_version: ${{ needs.params.outputs.pg15_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
          - make: check-query-generator
            pg_version: ${{ needs.params.outputs.pg16_version }}
            suite: regress
            image_name: ${{ needs.params.outputs.fail_test_image_name }}
    runs-on: ubuntu-20.04
    container:
      image: "${{ matrix.image_name }}:${{ fromJson(matrix.pg_version).full }}${{ needs.params.outputs.image_suffix }}"
      options: --user root --dns=8.8.8.8
    # Since GitHub creates a default network for each job, we need to use
    # --dns= to have DNS settings similar to our other CI systems and local
    # machines. Otherwise, we may see different results.
    needs:
      - params
      - build
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: "./.github/actions/setup_extension"
      - name: Run Test
        run: gosu circleci make -C src/test/${{ matrix.suite }} ${{ matrix.make }}
        timeout-minutes: 20
      - uses: "./.github/actions/save_logs_and_results"
        if: always()
        with:
          folder: ${{ fromJson(matrix.pg_version).major }}_${{ matrix.make }}
      - uses: "./.github/actions/upload_coverage"
        if: always()
        with:
          flags: ${{ env.PG_MAJOR }}_${{ matrix.suite }}_${{ matrix.make }}
          codecov_token: ${{ secrets.CODECOV_TOKEN }}
  test-arbitrary-configs:
    name: PG${{ fromJson(matrix.pg_version).major }} - check-arbitrary-configs-${{ matrix.parallel }}
    runs-on: ["self-hosted", "1ES.Pool=1es-gha-citusdata-pool"]
    container:
      image: "${{ matrix.image_name }}:${{ fromJson(matrix.pg_version).full }}${{ needs.params.outputs.image_suffix }}"
      options: --user root
    needs:
      - params
      - build
    strategy:
      fail-fast: false
      matrix:
        image_name:
          - ${{ needs.params.outputs.fail_test_image_name }}
        pg_version:
          - ${{ needs.params.outputs.pg14_version }}
          - ${{ needs.params.outputs.pg15_version }}
          - ${{ needs.params.outputs.pg16_version }}
        parallel: [0,1,2,3,4,5] # workaround for running 6 parallel jobs
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: "./.github/actions/setup_extension"
      - name: Test arbitrary configs
        run: |-
          # we use parallel jobs to split the tests into 6 parts and run them in parallel
          # the script below extracts the tests for the current job
          N=6 # Total number of jobs (see matrix.parallel)
          X=${{ matrix.parallel }} # Current job number
          TESTS=$(src/test/regress/citus_tests/print_test_names.py |
            tr '\n' ',' | awk -v N="$N" -v X="$X" -F, '{
              split("", parts)
              for (i = 1; i <= NF; i++) {
                parts[i % N] = parts[i % N] $i ","
              }
              print substr(parts[X], 1, length(parts[X])-1)
            }')
          echo $TESTS
          gosu circleci \
            make -C src/test/regress \
              check-arbitrary-configs parallel=4 CONFIGS=$TESTS
      - uses: "./.github/actions/save_logs_and_results"
        if: always()
      - uses: "./.github/actions/upload_coverage"
        if: always()
        with:
          flags: ${{ env.pg_major }}_upgrade
          codecov_token: ${{ secrets.CODECOV_TOKEN }}
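  # Note (editorial sketch, not part of the workflow): the awk program above
  # deals test i into bucket (i % N), so bucket indices run 0..N-1, matching
  # "parallel: [0,1,2,3,4,5]". A standalone bash demo with made-up test names:
  #
  #   printf 'a\nb\nc\nd\ne\nf\ng\n' | tr '\n' ',' |
  #     awk -v N=3 -v X=1 -F, '{
  #       split("", parts)
  #       for (i = 1; i <= NF; i++) { parts[i % N] = parts[i % N] $i "," }
  #       print substr(parts[X], 1, length(parts[X])-1)
  #     }'
  #   # prints: a,d,g   (fields 1, 4 and 7 satisfy i % 3 == 1)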
  test-pg-upgrade:
    name: PG${{ matrix.old_pg_major }}-PG${{ matrix.new_pg_major }} - check-pg-upgrade
    runs-on: ubuntu-20.04
    container:
      image: "${{ needs.params.outputs.pgupgrade_image_name }}:${{ needs.params.outputs.upgrade_pg_versions }}${{ needs.params.outputs.image_suffix }}"
      options: --user root
    needs:
      - params
      - build
    strategy:
      fail-fast: false
      matrix:
        include:
          - old_pg_major: 14
            new_pg_major: 15
          - old_pg_major: 15
            new_pg_major: 16
          - old_pg_major: 14
            new_pg_major: 16
    env:
      old_pg_major: ${{ matrix.old_pg_major }}
      new_pg_major: ${{ matrix.new_pg_major }}
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: "./.github/actions/setup_extension"
        with:
          pg_major: "${{ env.old_pg_major }}"
      - uses: "./.github/actions/setup_extension"
        with:
          pg_major: "${{ env.new_pg_major }}"
      - name: Install and test postgres upgrade
        run: |-
          gosu circleci \
            make -C src/test/regress \
              check-pg-upgrade \
              old-bindir=/usr/lib/postgresql/${{ env.old_pg_major }}/bin \
              new-bindir=/usr/lib/postgresql/${{ env.new_pg_major }}/bin
      - name: Copy pg_upgrade logs for newData dir
        run: |-
          mkdir -p /tmp/pg_upgrade_newData_logs
          if ls src/test/regress/tmp_upgrade/newData/*.log 1> /dev/null 2>&1; then
            cp src/test/regress/tmp_upgrade/newData/*.log /tmp/pg_upgrade_newData_logs
          fi
        if: failure()
      - uses: "./.github/actions/save_logs_and_results"
        if: always()
      - uses: "./.github/actions/upload_coverage"
        if: always()
        with:
          flags: ${{ env.old_pg_major }}_${{ env.new_pg_major }}_upgrade
          codecov_token: ${{ secrets.CODECOV_TOKEN }}
  test-citus-upgrade:
    name: PG${{ fromJson(needs.params.outputs.pg14_version).major }} - check-citus-upgrade
    runs-on: ubuntu-20.04
    container:
      image: "${{ needs.params.outputs.citusupgrade_image_name }}:${{ fromJson(needs.params.outputs.pg14_version).full }}${{ needs.params.outputs.image_suffix }}"
      options: --user root
    needs:
      - params
      - build
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: "./.github/actions/setup_extension"
        with:
          skip_installation: true
      - name: Install and test citus upgrade
        run: |-
          # run make check-citus-upgrade for all citus versions
          # the image has ${CITUS_VERSIONS} set with all versions it contains the binaries of
          for citus_version in ${CITUS_VERSIONS}; do \
            gosu circleci \
              make -C src/test/regress \
                check-citus-upgrade \
                bindir=/usr/lib/postgresql/${PG_MAJOR}/bin \
                citus-old-version=${citus_version} \
                citus-pre-tar=/install-pg${PG_MAJOR}-citus${citus_version}.tar \
                citus-post-tar=${GITHUB_WORKSPACE}/install-$PG_MAJOR.tar; \
          done;
          # run make check-citus-upgrade-mixed for all citus versions
          # the image has ${CITUS_VERSIONS} set with all versions it contains the binaries of
          for citus_version in ${CITUS_VERSIONS}; do \
            gosu circleci \
              make -C src/test/regress \
                check-citus-upgrade-mixed \
                citus-old-version=${citus_version} \
                bindir=/usr/lib/postgresql/${PG_MAJOR}/bin \
                citus-pre-tar=/install-pg${PG_MAJOR}-citus${citus_version}.tar \
                citus-post-tar=${GITHUB_WORKSPACE}/install-$PG_MAJOR.tar; \
          done;
      - uses: "./.github/actions/save_logs_and_results"
        if: always()
      - uses: "./.github/actions/upload_coverage"
        if: always()
        with:
          flags: ${{ env.pg_major }}_upgrade
          codecov_token: ${{ secrets.CODECOV_TOKEN }}
  upload-coverage:
    if: always()
    env:
      CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }}
    runs-on: ubuntu-20.04
    container:
      image: ${{ needs.params.outputs.test_image_name }}:${{ fromJson(needs.params.outputs.pg16_version).full }}${{ needs.params.outputs.image_suffix }}
    needs:
      - params
      - test-citus
      - test-arbitrary-configs
      - test-citus-upgrade
      - test-pg-upgrade
    steps:
      - uses: actions/download-artifact@v3.0.1
        with:
          name: "codeclimate"
          path: "codeclimate"
      - name: Upload coverage results to Code Climate
        run: |-
          cc-test-reporter sum-coverage codeclimate/*.json -o total.json
          cc-test-reporter upload-coverage -i total.json
  ch_benchmark:
    name: CH Benchmark
    if: startsWith(github.ref, 'refs/heads/ch_benchmark/')
    runs-on: ubuntu-20.04
    needs:
      - build
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: azure/login@v1
        with:
          creds: ${{ secrets.AZURE_CREDENTIALS }}
      - name: install dependencies and run ch_benchmark tests
        uses: azure/CLI@v1
        with:
          inlineScript: |
            cd ./src/test/hammerdb
            chmod +x run_hammerdb.sh
            run_hammerdb.sh citusbot_ch_benchmark_rg
  tpcc_benchmark:
    name: TPCC Benchmark
    if: startsWith(github.ref, 'refs/heads/tpcc_benchmark/')
    runs-on: ubuntu-20.04
    needs:
      - build
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: azure/login@v1
        with:
          creds: ${{ secrets.AZURE_CREDENTIALS }}
      - name: install dependencies and run tpcc_benchmark tests
        uses: azure/CLI@v1
        with:
          inlineScript: |
            cd ./src/test/hammerdb
            chmod +x run_hammerdb.sh
            run_hammerdb.sh citusbot_tpcc_benchmark_rg
  prepare_parallelization_matrix_32:
    name: Parallel 32
    if: ${{ needs.test-flakyness-pre.outputs.tests != '' }}
    needs: test-flakyness-pre
    runs-on: ubuntu-20.04
    outputs:
      json: ${{ steps.parallelization.outputs.json }}
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: "./.github/actions/parallelization"
        id: parallelization
        with:
          count: 32
  test-flakyness-pre:
    name: Detect regression tests that need to be run
    if: ${{ !inputs.skip_test_flakyness }}
    runs-on: ubuntu-20.04
    needs: build
    outputs:
      tests: ${{ steps.detect-regression-tests.outputs.tests }}
    steps:
      - uses: actions/checkout@v3.5.0
        with:
          fetch-depth: 0
      - name: Detect regression tests that need to be run
        id: detect-regression-tests
        run: |-
          detected_changes=$(git diff origin/release-12.1... --name-only --diff-filter=AM | (grep 'src/test/regress/sql/.*\.sql\|src/test/regress/spec/.*\.spec\|src/test/regress/citus_tests/test/test_.*\.py' || true))
          tests=${detected_changes}
          if [ -z "$tests" ]; then
            echo "No test found."
          else
            echo "Detected tests " $tests
          fi
          echo tests="$tests" >> "$GITHUB_OUTPUT"
  test-flakyness:
    if: ${{ needs.test-flakyness-pre.outputs.tests != '' }}
    name: Test flakyness
    runs-on: ubuntu-20.04
    container:
      image: ${{ needs.params.outputs.fail_test_image_name }}:${{ fromJson(needs.params.outputs.pg16_version).full }}${{ needs.params.outputs.image_suffix }}
      options: --user root
    env:
      runs: 8
    needs:
      - params
      - build
      - test-flakyness-pre
      - prepare_parallelization_matrix_32
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.prepare_parallelization_matrix_32.outputs.json) }}
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: actions/download-artifact@v3.0.1
      - uses: "./.github/actions/setup_extension"
      - name: Run minimal tests
        run: |-
          tests="${{ needs.test-flakyness-pre.outputs.tests }}"
          tests_array=($tests)
          for test in "${tests_array[@]}"
          do
            test_name=$(echo "$test" | sed -r "s/.+\/(.+)\..+/\1/")
            gosu circleci src/test/regress/citus_tests/run_test.py $test_name --repeat ${{ env.runs }} --use-base-schedule --use-whole-schedule-line
          done
        shell: bash
      - uses: "./.github/actions/save_logs_and_results"
        if: always()
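A note on the detection step above: `origin/release-12.1...` is git's three-dot syntax, which diffs HEAD against the merge-base of the two refs, so only changes introduced on the PR branch itself are listed (the fetch-depth: 0 checkout makes the merge-base reachable). A condensed sketch of the same filter, spelled with grep -E for readability:

git diff origin/release-12.1... --name-only --diff-filter=AM |
  grep -E 'src/test/regress/(sql/.*\.sql|spec/.*\.spec|citus_tests/test/test_.*\.py)' || true
# `|| true` keeps the pipeline from failing when no test files changed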
.github/workflows/flaky_test_debugging.yml (new file)

@@ -0,0 +1,79 @@
name: Flaky test debugging
run-name: Flaky test debugging - ${{ inputs.flaky_test }} (${{ inputs.flaky_test_runs_per_job }}x${{ inputs.flaky_test_parallel_jobs }})
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
on:
  workflow_dispatch:
    inputs:
      flaky_test:
        required: true
        type: string
        description: Test to run
      flaky_test_runs_per_job:
        required: false
        default: 8
        type: number
        description: Number of times to run the test
      flaky_test_parallel_jobs:
        required: false
        default: 32
        type: number
        description: Number of parallel jobs to run
jobs:
  build:
    name: Build Citus
    runs-on: ubuntu-latest
    container:
      image: ${{ vars.build_image_name }}:${{ vars.pg15_version }}${{ vars.image_suffix }}
      options: --user root
    steps:
      - uses: actions/checkout@v3.5.0
      - name: Configure, Build, and Install
        run: |
          echo "PG_MAJOR=${PG_MAJOR}" >> $GITHUB_ENV
          ./ci/build-citus.sh
        shell: bash
      - uses: actions/upload-artifact@v3.1.1
        with:
          name: build-${{ env.PG_MAJOR }}
          path: |-
            ./build-${{ env.PG_MAJOR }}/*
            ./install-${{ env.PG_MAJOR }}.tar
  prepare_parallelization_matrix:
    name: Prepare parallelization matrix
    runs-on: ubuntu-latest
    outputs:
      json: ${{ steps.parallelization.outputs.json }}
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: "./.github/actions/parallelization"
        id: parallelization
        with:
          count: ${{ inputs.flaky_test_parallel_jobs }}
  test_flakyness:
    name: Test flakyness
    runs-on: ubuntu-latest
    container:
      image: ${{ vars.fail_test_image_name }}:${{ vars.pg15_version }}${{ vars.image_suffix }}
      options: --user root
    needs: [build, prepare_parallelization_matrix]
    env:
      test: "${{ inputs.flaky_test }}"
      runs: "${{ inputs.flaky_test_runs_per_job }}"
      skip: false
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.prepare_parallelization_matrix.outputs.json) }}
    steps:
      - uses: actions/checkout@v3.5.0
      - uses: "./.github/actions/setup_extension"
      - name: Run minimal tests
        run: |-
          gosu circleci src/test/regress/citus_tests/run_test.py ${{ env.test }} --repeat ${{ env.runs }} --use-base-schedule --use-whole-schedule-line
        shell: bash
      - uses: "./.github/actions/save_logs_and_results"
        if: always()
        with:
          folder: ${{ matrix.id }}
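Because this workflow has only a workflow_dispatch trigger, it has to be started manually. One way is the GitHub CLI, sketched here under the assumption that the file is named flaky_test_debugging.yml (the file name is not shown in this view) and with a hypothetical test name:

gh workflow run flaky_test_debugging.yml \
  -f flaky_test=multi_insert_select \
  -f flaky_test_runs_per_job=8 \
  -f flaky_test_parallel_jobs=32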
.github/workflows/packaging-test-pipelines.yml

@@ -24,14 +24,16 @@ jobs:
       - name: Get Postgres Versions
         id: get-postgres-versions
         run: |
-          # Postgres versions are stored in .circleci/config.yml file in "build-[pg-version] format. Below command
-          # extracts the versions and get the unique values.
-          pg_versions=`grep -Eo 'build-[[:digit:]]{2}' .circleci/config.yml|sed -e "s/^build-//"|sort|uniq|tr '\n' ','| head -c -1`
+          set -euxo pipefail
+          # Postgres versions are stored in .github/workflows/build_and_test.yml
+          # file in json strings with major and full keys.
+          # Below command extracts the versions and gets the unique values.
+          pg_versions=$(cat .github/workflows/build_and_test.yml | grep -oE '"major": "[0-9]+", "full": "[0-9.]+"' | sed -E 's/"major": "([0-9]+)", "full": "([0-9.]+)"/\1/g' | sort | uniq | tr '\n' ',')
           pg_versions_array="[ ${pg_versions} ]"
           echo "Supported PG Versions: ${pg_versions_array}"
           # Below line is needed to set the output variable to be used in the next job
           echo "pg_versions=${pg_versions_array}" >> $GITHUB_OUTPUT
         shell: bash
   rpm_build_tests:
     name: rpm_build_tests
     needs: get_postgres_versions_from_file
@@ -101,7 +103,7 @@ jobs:
           echo "Postgres version: ${POSTGRES_VERSION}"
 
           ## Install required packages to execute packaging tools for rpm based distros
-          yum install python3-pip python3-devel postgresql-devel -y
+          yum install python3-pip python3-devel postgresql-devel -y || true
           python3 -m pip install wheel
 
           ./.github/packaging/validate_build_output.sh "rpm"
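For reference, here is the version-extraction pipeline from the first hunk above, run against a hard-coded sample instead of build_and_test.yml; the trailing comma from tr survives, and the workflow wraps the result in brackets itself:

printf '%s\n' '"major": "14", "full": "14.9"' '"major": "15", "full": "15.4"' |
  sed -E 's/"major": "([0-9]+)", "full": "([0-9.]+)"/\1/g' |
  sort | uniq | tr '\n' ','
# prints: 14,15,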
CHANGELOG.md (12 lines changed)
@@ -1,3 +1,11 @@
+### citus v12.1.1 (November 9, 2023) ###
+
+* Fixes leaking of memory and memory contexts in Citus foreign key cache
+  (#7219)
+
+* Makes sure to disallow creating a replicated distributed table concurrently
+  (#7236)
+
 ### citus v12.1.0 (September 12, 2023) ###
 
 * Adds support for PostgreSQL 16.0 (#7173)

@@ -5,8 +13,8 @@
 * Add `citus_schema_move()` function which moves tables within a
   distributed schema to another node (#7180)
 
-* Adds `citus_pause_node()` UDF that allows pausing the node with given id
-  (#7089)
+* Adds `citus_pause_node_within_txn()` UDF that allows pausing the node with
+  given id (#7089)
 
 * Makes sure to enforce shard level colocation with the GUC
   `citus.enable_non_colocated_router_query_pushdown` (#7076)
ci/build-citus.sh

@@ -15,9 +15,6 @@ PG_MAJOR=${PG_MAJOR:?please provide the postgres major version}
 codename=${VERSION#*(}
 codename=${codename%)*}
 
-# get project from argument
-project="${CIRCLE_PROJECT_REPONAME}"
-
 # we'll do everything with absolute paths
 basedir="$(pwd)"
 

@@ -28,7 +25,7 @@ build_ext() {
   pg_major="$1"
 
   builddir="${basedir}/build-${pg_major}"
-  echo "Beginning build of ${project} for PostgreSQL ${pg_major}..." >&2
+  echo "Beginning build for PostgreSQL ${pg_major}..." >&2
 
   # do everything in a subdirectory to avoid clutter in current directory
   mkdir -p "${builddir}" && cd "${builddir}"
ci/check_all_ci_scripts_are_run.sh

@@ -14,8 +14,8 @@ ci_scripts=$(
     grep -v -E '^(ci_helpers.sh|fix_style.sh)$'
 )
 for script in $ci_scripts; do
-    if ! grep "\\bci/$script\\b" .circleci/config.yml > /dev/null; then
-        echo "ERROR: CI script with name \"$script\" is not actually used in .circleci/config.yml"
+    if ! grep "\\bci/$script\\b" -r .github > /dev/null; then
+        echo "ERROR: CI script with name \"$script\" is not actually used in .github folder"
         exit 1
     fi
     if ! grep "^## \`$script\`\$" ci/README.md > /dev/null; then
ci/check_enterprise_merge.sh (deleted file)

@@ -1,96 +0,0 @@
#!/bin/bash

# Testing this script locally requires you to set the following environment
# variables:
# CIRCLE_BRANCH, GIT_USERNAME and GIT_TOKEN

# fail if trying to reference a variable that is not set.
set -u
# exit immediately if a command fails
set -e
# Fail on pipe failures
set -o pipefail

PR_BRANCH="${CIRCLE_BRANCH}"
ENTERPRISE_REMOTE="https://${GIT_USERNAME}:${GIT_TOKEN}@github.com/citusdata/citus-enterprise"

# shellcheck disable=SC1091
source ci/ci_helpers.sh

# List executed commands. This is done so debugging this script is easier when
# it fails. It's explicitly done after git remote add so username and password
# are not shown in CI output (even though it's also filtered out by CircleCI)
set -x

check_compile () {
    echo "INFO: checking if merged code can be compiled"
    ./configure --without-libcurl
    make -j10
}

# Clone current git repo (which should be community) to a temporary working
# directory and go there
GIT_DIR_ROOT="$(git rev-parse --show-toplevel)"
TMP_GIT_DIR="$(mktemp --directory -t citus-merge-check.XXXXXXXXX)"
git clone "$GIT_DIR_ROOT" "$TMP_GIT_DIR"
cd "$TMP_GIT_DIR"

# Fails in CI without this
git config user.email "citus-bot@microsoft.com"
git config user.name "citus bot"

# Disable "set -x" temporarily, because $ENTERPRISE_REMOTE contains passwords
{ set +x ; } 2> /dev/null
git remote add enterprise "$ENTERPRISE_REMOTE"
set -x

git remote set-url --push enterprise no-pushing

# Fetch enterprise-master
git fetch enterprise enterprise-master


git checkout "enterprise/enterprise-master"

if git merge --no-commit "origin/$PR_BRANCH"; then
    echo "INFO: community PR branch could be merged into enterprise-master"
    # check that we can compile after the merge
    if check_compile; then
        exit 0
    fi

    echo "WARN: Failed to compile after community PR branch was merged into enterprise"
fi

# undo partial merge
git merge --abort

# If we have a conflict on enterprise merge on the master branch, we have a problem.
# Provide an error message to indicate that enterprise merge is needed to fix this check.
if [[ $PR_BRANCH = master ]]; then
    echo "ERROR: Master branch has merge conflicts with enterprise-master."
    echo "Try re-running this CI job after merging your changes into enterprise-master."
    exit 1
fi

if ! git fetch enterprise "$PR_BRANCH" ; then
    echo "ERROR: enterprise/$PR_BRANCH was not found and community PR branch could not be merged into enterprise-master"
    exit 1
fi

# Show the top commit of the enterprise PR branch to make debugging easier
git log -n 1 "enterprise/$PR_BRANCH"

# Check that this branch contains the top commit of the current community PR
# branch. If it does not it means it's not up to date with the current PR, so
# the enterprise branch should be updated.
if ! git merge-base --is-ancestor "origin/$PR_BRANCH" "enterprise/$PR_BRANCH" ; then
    echo "ERROR: enterprise/$PR_BRANCH is not up to date with community PR branch"
    exit 1
fi

# Now check if we can merge the enterprise PR into enterprise-master without
# issues.
git merge --no-commit "enterprise/$PR_BRANCH"
# check that we can compile after the merge
check_compile
configure

@@ -1,6 +1,6 @@
 #! /bin/sh
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.69 for Citus 12.1devel.
+# Generated by GNU Autoconf 2.69 for Citus 12.1.1.
 #
 #
 # Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc.

@@ -579,8 +579,8 @@ MAKEFLAGS=
 # Identity of this package.
 PACKAGE_NAME='Citus'
 PACKAGE_TARNAME='citus'
-PACKAGE_VERSION='12.1devel'
-PACKAGE_STRING='Citus 12.1devel'
+PACKAGE_VERSION='12.1.1'
+PACKAGE_STRING='Citus 12.1.1'
 PACKAGE_BUGREPORT=''
 PACKAGE_URL=''

@@ -1262,7 +1262,7 @@ if test "$ac_init_help" = "long"; then
   # Omit some internal or obsolete options to make the list less imposing.
   # This message is too long to be a string in the A/UX 3.1 sh.
   cat <<_ACEOF
-\`configure' configures Citus 12.1devel to adapt to many kinds of systems.
+\`configure' configures Citus 12.1.1 to adapt to many kinds of systems.
 
 Usage: $0 [OPTION]... [VAR=VALUE]...
 

@@ -1324,7 +1324,7 @@ fi
 
 if test -n "$ac_init_help"; then
   case $ac_init_help in
-     short | recursive ) echo "Configuration of Citus 12.1devel:";;
+     short | recursive ) echo "Configuration of Citus 12.1.1:";;
    esac
   cat <<\_ACEOF
 

@@ -1429,7 +1429,7 @@ fi
 test -n "$ac_init_help" && exit $ac_status
 if $ac_init_version; then
   cat <<\_ACEOF
-Citus configure 12.1devel
+Citus configure 12.1.1
 generated by GNU Autoconf 2.69
 
 Copyright (C) 2012 Free Software Foundation, Inc.

@@ -1912,7 +1912,7 @@ cat >config.log <<_ACEOF
 This file contains any messages produced by compilers while
 running configure, to aid debugging if configure makes a mistake.
 
-It was created by Citus $as_me 12.1devel, which was
+It was created by Citus $as_me 12.1.1, which was
 generated by GNU Autoconf 2.69. Invocation command line was
 
   $ $0 $@

@@ -5393,7 +5393,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
 # report actual input values of CONFIG_FILES etc. instead of their
 # values after options handling.
 ac_log="
-This file was extended by Citus $as_me 12.1devel, which was
+This file was extended by Citus $as_me 12.1.1, which was
 generated by GNU Autoconf 2.69. Invocation command line was
 
   CONFIG_FILES = $CONFIG_FILES

@@ -5455,7 +5455,7 @@ _ACEOF
 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
 ac_cs_version="\\
-Citus config.status 12.1devel
+Citus config.status 12.1.1
 configured by $0, generated by GNU Autoconf 2.69,
 with options \\"\$ac_cs_config\\"
configure.ac

@@ -5,7 +5,7 @@
 # everyone needing autoconf installed, the resulting files are checked
 # into the SCM.
 
-AC_INIT([Citus], [12.1devel])
+AC_INIT([Citus], [12.1.1])
 AC_COPYRIGHT([Copyright (c) Citus Data, Inc.])
 
 # we'll need sed and awk for some of the version commands
src/backend/distributed/commands/create_distributed_table.c

@@ -445,6 +445,19 @@ CreateDistributedTableConcurrently(Oid relationId, char *distributionColumnName,
 	if (!IsColocateWithDefault(colocateWithTableName) && !IsColocateWithNone(
 			colocateWithTableName))
 	{
+		if (replicationModel != REPLICATION_MODEL_STREAMING)
+		{
+			ereport(ERROR, (errmsg("cannot create distributed table "
+								   "concurrently because Citus allows "
+								   "concurrent table distribution only when "
+								   "citus.shard_replication_factor = 1"),
+							errhint("table %s is requested to be colocated "
+									"with %s which has "
+									"citus.shard_replication_factor > 1",
+									get_rel_name(relationId),
+									colocateWithTableName)));
+		}
+
 		EnsureColocateWithTableIsValid(relationId, distributionMethod,
 									   distributionColumnName,
 									   colocateWithTableName);
src/backend/distributed/utils/foreign_key_relationship.c

@@ -28,6 +28,7 @@
 #include "distributed/version_compat.h"
 #include "nodes/pg_list.h"
 #include "storage/lockdefs.h"
+#include "utils/catcache.h"
 #include "utils/fmgroids.h"
 #include "utils/hsearch.h"
 #include "common/hashfn.h"

@@ -96,6 +97,8 @@ static List * GetConnectedListHelper(ForeignConstraintRelationshipNode *node,
 											 bool isReferencing);
 static List * GetForeignConstraintRelationshipHelper(Oid relationId, bool isReferencing);
 
+MemoryContext ForeignConstraintRelationshipMemoryContext = NULL;
+
 
 /*
  * GetForeignKeyConnectedRelationIdList returns a list of relation id's for

@@ -321,17 +324,36 @@ CreateForeignConstraintRelationshipGraph()
 		return;
 	}
 
-	ClearForeignConstraintRelationshipGraphContext();
+	/*
+	 * Lazily create our memory context once and reset on every reuse.
+	 * Since we have cleared and invalidated the fConstraintRelationshipGraph
+	 * right before, we can simply reset the context if it already exists.
+	 */
+	if (ForeignConstraintRelationshipMemoryContext == NULL)
+	{
+		/* make sure we've initialized CacheMemoryContext */
+		if (CacheMemoryContext == NULL)
+		{
+			CreateCacheMemoryContext();
+		}
 
-	MemoryContext fConstraintRelationshipMemoryContext = AllocSetContextCreateInternal(
-		CacheMemoryContext,
-		"Forign Constraint Relationship Graph Context",
-		ALLOCSET_DEFAULT_MINSIZE,
-		ALLOCSET_DEFAULT_INITSIZE,
-		ALLOCSET_DEFAULT_MAXSIZE);
+		ForeignConstraintRelationshipMemoryContext = AllocSetContextCreate(
+			CacheMemoryContext,
+			"Foreign Constraint Relationship Graph Context",
+			ALLOCSET_DEFAULT_MINSIZE,
+			ALLOCSET_DEFAULT_INITSIZE,
+			ALLOCSET_DEFAULT_MAXSIZE);
+	}
+	else
+	{
+		fConstraintRelationshipGraph = NULL;
+		MemoryContextReset(ForeignConstraintRelationshipMemoryContext);
+	}
+
+	Assert(fConstraintRelationshipGraph == NULL);
 
 	MemoryContext oldContext = MemoryContextSwitchTo(
-		fConstraintRelationshipMemoryContext);
+		ForeignConstraintRelationshipMemoryContext);
 
 	fConstraintRelationshipGraph = (ForeignConstraintRelationshipGraph *) palloc(
 		sizeof(ForeignConstraintRelationshipGraph));

@@ -631,22 +653,3 @@ CreateOrFindNode(HTAB *adjacencyLists, Oid relid)
 
 	return node;
 }
-
-
-/*
- * ClearForeignConstraintRelationshipGraphContext clear all the allocated memory obtained
- * for foreign constraint relationship graph. Since all the variables of relationship
- * graph was obtained within the same context, destroying hash map is enough as
- * it deletes the context.
- */
-void
-ClearForeignConstraintRelationshipGraphContext()
-{
-	if (fConstraintRelationshipGraph == NULL)
-	{
-		return;
-	}
-
-	hash_destroy(fConstraintRelationshipGraph->nodeMap);
-	fConstraintRelationshipGraph = NULL;
-}
src/include/distributed/foreign_key_relationship.h

@@ -20,7 +20,6 @@ extern bool ShouldUndistributeCitusLocalTable(Oid relationId);
 extern List * ReferencedRelationIdList(Oid relationId);
 extern List * ReferencingRelationIdList(Oid relationId);
 extern void SetForeignConstraintRelationshipGraphInvalid(void);
-extern void ClearForeignConstraintRelationshipGraphContext(void);
 extern bool OidVisited(HTAB *oidVisitedMap, Oid oid);
 extern void VisitOid(HTAB *oidVisitedMap, Oid oid);
src/include/pg_version_compat.h

@@ -46,6 +46,8 @@ get_guc_variables_compat(int *gucCount)
 #define object_ownercheck(a, b, c) object_ownercheck(a, b, c)
 #define object_aclcheck(a, b, c, d) object_aclcheck(a, b, c, d)
 
+#define pgstat_fetch_stat_local_beentry(a) pgstat_get_local_beentry_by_index(a)
+
 #else
 
 #include "catalog/pg_class_d.h"
src/test/regress/expected/create_distributed_table_concurrently.out

@@ -36,6 +36,19 @@ set citus.shard_replication_factor to 2;
 select create_distributed_table_concurrently('test','key', 'hash');
 ERROR:  cannot distribute a table concurrently when citus.shard_replication_factor > 1
 set citus.shard_replication_factor to 1;
+set citus.shard_replication_factor to 2;
+create table dist_1(a int);
+select create_distributed_table('dist_1', 'a');
+ create_distributed_table
+---------------------------------------------------------------------
+
+(1 row)
+
+set citus.shard_replication_factor to 1;
+create table dist_2(a int);
+select create_distributed_table_concurrently('dist_2', 'a', colocate_with=>'dist_1');
+ERROR:  cannot create distributed table concurrently because Citus allows concurrent table distribution only when citus.shard_replication_factor = 1
+HINT:  table dist_2 is requested to be colocated with dist_1 which has citus.shard_replication_factor > 1
 begin;
 select create_distributed_table_concurrently('test','key');
 ERROR:  create_distributed_table_concurrently cannot run inside a transaction block

@@ -138,27 +151,8 @@ select count(*) from test;
 rollback;
 -- verify that we can undistribute the table
 begin;
+set local client_min_messages to warning;
 select undistribute_table('test', cascade_via_foreign_keys := true);
-NOTICE:  converting the partitions of create_distributed_table_concurrently.test
-NOTICE:  creating a new table for create_distributed_table_concurrently.test
-NOTICE:  dropping the old create_distributed_table_concurrently.test
-NOTICE:  renaming the new table to create_distributed_table_concurrently.test
-NOTICE:  creating a new table for create_distributed_table_concurrently.ref
-NOTICE:  moving the data of create_distributed_table_concurrently.ref
-NOTICE:  dropping the old create_distributed_table_concurrently.ref
-NOTICE:  drop cascades to constraint test_id_fkey_1190041 on table create_distributed_table_concurrently.test_1190041
-CONTEXT:  SQL statement "SELECT citus_drop_all_shards(v_obj.objid, v_obj.schema_name, v_obj.object_name, drop_shards_metadata_only := false)"
-PL/pgSQL function citus_drop_trigger() line XX at PERFORM
-SQL statement "DROP TABLE create_distributed_table_concurrently.ref CASCADE"
-NOTICE:  renaming the new table to create_distributed_table_concurrently.ref
-NOTICE:  creating a new table for create_distributed_table_concurrently.test_1
-NOTICE:  moving the data of create_distributed_table_concurrently.test_1
-NOTICE:  dropping the old create_distributed_table_concurrently.test_1
-NOTICE:  renaming the new table to create_distributed_table_concurrently.test_1
-NOTICE:  creating a new table for create_distributed_table_concurrently.test_2
-NOTICE:  moving the data of create_distributed_table_concurrently.test_2
-NOTICE:  dropping the old create_distributed_table_concurrently.test_2
-NOTICE:  renaming the new table to create_distributed_table_concurrently.test_2
  undistribute_table
 ---------------------------------------------------------------------
 

@@ -245,7 +239,7 @@ insert into dist_table4 select s from generate_series(1,100) s;
 select count(*) as total from dist_table4;
  total
 ---------------------------------------------------------------------
-   100
+ 100
 (1 row)
 
 -- verify we do not allow foreign keys from distributed table to citus local table concurrently

@@ -295,13 +289,13 @@ select count(*) from test_columnar;
 select id from test_columnar where id = 1;
  id
 ---------------------------------------------------------------------
-  1
+ 1
 (1 row)
 
 select id from test_columnar where id = 51;
  id
 ---------------------------------------------------------------------
-  51
+ 51
 (1 row)
 
 select count(*) from test_columnar_1;
src/test/regress/expected/multi_extension.out

@@ -1413,7 +1413,7 @@ DROP TABLE multi_extension.prev_objects, multi_extension.extension_diff;
 SHOW citus.version;
  citus.version
 ---------------------------------------------------------------------
- 12.1devel
+ 12.1.1
 (1 row)
 
 -- ensure no unexpected objects were created outside pg_catalog
src/test/regress/sql/create_distributed_table_concurrently.sql

@@ -28,6 +28,14 @@ set citus.shard_replication_factor to 2;
 select create_distributed_table_concurrently('test','key', 'hash');
 set citus.shard_replication_factor to 1;
 
+set citus.shard_replication_factor to 2;
+create table dist_1(a int);
+select create_distributed_table('dist_1', 'a');
+set citus.shard_replication_factor to 1;
+
+create table dist_2(a int);
+select create_distributed_table_concurrently('dist_2', 'a', colocate_with=>'dist_1');
+
 begin;
 select create_distributed_table_concurrently('test','key');
 rollback;

@@ -63,6 +71,7 @@ rollback;
 
 -- verify that we can undistribute the table
 begin;
+set local client_min_messages to warning;
 select undistribute_table('test', cascade_via_foreign_keys := true);
 rollback;