Merge branch 'main' into kwannoel/gen-joins
kwannoel authored Mar 15, 2023
2 parents a3c5dd0 + 8b09f5e commit 68a81cf
Showing 737 changed files with 25,141 additions and 8,559 deletions.
107 changes: 107 additions & 0 deletions .github/workflows/intergration_tests.yml
@@ -0,0 +1,107 @@
name: Integration Tests CI

on:
  schedule:
    # Currently we build docker images at 12:00 (UTC), so run this at 13:00
    - cron: '0 13 * * *'

jobs:
  golangci:
    name: lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/setup-go@v3
        with:
          go-version: 1.18
      - uses: actions/checkout@v3
      - name: golangci-lint
        uses: golangci/golangci-lint-action@v3
        with:
          working-directory: integration_tests/datagen
          args: --timeout=120s
      - name: Go build
        run: |
          go mod tidy
          git diff --exit-code go.mod go.sum
          go build .
        working-directory: integration_tests/datagen
  run-demos:
    strategy:
      matrix:
        testcase:
          - ad-click
          - ad-ctr
          - cdn-metrics
          - clickstream
          - livestream
          - twitter
          - prometheus
          - schema-registry
          - mysql-cdc
          - postgres-cdc
          #- mysql-sink
          - postgres-sink
          - iceberg-sink
        format: ["json", "protobuf"]
        exclude:
          - testcase: ad-click
            format: protobuf
          - testcase: ad-ctr
            format: protobuf
          - testcase: cdn-metrics
            format: protobuf
          - testcase: clickstream
            format: protobuf
          - testcase: prometheus
            format: protobuf
          # This demo showcases Avro + schema registry, so there is no file server for the schema file.
          - testcase: schema-registry
            format: protobuf
          - testcase: mysql-cdc
            format: protobuf
          - testcase: postgres-cdc
            format: protobuf
          - testcase: mysql-sink
            format: protobuf
          - testcase: postgres-sink
            format: protobuf
          - testcase: iceberg-sink
            format: protobuf
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      # In this step, this action saves a list of existing images; in the post run,
      # it creates the cache without them. It also restores the cache if it exists.
      - uses: satackey/[email protected]
        # Ignore a failure of this step so it does not terminate the job.
        continue-on-error: true

      - name: Rewrite docker compose for protobuf
        working-directory: integration_tests/scripts
        if: ${{ matrix.format == 'protobuf' }}
        run: |
          python3 gen_pb_compose.py ${{ matrix.testcase }} ${{ matrix.format }}
      - name: Run Demos
        working-directory: integration_tests/scripts
        run: |
          python3 run_demos.py --case ${{ matrix.testcase }} --format ${{ matrix.format }}
      - name: Check if the ingestion is successful
        working-directory: integration_tests/scripts
        run: |
          python3 check_data.py ${{ matrix.testcase }}
      - name: Dump logs on failure
        if: ${{ failure() }}
        working-directory: integration_tests/${{ matrix.testcase }}
        run: |
          docker compose logs
      - uses: satackey/[email protected]
        continue-on-error: true
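
As a point of reference, the sketch below chains the same scripts that one matrix cell of the run-demos job invokes, so a single demo can be reproduced locally. It assumes a repository checkout with Docker available; the TESTCASE and FORMAT values are placeholders drawn from the matrix above, and this is not an additional script in the repository.

#!/usr/bin/env bash
# Sketch: run one demo case the way the run-demos job does (assumptions noted above).
set -euo pipefail

TESTCASE=ad-click   # any entry from the testcase matrix above
FORMAT=json         # or "protobuf" for cases that are not excluded above

cd integration_tests/scripts
if [ "$FORMAT" = "protobuf" ]; then
  # Rewrite the demo's docker compose file to use protobuf sources.
  python3 gen_pb_compose.py "$TESTCASE" "$FORMAT"
fi
python3 run_demos.py --case "$TESTCASE" --format "$FORMAT"
python3 check_data.py "$TESTCASE"
# On failure, inspect the demo's containers: (cd "../$TESTCASE" && docker compose logs)
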
2 changes: 1 addition & 1 deletion .github/workflows/typo.yml
@@ -10,4 +10,4 @@ jobs:
uses: actions/checkout@v3

- name: Check spelling of the entire repository
uses: crate-ci/typos@v1.11.1
uses: crate-ci/typos@v1.13.20
2 changes: 2 additions & 0 deletions .gitignore
@@ -49,6 +49,8 @@ src/log/

log/

*.log

.risingwave/
.bin/

3 changes: 3 additions & 0 deletions .licenserc.yaml
@@ -8,10 +8,13 @@ header:
- "dashboard/**/*.js"
- "dashboard/**/*.ts"
- "src/**/*.html"
- "java/**/*.java"
- "java/**/*.py"

paths-ignore:
- "**/gen/**"
- "**/*.d.ts"
- "src/sqlparser/**/*.rs"
- "java/connector-node/risingwave-source-cdc/src/main/java/com/risingwave/connector/cdc/debezium/internal/*.java"

comment: on-failure
2 changes: 2 additions & 0 deletions .typos.toml
@@ -17,4 +17,6 @@ extend-exclude = [
"scripts",
"src/frontend/planner_test/tests/testdata",
"src/tests/sqlsmith/tests/freeze",
"**/go.mod",
"**/go.sum",
]
27 changes: 24 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

16 changes: 12 additions & 4 deletions Makefile.toml
@@ -9,6 +9,7 @@ extend = [
{ path = "src/risedevtool/redis.toml" },
{ path = "src/risedevtool/connector.toml" },
{ path = "src/risedevtool/risedev-components.toml" },
{ path = "src/sqlparser/test_runner/sqlparser_test.toml"},
{ path = "src/frontend/planner_test/planner_test.toml" },
{ path = "src/tests/compaction_test/Makefile.toml" },
{ path = "src/storage/backup/integration_tests/Makefile.toml" },
@@ -599,11 +600,18 @@ else
fi
ARTIFACT="risingwave-connector-1.0.0.tar.gz"
TARGET_PATH="${JAVA_DIR}/connector-node/assembly/target/${ARTIFACT}"
cd "${JAVA_DIR}"
"${MAVEN_PATH}" --batch-mode --update-snapshots clean package -Dmaven.test.skip
if [[ ! -f ${TARGET_PATH} ]] || [[ ! -z ${REBUILD_CONNECTOR_NODE} ]]; then
echo "Rebuild connector node"
cd "${JAVA_DIR}"
"${MAVEN_PATH}" --batch-mode --update-snapshots clean package -Dmaven.test.skip
else
echo "Connector node was built already. Skipped. Set REBUILD_CONNECTOR_NODE=1 to enable rebuild"
fi
rm -rf ${PREFIX_BIN}/connector-node
mkdir -p "${PREFIX_BIN}/connector-node"
tar xf "${JAVA_DIR}/connector-node/assembly/target/${ARTIFACT}" -C "${PREFIX_BIN}/connector-node"
tar xf ${TARGET_PATH} -C "${PREFIX_BIN}/connector-node"
'''
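
With this change the connector node is rebuilt only when the tarball is missing or REBUILD_CONNECTOR_NODE is set. A hypothetical way to force a rebuild is shown below; only the environment variable comes from the script above, while the ./risedev dev entry point is an assumption about how this task is usually triggered.

# Hypothetical usage: force the connector node to be rebuilt even if
# risingwave-connector-1.0.0.tar.gz already exists (task entry point assumed).
REBUILD_CONNECTOR_NODE=1 ./risedev dev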


@@ -693,7 +701,7 @@ script = """
#!/usr/bin/env bash
set -e
cargo check -p risingwave_simulation "$@"
cargo check -p risingwave_simulation --all-targets "$@"
"""

[tasks.sslt]
2 changes: 1 addition & 1 deletion ci/Dockerfile
@@ -7,7 +7,7 @@ ARG RUST_TOOLCHAIN
RUN apt-get update -yy && \
DEBIAN_FRONTEND=noninteractive apt-get -y install make build-essential cmake protobuf-compiler curl parallel python3 python3-pip \
openssl libssl-dev libsasl2-dev libcurl4-openssl-dev pkg-config bash openjdk-11-jdk wget unzip git tmux lld postgresql-client kafkacat netcat mysql-client \
maven -yy \
maven zstd -yy \
&& rm -rf /var/lib/{apt,dpkg,cache,log}/

SHELL ["/bin/bash", "-c"]
2 changes: 1 addition & 1 deletion ci/build-ci-image.sh
@@ -14,7 +14,7 @@ export RUST_TOOLCHAIN=$(cat ../rust-toolchain)
# !!! CHANGE THIS WHEN YOU WANT TO BUMP CI IMAGE !!! #
# AND ALSO docker-compose.yml #
######################################################
export BUILD_ENV_VERSION=v20230302
export BUILD_ENV_VERSION=v20230309

export BUILD_TAG="public.ecr.aws/x5u3w5h6/rw-build-env:${BUILD_ENV_VERSION}"

1 change: 0 additions & 1 deletion ci/connector-node-version

This file was deleted.

8 changes: 4 additions & 4 deletions ci/docker-compose.yml
@@ -34,28 +34,28 @@ services:
retries: 5

source-test-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230302
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230309
depends_on:
- mysql
- db
volumes:
- ..:/risingwave

sink-test-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230302
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230309
depends_on:
- mysql
- db
volumes:
- ..:/risingwave

rw-build-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230302
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230309
volumes:
- ..:/risingwave

regress-test-env:
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230302
image: public.ecr.aws/x5u3w5h6/rw-build-env:v20230309
depends_on:
db:
condition: service_healthy
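
Because the build-env tag has to be bumped in ci/build-ci-image.sh and ci/docker-compose.yml together (as the comment in build-ci-image.sh warns), a quick manual check such as the one below can confirm the two files agree. This is a sketch, not a script that exists in the repository.

# Sketch: list every CI build-env version string in both files; all hits should show v20230309.
grep -Hn "v2023" ci/build-ci-image.sh ci/docker-compose.yml
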
2 changes: 1 addition & 1 deletion ci/scripts/deterministic-e2e-test.sh
@@ -45,4 +45,4 @@ echo "--- deterministic simulation e2e, ci-3cn-2fe, parallel, batch"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation -j 16 ./e2e_test/batch/\*\*/\*.slt 2> $LOGDIR/parallel-batch-{}.log && rm $LOGDIR/parallel-batch-{}.log'

echo "--- deterministic simulation e2e, ci-3cn-2fe, fuzzing (pre-generated-queries)"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} './risingwave_simulation --run-sqlsmith-queries ./src/tests/sqlsmith/tests/sqlsmith-query-snapshots/{} 2> $LOGDIR/fuzzing-{}.log && rm $LOGDIR/fuzzing-{}.log'
seq 64 | parallel MADSIM_TEST_SEED={} './risingwave_simulation --run-sqlsmith-queries ./src/tests/sqlsmith/tests/sqlsmith-query-snapshots/{} 2> $LOGDIR/fuzzing-{}.log && rm $LOGDIR/fuzzing-{}.log'
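
The hardcoded 64 is expected to track the number of pre-generated sqlsmith query snapshots. A sanity check along these lines, a sketch using the snapshot path referenced above, can catch drift:

# Sketch: count snapshot directories; the result should match the seq 64 above.
ls ./src/tests/sqlsmith/tests/sqlsmith-query-snapshots | wc -l
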
7 changes: 6 additions & 1 deletion ci/scripts/deterministic-scale-test.sh
@@ -8,5 +8,10 @@ source ci/scripts/common.env.sh
echo "--- Download artifacts"
buildkite-agent artifact download scale-test.tar.zst .

echo "--- Extract artifacts"
tar -xvf scale-test.tar.zst
mkdir target/sim
mv target/ci-sim target/sim

echo "--- Run scaling tests in deterministic simulation mode"
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} NEXTEST_PROFILE=ci-scaling cargo nextest run --archive-file scale-test.tar.zst --no-fail-fast
seq $TEST_NUM | parallel MADSIM_TEST_SEED={} NEXTEST_PROFILE=ci-scaling cargo nextest run --no-fail-fast --cargo-metadata target/nextest/cargo-metadata.json --binaries-metadata target/nextest/binaries-metadata.json
5 changes: 0 additions & 5 deletions ci/scripts/docker.sh
@@ -6,11 +6,6 @@ set -euo pipefail
ghcraddr="ghcr.io/risingwavelabs/risingwave"
dockerhubaddr="risingwavelabs/risingwave"
arch="$(uname -m)"
connector_node_version=$(cat ci/connector-node-version)

# Git clone risingwave-connector-node repo
git clone https://"$GITHUB_TOKEN"@github.com/risingwavelabs/risingwave-connector-node.git
cd risingwave-connector-node && git checkout ${connector_node_version} && cd ..

# Build RisingWave docker image ${BUILDKITE_COMMIT}-${arch}
echo "--- docker build and tag"
16 changes: 8 additions & 8 deletions ci/scripts/e2e-iceberg-sink-test.sh
@@ -62,7 +62,7 @@ spark-3.3.1-bin-hadoop3/bin/spark-sql --packages $DEPENDENCIES \
--conf spark.sql.catalog.demo.hadoop.fs.s3a.endpoint=http://127.0.0.1:9301 \
--conf spark.sql.catalog.demo.hadoop.fs.s3a.access.key=hummockadmin \
--conf spark.sql.catalog.demo.hadoop.fs.s3a.secret.key=hummockadmin \
--S --e "CREATE TABLE demo.demo_db.demo_table(v1 int, v2 int) TBLPROPERTIES ('format-version'='2');"
--S --e "CREATE TABLE demo.demo_db.demo_table(v1 int, v2 bigint, v3 string) TBLPROPERTIES ('format-version'='2');"

echo "--- testing sinks"
sqllogictest -p 4566 -d dev './e2e_test/sink/iceberg_sink.slt'
@@ -80,13 +80,13 @@ spark-3.3.1-bin-hadoop3/bin/spark-sql --packages $DEPENDENCIES \

# check sink destination using shell
if cat ./spark-output/*.csv | sort | awk -F "," '{
if ($1 == 1 && $2 == 2) c1++;
if ($1 == 13 && $2 == 2) c2++;
if ($1 == 21 && $2 == 2) c3++;
if ($1 == 2 && $2 == 2) c4++;
if ($1 == 3 && $2 == 2) c5++;
if ($1 == 5 && $2 == 2) c6++;
if ($1 == 8 && $2 == 2) c7++; }
if ($1 == 1 && $2 == 2 && $3 == "1-2") c1++;
if ($1 == 13 && $2 == 2 && $3 == "13-2") c2++;
if ($1 == 21 && $2 == 2 && $3 == "21-2") c3++;
if ($1 == 2 && $2 == 2 && $3 == "2-2") c4++;
if ($1 == 3 && $2 == 2 && $3 == "3-2") c5++;
if ($1 == 5 && $2 == 2 && $3 == "5-2") c6++;
if ($1 == 8 && $2 == 2 && $3 == "8-2") c7++; }
END { exit !(c1 == 1 && c2 == 1 && c3 == 1 && c4 == 1 && c5 == 1 && c6 == 1 && c7 == 1); }'; then
echo "Iceberg sink check passed"
else
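
For orientation, the awk conditions above amount to requiring exactly one occurrence of each of the following (v1, v2, v3) rows in ./spark-output/*.csv. This listing is derived from the conditions themselves, not from captured test output.

# Illustration only: the rows the check above expects, one occurrence each.
cat <<'EOF'
1,2,1-2
2,2,2-2
3,2,3-2
5,2,5-2
8,2,8-2
13,2,13-2
21,2,21-2
EOF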
(Diffs for the remaining changed files are not shown.)