Skip to content

Commit d439de1

Browse files
authored
Merge branch 'main' into tab/pb-resolve-path
2 parents 8e7f971 + e21226a commit d439de1

File tree

239 files changed

+11107
-6658
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

239 files changed

+11107
-6658
lines changed

.github/workflows/cherry-pick-to-release-branch.yml

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -7,7 +7,7 @@ on:
77

88
jobs:
99
release_pull_request:
10-
if: "contains(github.event.pull_request.labels.*.name, 'need-cherry-pick-v0.19') && github.event.pull_request.merged == true"
10+
if: "contains(github.event.pull_request.labels.*.name, 'need-cherry-pick-v0.19') && github.event.pull_request.merged == true"
1111
runs-on: ubuntu-latest
1212
name: release_pull_request
1313
steps:
@@ -17,8 +17,7 @@ jobs:
1717
uses: risingwavelabs/github-action-cherry-pick@master
1818
with:
1919
pr_branch: 'v0.19.0-rc'
20-
labels: |
21-
cherry-pick
22-
body: 'Cherry picking #{old_pull_request_id} onto this branch'
20+
pr_labels: 'cherry-pick'
21+
pr_body: 'Cherry picking #{old_pull_request_id} onto this branch'
2322
env:
2423
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/connector-node-integration.yml

Lines changed: 1 addition & 103 deletions
Original file line number | Diff line number | Diff line change
@@ -41,107 +41,5 @@ jobs:
4141
4242
echo "--- build connector node"
4343
cd ${RISINGWAVE_ROOT}/java
44+
# run unit test
4445
mvn --batch-mode --update-snapshots clean package -Dno-build-rust
45-
46-
echo "--- install postgresql client"
47-
sudo apt install postgresql postgresql-contrib libpq-dev
48-
sudo systemctl start postgresql || sudo pg_ctlcluster 12 main start
49-
# disable password encryption
50-
sudo -u postgres psql -c "CREATE ROLE test LOGIN SUPERUSER PASSWORD 'connector';"
51-
sudo -u postgres createdb test
52-
sudo -u postgres psql -d test -c "CREATE TABLE test (id serial PRIMARY KEY, name VARCHAR (50) NOT NULL);"
53-
54-
echo "--- starting minio"
55-
echo "setting up minio"
56-
wget https://dl.minio.io/server/minio/release/linux-amd64/minio > /dev/null
57-
chmod +x minio
58-
sudo ./minio server /tmp/minio &
59-
# wait for minio to start
60-
sleep 3
61-
wget https://dl.minio.io/client/mc/release/linux-amd64/mc > /dev/null
62-
chmod +x mc
63-
MC_PATH=${PWD}/mc
64-
${MC_PATH} config host add minio http://127.0.0.1:9000 minioadmin minioadmin
65-
66-
echo "--- starting connector-node service"
67-
cd ${RISINGWAVE_ROOT}/java/connector-node/assembly/target/
68-
tar xvf risingwave-connector-1.0.0.tar.gz > /dev/null
69-
sh ./start-service.sh &
70-
sleep 3
71-
cd ../../
72-
73-
74-
echo "--- prepare integration tests"
75-
cd ${RISINGWAVE_ROOT}/java/connector-node
76-
pip3 install grpcio grpcio-tools psycopg2 psycopg2-binary pyspark==3.3
77-
cd python-client && bash gen-stub.sh
78-
79-
echo "--- running jdbc integration tests"
80-
if python3 integration_tests.py --file_sink; then
81-
echo "File sink test passed"
82-
else
83-
echo "File sink test failed"
84-
exit 1
85-
fi
86-
87-
if python3 integration_tests.py --jdbc_sink; then
88-
echo "Jdbc sink test passed"
89-
else
90-
echo "Jdbc sink test failed"
91-
exit 1
92-
fi
93-
cd ..
94-
echo "all jdbc tests passed"
95-
96-
echo "running iceberg integration tests"
97-
${MC_PATH} mb minio/bucket
98-
99-
# test append-only mode
100-
cd python-client
101-
python3 pyspark-util.py create_iceberg
102-
if python3 integration_tests.py --iceberg_sink; then
103-
python3 pyspark-util.py test_iceberg
104-
echo "Iceberg sink test passed"
105-
else
106-
echo "Iceberg sink test failed"
107-
exit 1
108-
fi
109-
python3 pyspark-util.py drop_iceberg
110-
111-
# test upsert mode
112-
python3 pyspark-util.py create_iceberg
113-
if python3 integration_tests.py --upsert_iceberg_sink --input_file="./data/upsert_sink_input.json"; then
114-
python3 pyspark-util.py test_upsert_iceberg --input_file="./data/upsert_sink_input.json"
115-
echo "Upsert iceberg sink test passed"
116-
else
117-
echo "Upsert iceberg sink test failed"
118-
exit 1
119-
fi
120-
python3 pyspark-util.py drop_iceberg
121-
122-
# clean up minio
123-
cd ..
124-
${MC_PATH} rm -r -force minio/bucket
125-
${MC_PATH} rb minio/bucket
126-
echo "all iceberg tests passed"
127-
128-
echo "running deltalake integration tests"
129-
${MC_PATH} mb minio/bucket
130-
131-
cd python-client
132-
# test append-only mode
133-
python3 pyspark-util.py create_deltalake
134-
if python3 integration_tests.py --deltalake_sink; then
135-
python3 pyspark-util.py test_deltalake
136-
echo "Deltalake sink test passed"
137-
else
138-
echo "Deltalake sink test failed"
139-
exit 1
140-
fi
141-
142-
# clean up minio
143-
cd ..
144-
${MC_PATH} rm -r -force minio/bucket
145-
${MC_PATH} rb minio/bucket
146-
echo "all deltalake tests passed"
147-

0 commit comments

Comments
 (0)