
Commit 2dd7dc0

Merge branch 'main' into wangzheng/chore_gc_metric

2 parents 128140d + 65f05dd


62 files changed: +1282 -374 lines

.github/workflows/typo.yml

Lines changed: 1 addition & 1 deletion
@@ -10,4 +10,4 @@ jobs:
         uses: actions/checkout@v3

       - name: Check spelling of the entire repository
-        uses: crate-ci/typos@v1.14.12
+        uses: crate-ci/typos@v1.15.0

.typos.toml

Lines changed: 11 additions & 8 deletions
@@ -1,13 +1,16 @@
 [default.extend-words]
-indexs = "indices"
-Stichting = "Stichting"
-fo = "fo"
-FPR = "FPR"
+indexs = "indices" # Both are valid, just pick one.
+Stichting = "Stichting" # This is Dutch for "Foundation". From DuckDB.
+FPR = "FPR" # False Positive Rate
+inout = "inout" # This is a SQL keyword!
+numer = "numer" # numerator
+nd = "nd" # N-dimentional / 2nd
+steam = "stream" # You played with Steam games too much.
+# Some weird short variable names
 ot = "ot"
-inout = "inout"
-numer = "numer"
-nd = "nd"
-steam = "stream"
+bui = "bui"
+
+[default.extend-identifiers]

 [files]
 extend-exclude = [

Cargo.lock

Lines changed: 7 additions & 36 deletions
(Generated file; diff not rendered by default.)

Makefile.toml

Lines changed: 2 additions & 2 deletions
@@ -910,7 +910,7 @@ if [ $# -gt 0 ]; then
   ARGS=("$@")

   echo "Applying clippy --fix for $@ (including dirty and staged files)"
-  cargo clippy ${ARGS[@]/#/--package risingwave_} ${RISINGWAVE_FEATURE_FLAGS} --fix --allow-dirty --allow-staged
+  cargo clippy ${ARGS[@]/#/--package risingwave_} --fix --allow-dirty --allow-staged
 else
   echo "Applying clippy --fix for all targets to all files (including dirty and staged files)"
   echo "Tip: run $(tput setaf 4)./risedev cf {package_names}$(tput sgr0) to only check-fix those packages (e.g. frontend, meta)."

@@ -922,7 +922,7 @@ fi
 private = true
 category = "RiseDev - Check"
 description = "Run cargo typos-cli check"
-install_crate = { min_version = "1.14.8", crate_name = "typos-cli", binary = "typos", test_arg = [
+install_crate = { min_version = "1.15.0", crate_name = "typos-cli", binary = "typos", test_arg = [
     "--help",
 ], install_command = "binstall" }
 script = """
Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+statement error
+select p, percentile_cont(p) within group (order by x::float8)
+from generate_series(1,5) x,
+     (values (0::float8),(0.1),(0.25),(0.4),(0.5),(0.6),(0.75),(0.9),(1)) v(p)
+group by p order by p;
+
+statement error
+select percentile_cont(array[0,1,0.25,0.75,0.5,1,0.3,0.32,0.35,0.38,0.4]) within group (order by x)
+from generate_series(1,6) x;
+
+statement error
+select percentile_disc(array[0.25,0.5,0.75]) within group (order by x)
+from unnest('{fred,jim,fred,jack,jill,fred,jill,jim,jim,sheila,jim,sheila}'::text[]) u(x);
+
+statement error
+select pg_collation_for(percentile_disc(1) within group (order by x collate "POSIX"))
+from (values ('fred'),('jim')) v(x);
+
+query RR
+select
+  percentile_cont(0.5) within group (order by a),
+  percentile_disc(0.5) within group (order by a)
+from (values(1::float8),(3),(5),(7)) t(a);
+----
+4 3
+
+query RR
+select
+  percentile_cont(0.25) within group (order by a),
+  percentile_disc(0.5) within group (order by a)
+from (values(1::float8),(3),(5),(7)) t(a);
+----
+2.5 3
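
A quick check of the two expected result rows above (a reading aid, assuming PostgreSQL-compatible semantics for these ordered-set aggregates): over the four ordered values 1, 3, 5, 7, percentile_cont interpolates at the zero-based row position p * (n - 1), while percentile_disc returns the first value whose cumulative fraction reaches p:

percentile_cont(0.5):  0.5 * 3 = 1.5   ->  3 + 0.5 * (5 - 3) = 4
percentile_cont(0.25): 0.25 * 3 = 0.75 ->  1 + 0.75 * (3 - 1) = 2.5
percentile_disc(0.5):  2/4 = 0.5 >= 0.5 at the value 3  ->  3

which matches the expected outputs "4 3" and "2.5 3".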
Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+query I
+select abs.abs from abs(-1);
+----
+1
+
+query I
+select alias.alias from abs(-1) alias;
+----
+1
+
+query I
+select alias.col from abs(-1) alias(col);
+----
+1

e2e_test/streaming/values.slt

Lines changed: 18 additions & 0 deletions
@@ -35,3 +35,21 @@ drop materialized view mv;

 statement ok
 drop table t;
+
+statement ok
+create materialized view mv as select * from abs(-1);
+
+# TODO: support this
+statement error not yet implemented: LogicalTableFunction::logical_rewrite_for_stream
+create materialized view mv2 as select * from range(1,2);
+
+statement ok
+flush;
+
+query IR
+select * from mv;
+----
+1
+
+statement ok
+drop materialized view mv;

integration_tests/datagen/sink/sink.go

Lines changed: 4 additions & 4 deletions
@@ -8,15 +8,15 @@ type SinkRecord interface {
 	// Convert the event to an INSERT INTO command.
 	ToPostgresSql() string

-	// Convert the event to a Kakfa message in JSON format.
+	// Convert the event to a Kafka message in JSON format.
 	// This interface will also be used for Pulsar and Kinesis.
 	ToJson() (topic string, key string, data []byte)

-	// Convert the event to a Kakfa message in Protobuf format.
+	// Convert the event to a Kafka message in Protobuf format.
 	// This interface will also be used for Pulsar and Kinesis.
 	ToProtobuf() (topic string, key string, data []byte)

-	// Convert the event to a Kakfa message in Avro format.
+	// Convert the event to a Kafka message in Avro format.
 	// This interface will also be used for Pulsar and Kinesis.
 	ToAvro() (topic string, key string, data []byte)
 }

@@ -40,7 +40,7 @@ func (r BaseSinkRecord) ToAvro() (topic string, key string, data []byte) {
 	panic("not implemented")
 }

-// Convert the event to a Kakfa message in the given format.
+// Convert the event to a Kafka message in the given format.
 // This interface will also be used for Pulsar and Kinesis.
 func RecordToKafka(r SinkRecord, format string) (topic string, key string, data []byte) {
 	if format == "json" {

integration_tests/mysql-sink/create_mv.sql

Lines changed: 29 additions & 1 deletion
@@ -14,4 +14,32 @@ FROM
     jdbc.url = 'jdbc:mysql://mysql:3306/mydb?user=root&password=123456',
     table.name = 'target_count',
     type = 'upsert'
-);
+);
+
+-- ingest the table back to RW
+CREATE TABLE rw_types (
+    id BIGINT PRIMARY KEY,
+    varchar_column VARCHAR,
+    text_column TEXT,
+    integer_column INTEGER,
+    smallint_column SMALLINT,
+    bigint_column BIGINT,
+    decimal_column DECIMAL,
+    real_column REAL,
+    double_column DOUBLE PRECISION,
+    boolean_column BOOLEAN,
+    date_column DATE,
+    time_column TIME,
+    timestamp_column TIMESTAMP,
+    jsonb_column JSONB,
+    bytea_column BYTEA
+) WITH (
+    connector = 'mysql-cdc',
+    hostname = 'mysql',
+    port = '3306',
+    username = 'root',
+    password = '123456',
+    database.name = 'mydb',
+    table.name = 'data_types',
+    server.id = '3'
+);

integration_tests/mysql-sink/create_source.sql

Lines changed: 36 additions & 1 deletion
@@ -11,4 +11,39 @@ CREATE SOURCE user_behaviors (
     topic = 'user_behaviors',
     properties.bootstrap.server = 'message_queue:29092',
     scan.startup.mode = 'earliest'
-) ROW FORMAT JSON;
+) ROW FORMAT JSON;
+
+CREATE TABLE data_types (
+    id BIGINT PRIMARY KEY,
+    varchar_column VARCHAR,
+    text_column TEXT,
+    integer_column INTEGER,
+    smallint_column SMALLINT,
+    bigint_column BIGINT,
+    decimal_column DECIMAL,
+    real_column REAL,
+    double_column DOUBLE PRECISION,
+    boolean_column BOOLEAN,
+    date_column DATE,
+    time_column TIME,
+    timestamp_column TIMESTAMP,
+    jsonb_column JSONB,
+    bytea_column BYTEA
+);
+
+CREATE SINK data_types_mysql_sink
+FROM
+    data_types WITH (
+    connector = 'jdbc',
+    jdbc.url = 'jdbc:mysql://mysql:3306/mydb?user=root&password=123456',
+    table.name = 'data_types',
+    type = 'upsert'
+);
+
+INSERT INTO data_types (id, varchar_column, text_column, integer_column, smallint_column, bigint_column, decimal_column, real_column, double_column, boolean_column, date_column, time_column, timestamp_column, jsonb_column, bytea_column)
+VALUES
+    (1, 'Varchar value 1', 'Text value 1', 123, 456, 789, 12.34, 56.78, 90.12, TRUE, '2023-05-22', '12:34:56', '2023-05-22 12:34:56', '{"key": "value"}', E'\\xDEADBEEF'),
+    (2, 'Varchar value 2', 'Text value 2', 234, 567, 890, 23.45, 67.89, 01.23, FALSE, '2023-05-23', '23:45:01', '2023-05-23 23:45:01', '{"key": "value2"}', E'\\xFEEDBEEF'),
+    (3, 'Varchar value 3', 'Text value 3', 345, 678, 901, 34.56, 78.90, 12.34, TRUE, '2023-05-24', '12:34:56', '2023-05-24 12:34:56', '{"key": "value3"}', E'\\xCAFEBABE'),
+    (4, 'Varchar value 4', 'Text value 4', 456, 789, 012, 45.67, 89.01, 23.45, FALSE, '2023-05-25', '23:45:01', '2023-05-25 23:45:01', '{"key": "value4"}', E'\\xBABEC0DE'),
+    (5, 'Varchar value 5', 'Text value 5', 567, 890, 123, 56.78, 90.12, 34.56, TRUE, '2023-05-26', '12:34:56', '2023-05-26 12:34:56', '{"key": "value5"}', E'\\xDEADBABE');
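
These five rows flow from RisingWave through the JDBC sink into the MySQL table data_types and back into RisingWave as rw_types via the mysql-cdc table added in create_mv.sql above (rw_types is also added to the sink check list below). A minimal manual check of that round trip (a sketch, not part of this commit; it assumes the CDC source has caught up) could be run in RisingWave:

SELECT count(*) FROM rw_types;
-- expected: 5, matching the rows inserted into data_types above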
Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-user_behaviors,target_count
+user_behaviors,target_count,rw_types
Lines changed: 20 additions & 0 deletions
@@ -1,4 +1,24 @@
 CREATE TABLE target_count (
     target_id VARCHAR(128) primary key,
     target_count BIGINT
+);
+
+
+-- sink table
+CREATE TABLE data_types (
+    id BIGINT PRIMARY KEY,
+    varchar_column VARCHAR(255),
+    text_column TEXT,
+    integer_column INT,
+    smallint_column SMALLINT,
+    bigint_column BIGINT,
+    decimal_column DECIMAL(10,2),
+    real_column FLOAT,
+    double_column DOUBLE,
+    boolean_column BOOLEAN,
+    date_column DATE,
+    time_column TIME,
+    timestamp_column TIMESTAMP,
+    jsonb_column JSON,
+    bytea_column BLOB
 );

integration_tests/mysql-sink/query.sql

Lines changed: 7 additions & 0 deletions
@@ -2,5 +2,12 @@ SELECT
     *
 FROM
     target_count
+LIMIT
+    10;
+
+SELECT
+    *
+FROM
+    data_types
 LIMIT
     10;

integration_tests/postgres-sink/create_mv.sql

Lines changed: 33 additions & 1 deletion
@@ -14,4 +14,36 @@ FROM
     jdbc.url = 'jdbc:postgresql://postgres:5432/mydb?user=myuser&password=123456',
     table.name = 'target_count',
     type = 'upsert'
-);
+);
+
+-- ingest back to RW
+CREATE table rw_types (
+    id BIGINT PRIMARY KEY,
+    varchar_column VARCHAR,
+    text_column TEXT,
+    integer_column INTEGER,
+    smallint_column SMALLINT,
+    bigint_column BIGINT,
+    decimal_column DECIMAL,
+    real_column REAL,
+    double_column DOUBLE PRECISION,
+    boolean_column BOOLEAN,
+    date_column DATE,
+    time_column TIME,
+    timestamp_column TIMESTAMP,
+    timestamptz_column TIMESTAMPTZ,
+    interval_column INTERVAL,
+    jsonb_column JSONB,
+    bytea_column BYTEA,
+    array_column VARCHAR[]
+) WITH (
+    connector = 'postgres-cdc',
+    hostname = 'postgres',
+    port = '5432',
+    username = 'myuser',
+    password = '123456',
+    database.name = 'mydb',
+    schema.name = 'public',
+    table.name = 'data_types',
+    slot.name = 'data_types'
+);
