Skip to content

Commit f112e6d

Browse files
committed
fix schema check for allowed operations
1 parent bfc4abe commit f112e6d

File tree

3 files changed

+33
-28
lines changed

3 files changed

+33
-28
lines changed

src/frontend/src/handler/alter_table_column.rs

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -97,22 +97,24 @@ pub async fn get_new_table_definition_for_cdc_table(
9797
"source schema should be None for CDC table"
9898
);
9999

100-
let orig_column_map: HashMap<String, ColumnDef> = HashMap::from_iter(
101-
original_columns
100+
let orig_column_catalog: HashMap<String, ColumnCatalog> = HashMap::from_iter(
101+
original_catalog
102+
.columns()
102103
.iter()
103-
.map(|col| (col.name.real_value(), col.clone())),
104+
.map(|col| (col.name().to_string(), col.clone())),
104105
);
105106

106107
// update the original columns with new version columns
107108
let mut new_column_defs = vec![];
108-
for col in new_columns {
109-
// if the column exists in the original definitoins, use the original column definition.
109+
for new_col in new_columns {
110+
// if the column exists in the original catalog, use it to construct the column definition.
110111
// since we don't support altering the column type right now
111-
if let Some(original_col) = orig_column_map.get(col.name()) {
112-
new_column_defs.push(original_col.clone());
112+
if let Some(original_col) = orig_column_catalog.get(new_col.name()) {
113+
let ty = to_ast_data_type(original_col.data_type())?;
114+
new_column_defs.push(ColumnDef::new(original_col.name().into(), ty, None, vec![]));
113115
} else {
114-
let ty = to_ast_data_type(col.data_type())?;
115-
new_column_defs.push(ColumnDef::new(col.name().into(), ty, None, vec![]));
116+
let ty = to_ast_data_type(new_col.data_type())?;
117+
new_column_defs.push(ColumnDef::new(new_col.name().into(), ty, None, vec![]));
116118
}
117119
}
118120
*original_columns = new_column_defs;

src/frontend/src/handler/create_table.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1187,6 +1187,7 @@ async fn derive_schema_for_cdc_table(
11871187
.iter_mut()
11881188
.zip_eq_fast(new_version_columns.into_iter())
11891189
{
1190+
assert_eq!(col.name(), new_version_col.name());
11901191
col.column_desc.generated_or_default_column =
11911192
new_version_col.column_desc.generated_or_default_column;
11921193
}

src/meta/service/src/ddl_service.rs

Lines changed: 21 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ use anyhow::anyhow;
1919
use rand::seq::SliceRandom;
2020
use rand::thread_rng;
2121
use risingwave_common::catalog::ColumnCatalog;
22+
use risingwave_common::types::DataType;
2223
use risingwave_common::util::column_index_mapping::ColIndexMapping;
2324
use risingwave_connector::sink::catalog::SinkId;
2425
use risingwave_meta::manager::{EventLogManagerRef, MetadataManager};
@@ -969,40 +970,41 @@ impl DdlService for DdlServiceImpl {
969970
for table in tables {
970971
// Since we only support `ADD` and `DROP` column, we check whether the new columns and the original columns
971972
// is a subset of the other.
972-
let original_column_names: HashSet<String> = HashSet::from_iter(
973-
table
974-
.columns
975-
.iter()
976-
.map(|col| ColumnCatalog::from(col.clone()).column_desc.name),
977-
);
978-
let new_column_names: HashSet<String> = HashSet::from_iter(
979-
table_change
980-
.columns
981-
.iter()
982-
.map(|col| ColumnCatalog::from(col.clone()).column_desc.name),
983-
);
984-
if !(original_column_names.is_subset(&new_column_names)
985-
|| original_column_names.is_superset(&new_column_names))
973+
let original_columns: HashSet<(String, DataType)> =
974+
HashSet::from_iter(table.columns.iter().map(|col| {
975+
let col = ColumnCatalog::from(col.clone());
976+
let data_type = col.data_type().clone();
977+
(col.column_desc.name, data_type)
978+
}));
979+
let new_columns: HashSet<(String, DataType)> =
980+
HashSet::from_iter(table_change.columns.iter().map(|col| {
981+
let col = ColumnCatalog::from(col.clone());
982+
let data_type = col.data_type().clone();
983+
(col.column_desc.name, data_type)
984+
}));
985+
986+
if !(original_columns.is_subset(&new_columns)
987+
|| original_columns.is_superset(&new_columns))
986988
{
987989
tracing::warn!(target: "auto_schema_change",
988990
table_id = table.id,
989991
cdc_table_id = table.cdc_table_id,
990992
upstraem_ddl = table_change.upstream_ddl,
991-
original_columns = ?original_column_names,
992-
new_columns = ?new_column_names,
993+
original_columns = ?original_columns,
994+
new_columns = ?new_columns,
993995
"New columns should be a subset or superset of the original columns, since only `ADD COLUMN` and `DROP COLUMN` is supported");
994996
return Err(Status::invalid_argument(
995997
"New columns should be a subset or superset of the original columns",
996998
));
997999
}
9981000
// skip the schema change if there is no change to original columns
999-
if original_column_names == new_column_names {
1001+
if original_columns == new_columns {
10001002
tracing::warn!(target: "auto_schema_change",
10011003
table_id = table.id,
10021004
cdc_table_id = table.cdc_table_id,
10031005
upstraem_ddl = table_change.upstream_ddl,
1004-
original_columns = ?original_column_names,
1005-
new_columns = ?new_column_names,
1006+
original_columns = ?original_columns,
1007+
new_columns = ?new_columns,
10061008
"No change to columns, skipping the schema change");
10071009
continue;
10081010
}

0 commit comments

Comments
 (0)