risingwave_connector/parser/
config.rs

1// Copyright 2025 RisingWave Labs
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7//     http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15use std::collections::{BTreeMap, HashSet};
16
17use risingwave_common::bail;
18use risingwave_common::secret::LocalSecretManager;
19use risingwave_connector_codec::decoder::avro::MapHandling;
20use risingwave_pb::catalog::{PbSchemaRegistryNameStrategy, StreamSourceInfo};
21
22use super::utils::get_kafka_topic;
23use super::{DebeziumProps, TimestamptzHandling};
24use crate::WithOptionsSecResolved;
25use crate::connector_common::AwsAuthProps;
26use crate::error::ConnectorResult;
27use crate::parser::PROTOBUF_MESSAGES_AS_JSONB;
28use crate::schema::AWS_GLUE_SCHEMA_ARN_KEY;
29use crate::schema::schema_registry::SchemaRegistryAuth;
30use crate::source::cdc::CDC_MONGODB_STRONG_SCHEMA_KEY;
31use crate::source::{SourceColumnDesc, SourceEncode, SourceFormat, extract_source_struct};
32
/// Note: this is created in `SourceReader::build_stream`
#[derive(Debug, Clone, Default)]
pub struct ParserConfig {
    /// Properties shared by all formats/encodings; see [`CommonParserConfig`].
    pub common: CommonParserConfig,
    /// Format/encode-specific properties; see [`SpecificParserConfig`].
    pub specific: SpecificParserConfig,
}
39
40impl ParserConfig {
41    pub fn get_config(self) -> (Vec<SourceColumnDesc>, SpecificParserConfig) {
42        (self.common.rw_columns, self.specific)
43    }
44}
45
/// Parser configuration shared regardless of format/encode.
#[derive(Debug, Clone, Default)]
pub struct CommonParserConfig {
    /// Note: this is created by `SourceDescBuilder::builder`
    pub rw_columns: Vec<SourceColumnDesc>,
}
51
/// The format/encode-specific part of the parser configuration,
/// resolved by [`SpecificParserConfig::new`].
#[derive(Debug, Clone, Default)]
pub struct SpecificParserConfig {
    /// Encode-specific properties; see [`EncodingProperties`].
    pub encoding_config: EncodingProperties,
    /// Format (protocol)-specific properties; see [`ProtocolProperties`].
    pub protocol_config: ProtocolProperties,
}
57
/// Encode-specific parser properties, one variant per supported `ENCODE`.
#[derive(Debug, Default, Clone)]
pub enum EncodingProperties {
    Avro(AvroProperties),
    Protobuf(ProtobufProperties),
    Csv(CsvProperties),
    Json(JsonProperties),
    MongoJson(MongoProperties),
    Bytes(BytesProperties),
    Parquet,
    Native,
    /// Encoding can't be specified because the source will determine it. Now only used in Iceberg.
    None,
    #[default]
    Unspecified,
}
73
/// Format (protocol)-specific parser properties, one variant per supported `FORMAT`.
#[derive(Debug, Default, Clone)]
pub enum ProtocolProperties {
    Debezium(DebeziumProps),
    DebeziumMongo,
    Maxwell,
    Canal,
    Plain,
    Upsert,
    Native,
    /// Protocol can't be specified because the source will determine it. Now only used in Iceberg.
    None,
    #[default]
    Unspecified,
}
88
89impl SpecificParserConfig {
90    // for test only
91    pub const DEFAULT_PLAIN_JSON: SpecificParserConfig = SpecificParserConfig {
92        encoding_config: EncodingProperties::Json(JsonProperties {
93            use_schema_registry: false,
94            timestamptz_handling: None,
95        }),
96        protocol_config: ProtocolProperties::Plain,
97    };
98
99    // The validity of (format, encode) is ensured by `extract_format_encode`
100    pub fn new(
101        info: &StreamSourceInfo,
102        with_properties: &WithOptionsSecResolved,
103    ) -> ConnectorResult<Self> {
104        let info = info.clone();
105        let source_struct = extract_source_struct(&info)?;
106        let format_encode_options_with_secret = LocalSecretManager::global()
107            .fill_secrets(info.format_encode_options, info.format_encode_secret_refs)?;
108        let (options, secret_refs) = with_properties.clone().into_parts();
109        // Make sure `with_properties` is no longer used by accident.
110        // All reads shall go to `options_with_secret` instead.
111        #[expect(unused_variables)]
112        let with_properties = ();
113        let options_with_secret =
114            LocalSecretManager::global().fill_secrets(options, secret_refs)?;
115        let format = source_struct.format;
116        let encode = source_struct.encode;
117        // this transformation is needed since there may be config for the protocol
118        // in the future
119        let protocol_config = match format {
120            SourceFormat::Native => ProtocolProperties::Native,
121            SourceFormat::None => ProtocolProperties::None,
122            SourceFormat::Debezium => {
123                let debezium_props = DebeziumProps::from(&format_encode_options_with_secret);
124                ProtocolProperties::Debezium(debezium_props)
125            }
126            SourceFormat::DebeziumMongo => ProtocolProperties::DebeziumMongo,
127            SourceFormat::Maxwell => ProtocolProperties::Maxwell,
128            SourceFormat::Canal => ProtocolProperties::Canal,
129            SourceFormat::Upsert => ProtocolProperties::Upsert,
130            SourceFormat::Plain => ProtocolProperties::Plain,
131            _ => unreachable!(),
132        };
133
134        let encoding_config = match (format, encode) {
135            (SourceFormat::Plain, SourceEncode::Csv) => EncodingProperties::Csv(CsvProperties {
136                delimiter: info.csv_delimiter as u8,
137                has_header: info.csv_has_header,
138            }),
139            (SourceFormat::Plain, SourceEncode::Parquet) => EncodingProperties::Parquet,
140            (SourceFormat::Plain, SourceEncode::Avro)
141            | (SourceFormat::Upsert, SourceEncode::Avro) => {
142                let mut config = AvroProperties {
143                    record_name: if info.proto_message_name.is_empty() {
144                        None
145                    } else {
146                        Some(info.proto_message_name.clone())
147                    },
148                    key_record_name: info.key_message_name.clone(),
149                    map_handling: MapHandling::from_options(&format_encode_options_with_secret)?,
150                    ..Default::default()
151                };
152                config.schema_location = if let Some(schema_arn) =
153                    format_encode_options_with_secret.get(AWS_GLUE_SCHEMA_ARN_KEY)
154                {
155                    risingwave_common::license::Feature::GlueSchemaRegistry
156                        .check_available()
157                        .map_err(anyhow::Error::from)?;
158                    SchemaLocation::Glue {
159                        schema_arn: schema_arn.clone(),
160                        aws_auth_props: serde_json::from_value::<AwsAuthProps>(
161                            serde_json::to_value(format_encode_options_with_secret.clone())
162                                .unwrap(),
163                        )
164                        .map_err(|e| anyhow::anyhow!(e))?,
165                        // The option `mock_config` is not public and we can break compatibility.
166                        mock_config: format_encode_options_with_secret
167                            .get("aws.glue.mock_config")
168                            .cloned(),
169                    }
170                } else if info.use_schema_registry {
171                    SchemaLocation::Confluent {
172                        urls: info.row_schema_location.clone(),
173                        client_config: SchemaRegistryAuth::from(&format_encode_options_with_secret),
174                        name_strategy: PbSchemaRegistryNameStrategy::try_from(info.name_strategy)
175                            .unwrap(),
176                        topic: get_kafka_topic(&options_with_secret)?.clone(),
177                    }
178                } else {
179                    SchemaLocation::File {
180                        url: info.row_schema_location.clone(),
181                        aws_auth_props: Some(
182                            serde_json::from_value::<AwsAuthProps>(
183                                serde_json::to_value(format_encode_options_with_secret.clone())
184                                    .unwrap(),
185                            )
186                            .map_err(|e| anyhow::anyhow!(e))?,
187                        ),
188                    }
189                };
190                EncodingProperties::Avro(config)
191            }
192            (SourceFormat::Plain, SourceEncode::Protobuf)
193            | (SourceFormat::Upsert, SourceEncode::Protobuf) => {
194                if info.row_schema_location.is_empty() {
195                    bail!("protobuf file location not provided");
196                }
197                let mut messages_as_jsonb = if let Some(messages_as_jsonb) =
198                    format_encode_options_with_secret.get(PROTOBUF_MESSAGES_AS_JSONB)
199                {
200                    messages_as_jsonb.split(',').map(|s| s.to_owned()).collect()
201                } else {
202                    HashSet::new()
203                };
204                messages_as_jsonb.insert("google.protobuf.Any".to_owned());
205
206                let mut config = ProtobufProperties {
207                    message_name: info.proto_message_name.clone(),
208                    key_message_name: info.key_message_name.clone(),
209                    messages_as_jsonb,
210                    ..Default::default()
211                };
212                config.schema_location = if info.use_schema_registry {
213                    SchemaLocation::Confluent {
214                        urls: info.row_schema_location.clone(),
215                        client_config: SchemaRegistryAuth::from(&format_encode_options_with_secret),
216                        name_strategy: PbSchemaRegistryNameStrategy::try_from(info.name_strategy)
217                            .unwrap(),
218                        topic: get_kafka_topic(&options_with_secret)?.clone(),
219                    }
220                } else {
221                    SchemaLocation::File {
222                        url: info.row_schema_location.clone(),
223                        aws_auth_props: Some(
224                            serde_json::from_value::<AwsAuthProps>(
225                                serde_json::to_value(format_encode_options_with_secret.clone())
226                                    .unwrap(),
227                            )
228                            .map_err(|e| anyhow::anyhow!(e))?,
229                        ),
230                    }
231                };
232                EncodingProperties::Protobuf(config)
233            }
234            (SourceFormat::Debezium, SourceEncode::Avro) => {
235                EncodingProperties::Avro(AvroProperties {
236                    record_name: if info.proto_message_name.is_empty() {
237                        None
238                    } else {
239                        Some(info.proto_message_name.clone())
240                    },
241                    key_record_name: info.key_message_name.clone(),
242                    schema_location: SchemaLocation::Confluent {
243                        urls: info.row_schema_location.clone(),
244                        client_config: SchemaRegistryAuth::from(&format_encode_options_with_secret),
245                        name_strategy: PbSchemaRegistryNameStrategy::try_from(info.name_strategy)
246                            .unwrap(),
247                        topic: get_kafka_topic(&options_with_secret).unwrap().clone(),
248                    },
249                    ..Default::default()
250                })
251            }
252            (
253                SourceFormat::Plain
254                | SourceFormat::Debezium
255                | SourceFormat::Maxwell
256                | SourceFormat::Canal
257                | SourceFormat::Upsert,
258                SourceEncode::Json,
259            ) => EncodingProperties::Json(JsonProperties {
260                use_schema_registry: info.use_schema_registry,
261                timestamptz_handling: TimestamptzHandling::from_options(
262                    &format_encode_options_with_secret,
263                )?,
264            }),
265            (SourceFormat::DebeziumMongo, SourceEncode::Json) => {
266                let props = MongoProperties::from(&format_encode_options_with_secret);
267                EncodingProperties::MongoJson(props)
268            }
269            (SourceFormat::Plain, SourceEncode::Bytes) => {
270                EncodingProperties::Bytes(BytesProperties { column_name: None })
271            }
272            (SourceFormat::Native, SourceEncode::Native) => EncodingProperties::Native,
273            (SourceFormat::None, SourceEncode::None) => EncodingProperties::None,
274            (format, encode) => {
275                bail!("Unsupported format {:?} encode {:?}", format, encode);
276            }
277        };
278        Ok(Self {
279            encoding_config,
280            protocol_config,
281        })
282    }
283}
284
/// Properties for `ENCODE AVRO`.
#[derive(Debug, Default, Clone)]
pub struct AvroProperties {
    /// Where to load the Avro schema from (file, Confluent registry, or AWS Glue).
    pub schema_location: SchemaLocation,
    /// Record name from the user-specified message name; `None` when unset.
    pub record_name: Option<String>,
    /// Optional record name for the key part.
    pub key_record_name: Option<String>,
    /// How Avro `map` types are handled; see [`MapHandling`].
    pub map_handling: Option<MapHandling>,
}
292
/// WIP: may cover protobuf and json schema later.
#[derive(Debug, Clone)]
pub enum SchemaLocation {
    /// Avsc from `https://`, `s3://` or `file://`.
    File {
        url: String,
        aws_auth_props: Option<AwsAuthProps>, // for s3
    },
    /// <https://docs.confluent.io/platform/current/schema-registry/index.html>
    Confluent {
        /// Registry URL(s), taken from the user-provided schema location.
        urls: String,
        client_config: SchemaRegistryAuth,
        name_strategy: PbSchemaRegistryNameStrategy,
        /// Kafka topic, resolved via `get_kafka_topic` from the `WITH` options.
        topic: String,
    },
    /// <https://docs.aws.amazon.com/glue/latest/dg/schema-registry.html>
    Glue {
        schema_arn: String,
        aws_auth_props: AwsAuthProps,
        // When `Some(_)`, ignore AWS and load schemas from provided config
        mock_config: Option<String>,
    },
}
316
317// TODO: `SpecificParserConfig` shall not `impl`/`derive` a `Default`
318impl Default for SchemaLocation {
319    fn default() -> Self {
320        // backward compatible but undesired
321        Self::File {
322            url: Default::default(),
323            aws_auth_props: None,
324        }
325    }
326}
327
/// Properties for `ENCODE PROTOBUF`.
#[derive(Debug, Default, Clone)]
pub struct ProtobufProperties {
    /// Where to load the protobuf schema from.
    pub schema_location: SchemaLocation,
    /// Name of the message to parse.
    pub message_name: String,
    /// Optional message name for the key part.
    pub key_message_name: Option<String>,
    /// Message types decoded as a single JSONB value instead of a struct.
    /// `SpecificParserConfig::new` always inserts `google.protobuf.Any` here.
    pub messages_as_jsonb: HashSet<String>,
}
335
/// Properties for `ENCODE CSV`.
#[derive(Debug, Default, Clone, Copy)]
pub struct CsvProperties {
    /// Field delimiter byte (e.g. `b','`).
    pub delimiter: u8,
    /// Whether the input has a header row.
    pub has_header: bool,
}
341
/// Properties for `ENCODE JSON`.
#[derive(Debug, Default, Clone)]
pub struct JsonProperties {
    /// Whether a schema registry is used; mirrors `StreamSourceInfo::use_schema_registry`.
    pub use_schema_registry: bool,
    /// How timestamptz values are handled; `None` means the default handling.
    pub timestamptz_handling: Option<TimestamptzHandling>,
}
347
/// Properties for `ENCODE BYTES`.
#[derive(Debug, Default, Clone)]
pub struct BytesProperties {
    /// Optional column name; `SpecificParserConfig::new` always leaves this `None`.
    pub column_name: Option<String>,
}
352
/// Properties for MongoDB CDC (`FORMAT DEBEZIUM_MONGO ENCODE JSON`).
#[derive(Debug, Default, Clone)]
pub struct MongoProperties {
    /// Whether strong-schema mode is enabled, i.e. the
    /// `CDC_MONGODB_STRONG_SCHEMA_KEY` option is set to "true".
    pub strong_schema: bool,
}
357
358impl MongoProperties {
359    pub fn new(strong_schema: bool) -> Self {
360        Self { strong_schema }
361    }
362}
363impl From<&BTreeMap<String, String>> for MongoProperties {
364    fn from(config: &BTreeMap<String, String>) -> Self {
365        let strong_schema = config
366            .get(CDC_MONGODB_STRONG_SCHEMA_KEY)
367            .is_some_and(|k| k.eq_ignore_ascii_case("true"));
368        Self { strong_schema }
369    }
370}