diff --git a/src/connector/src/parser/avro/parser.rs b/src/connector/src/parser/avro/parser.rs
index dde74a999ac9f..ac93ab3e69807 100644
--- a/src/connector/src/parser/avro/parser.rs
+++ b/src/connector/src/parser/avro/parser.rs
@@ -87,7 +87,7 @@ impl AvroAccessBuilder {
     /// ## Confluent schema registry
     ///
     /// - In Kafka ([Confluent schema registry wire format](https://docs.confluent.io/platform/7.6/schema-registry/fundamentals/serdes-develop/index.html#wire-format)):
-    ///   starts with 5 bytes`0x00{schema_id:08x}` followed by Avro binary encoding.
+    ///   starts with 5 bytes `0x00{schema_id:08x}` followed by Avro binary encoding.
     async fn parse_avro_value(&self, payload: &[u8]) -> ConnectorResult<Option<Value>> {
         // parse payload to avro value
         // if use confluent schema, get writer schema from confluent schema registry
diff --git a/src/object_store/src/object/s3.rs b/src/object_store/src/object/s3.rs
index 0ef12f3da3a3f..001eb8128a5b2 100644
--- a/src/object_store/src/object/s3.rs
+++ b/src/object_store/src/object/s3.rs
@@ -116,10 +116,7 @@ impl S3StreamingUploader {
         /// Reference:
         const MIN_PART_SIZE: usize = 5 * 1024 * 1024;
         const MAX_PART_SIZE: usize = 5 * 1024 * 1024 * 1024;
-        let part_size = config
-            .upload_part_size
-            .min(MAX_PART_SIZE)
-            .max(MIN_PART_SIZE);
+        let part_size = config.upload_part_size.clamp(MIN_PART_SIZE, MAX_PART_SIZE);
         Self {
             client,
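
Two review notes on the hunks above, each with an illustrative sketch.

First, the wire format described in the avro parser doc comment: a Confluent-framed Kafka payload starts with a one-byte `0x00` magic marker and a 4-byte big-endian schema id, followed by the Avro binary-encoded datum. Below is a minimal sketch of splitting that 5-byte header off a payload; `split_confluent_header` is a hypothetical helper named here for illustration, not RisingWave's actual code:

```rust
/// Splits a Confluent-framed payload into (schema_id, avro_body).
/// Hypothetical helper for illustration only.
fn split_confluent_header(payload: &[u8]) -> Result<(u32, &[u8]), String> {
    // Wire format: magic byte 0x00, then a 4-byte big-endian schema id,
    // then the Avro binary-encoded body.
    if payload.len() < 5 {
        return Err(format!("payload too short: {} bytes", payload.len()));
    }
    if payload[0] != 0x00 {
        return Err(format!("unexpected magic byte: 0x{:02x}", payload[0]));
    }
    let schema_id = u32::from_be_bytes([payload[1], payload[2], payload[3], payload[4]]);
    Ok((schema_id, &payload[5..]))
}

fn main() {
    // 0x00 magic + schema id 42 (big-endian) + a dummy two-byte Avro body.
    let payload = [0x00, 0x00, 0x00, 0x00, 0x2a, 0xde, 0xad];
    let (id, body) = split_confluent_header(&payload).unwrap();
    assert_eq!(id, 42);
    assert_eq!(body, &[0xde, 0xad]);
}
```

The schema id extracted this way is what the parser would use to fetch the writer schema from the registry before decoding the body.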
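Second, the `s3.rs` hunk replaces the chained `.min(MAX_PART_SIZE).max(MIN_PART_SIZE)` with `clamp`, which states the bounds-enforcement intent directly. The two forms are equivalent whenever the lower bound does not exceed the upper bound; note that `clamp` panics if `min > max`, which the constants here rule out. A small self-contained check of that equivalence, assuming a 64-bit target so the 5 GiB constant fits in `usize`:

```rust
fn main() {
    const MIN_PART_SIZE: usize = 5 * 1024 * 1024; // 5 MiB, the S3 minimum part size
    const MAX_PART_SIZE: usize = 5 * 1024 * 1024 * 1024; // 5 GiB, the S3 maximum part size

    // Values below, inside, and above the valid range.
    for configured in [1024usize, 16 * 1024 * 1024, 6 * 1024 * 1024 * 1024] {
        let chained = configured.min(MAX_PART_SIZE).max(MIN_PART_SIZE);
        let clamped = configured.clamp(MIN_PART_SIZE, MAX_PART_SIZE);
        // Both expressions force the configured size into [MIN, MAX].
        assert_eq!(chained, clamped);
        println!("{configured} -> {clamped}");
    }
}
```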