diff --git a/src/ast/dml.rs b/src/ast/dml.rs index ccea7fbcb..0bf512c31 100644 --- a/src/ast/dml.rs +++ b/src/ast/dml.rs @@ -33,11 +33,11 @@ pub use super::ddl::{ColumnDef, TableConstraint}; use super::{ display_comma_separated, display_separated, query::InputFormatClause, Assignment, ClusteredBy, - CommentDef, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat, HiveIOFormat, - HiveRowFormat, Ident, IndexType, InsertAliases, MysqlInsertPriority, ObjectName, OnCommit, - OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem, Setting, - SqlOption, SqliteOnConflict, StorageSerializationPolicy, TableEngine, TableObject, - TableWithJoins, Tag, WrappedCollection, + CommentDef, CreateTableOptions, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat, + HiveIOFormat, HiveRowFormat, Ident, IndexType, InsertAliases, MysqlInsertPriority, ObjectName, + OnCommit, OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem, + Setting, SqliteOnConflict, StorageSerializationPolicy, TableObject, TableWithJoins, Tag, + WrappedCollection, }; /// Index column type. @@ -146,19 +146,17 @@ pub struct CreateTable { pub constraints: Vec<TableConstraint>, pub hive_distribution: HiveDistributionStyle, pub hive_formats: Option<HiveFormat>, - pub table_properties: Vec<SqlOption>, - pub with_options: Vec<SqlOption>, + pub table_options: CreateTableOptions, pub file_format: Option<FileFormat>, pub location: Option<String>, pub query: Option<Box<Query>>, pub without_rowid: bool, pub like: Option<ObjectName>, pub clone: Option<ObjectName>, - pub engine: Option<TableEngine>, - pub comment: Option<CommentDef>, - pub auto_increment_offset: Option<u32>, - pub default_charset: Option<String>, - pub collation: Option<String>, + // For the Hive dialect, the table comment comes after the column definitions without `=`, + // so we need a dedicated field to identify this case when displaying. + // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) + pub comment_after_column_def: Option<CommentDef>, pub on_commit: Option<OnCommit>, /// ClickHouse "ON CLUSTER" clause: /// @@ -179,9 +177,6 @@ pub struct CreateTable { /// Hive: Table clustering column list. /// pub clustered_by: Option<ClusteredBy>, - /// BigQuery: Table options list. - /// - pub options: Option<Vec<SqlOption>>, /// SQLite "STRICT" clause. /// if the "STRICT" table-option keyword is added to the end, after the closing ")", /// then strict typing rules apply to that table. 
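For orientation, a minimal consumer-side sketch (not part of this patch) of the consolidated field: it matches on `table_options` of a parsed statement and assumes the crate's usual `Parser::parse_sql` entry point plus the `CreateTableOptions` variants introduced in src/ast/mod.rs below.

use sqlparser::ast::{CreateTableOptions, SqlOption, Statement};
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

// Collect the table options of a CREATE TABLE, whichever clause style carried them.
fn table_options_of(sql: &str) -> Vec<SqlOption> {
    match Parser::parse_sql(&MySqlDialect {}, sql).unwrap().remove(0) {
        Statement::CreateTable(ct) => match ct.table_options {
            CreateTableOptions::Plain(opts)
            | CreateTableOptions::With(opts)
            | CreateTableOptions::Options(opts)
            | CreateTableOptions::TableProperties(opts) => opts,
            CreateTableOptions::None => vec![],
        },
        _ => vec![],
    }
}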
@@ -277,7 +272,7 @@ impl Display for CreateTable { // Hive table comment should be after column definitions, please refer to: // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) - if let Some(CommentDef::AfterColumnDefsWithoutEq(comment)) = &self.comment { + if let Some(comment) = &self.comment_after_column_def { write!(f, " COMMENT '{comment}'")?; } @@ -370,35 +365,14 @@ impl Display for CreateTable { } write!(f, " LOCATION '{}'", self.location.as_ref().unwrap())?; } - if !self.table_properties.is_empty() { - write!( - f, - " TBLPROPERTIES ({})", - display_comma_separated(&self.table_properties) - )?; - } - if !self.with_options.is_empty() { - write!(f, " WITH ({})", display_comma_separated(&self.with_options))?; - } - if let Some(engine) = &self.engine { - write!(f, " ENGINE={engine}")?; - } - if let Some(comment_def) = &self.comment { - match comment_def { - CommentDef::WithEq(comment) => { - write!(f, " COMMENT = '{comment}'")?; - } - CommentDef::WithoutEq(comment) => { - write!(f, " COMMENT '{comment}'")?; - } - // For CommentDef::AfterColumnDefsWithoutEq will be displayed after column definition - CommentDef::AfterColumnDefsWithoutEq(_) => (), - } - } - if let Some(auto_increment_offset) = self.auto_increment_offset { - write!(f, " AUTO_INCREMENT {auto_increment_offset}")?; + match &self.table_options { + options @ CreateTableOptions::With(_) + | options @ CreateTableOptions::Plain(_) + | options @ CreateTableOptions::TableProperties(_) => write!(f, " {}", options)?, + _ => (), } + if let Some(primary_key) = &self.primary_key { write!(f, " PRIMARY KEY {}", primary_key)?; } @@ -411,15 +385,9 @@ impl Display for CreateTable { if let Some(cluster_by) = self.cluster_by.as_ref() { write!(f, " CLUSTER BY {cluster_by}")?; } - - if let Some(options) = self.options.as_ref() { - write!( - f, - " OPTIONS({})", - display_comma_separated(options.as_slice()) - )?; + if let options @ CreateTableOptions::Options(_) = &self.table_options { + write!(f, " {}", options)?; } - if let Some(external_volume) = self.external_volume.as_ref() { write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?; } @@ -495,13 +463,6 @@ impl Display for CreateTable { write!(f, " WITH TAG ({})", display_comma_separated(tag.as_slice()))?; } - if let Some(default_charset) = &self.default_charset { - write!(f, " DEFAULT CHARSET={default_charset}")?; - } - if let Some(collation) = &self.collation { - write!(f, " COLLATE={collation}")?; - } - if self.on_commit.is_some() { let on_commit = match self.on_commit { Some(OnCommit::DeleteRows) => "ON COMMIT DELETE ROWS", diff --git a/src/ast/helpers/stmt_create_table.rs b/src/ast/helpers/stmt_create_table.rs index 344e9dec9..849a9b5f0 100644 --- a/src/ast/helpers/stmt_create_table.rs +++ b/src/ast/helpers/stmt_create_table.rs @@ -26,10 +26,12 @@ use sqlparser_derive::{Visit, VisitMut}; use super::super::dml::CreateTable; use crate::ast::{ - ClusteredBy, ColumnDef, CommentDef, Expr, FileFormat, HiveDistributionStyle, HiveFormat, Ident, - ObjectName, OnCommit, OneOrManyWithParens, Query, RowAccessPolicy, SqlOption, Statement, - StorageSerializationPolicy, TableConstraint, TableEngine, Tag, WrappedCollection, + ClusteredBy, ColumnDef, CommentDef, CreateTableOptions, Expr, FileFormat, + HiveDistributionStyle, HiveFormat, Ident, ObjectName, OnCommit, OneOrManyWithParens, Query, + RowAccessPolicy, Statement, StorageSerializationPolicy, TableConstraint, Tag, + WrappedCollection, }; + use crate::parser::ParserError; /// Builder for create 
table statement variant ([1]). @@ -76,19 +78,13 @@ pub struct CreateTableBuilder { pub constraints: Vec, pub hive_distribution: HiveDistributionStyle, pub hive_formats: Option, - pub table_properties: Vec, - pub with_options: Vec, pub file_format: Option, pub location: Option, pub query: Option>, pub without_rowid: bool, pub like: Option, pub clone: Option, - pub engine: Option, - pub comment: Option, - pub auto_increment_offset: Option, - pub default_charset: Option, - pub collation: Option, + pub comment_after_column_def: Option, pub on_commit: Option, pub on_cluster: Option, pub primary_key: Option>, @@ -96,7 +92,6 @@ pub struct CreateTableBuilder { pub partition_by: Option>, pub cluster_by: Option>>, pub clustered_by: Option, - pub options: Option>, pub strict: bool, pub copy_grants: bool, pub enable_schema_evolution: Option, @@ -112,6 +107,7 @@ pub struct CreateTableBuilder { pub catalog: Option, pub catalog_sync: Option, pub storage_serialization_policy: Option, + pub table_options: CreateTableOptions, } impl CreateTableBuilder { @@ -130,19 +126,13 @@ impl CreateTableBuilder { constraints: vec![], hive_distribution: HiveDistributionStyle::NONE, hive_formats: None, - table_properties: vec![], - with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, - engine: None, - comment: None, - auto_increment_offset: None, - default_charset: None, - collation: None, + comment_after_column_def: None, on_commit: None, on_cluster: None, primary_key: None, @@ -150,7 +140,6 @@ impl CreateTableBuilder { partition_by: None, cluster_by: None, clustered_by: None, - options: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -166,6 +155,7 @@ impl CreateTableBuilder { catalog: None, catalog_sync: None, storage_serialization_policy: None, + table_options: CreateTableOptions::None, } } pub fn or_replace(mut self, or_replace: bool) -> Self { @@ -228,15 +218,6 @@ impl CreateTableBuilder { self } - pub fn table_properties(mut self, table_properties: Vec) -> Self { - self.table_properties = table_properties; - self - } - - pub fn with_options(mut self, with_options: Vec) -> Self { - self.with_options = with_options; - self - } pub fn file_format(mut self, file_format: Option) -> Self { self.file_format = file_format; self @@ -266,28 +247,8 @@ impl CreateTableBuilder { self } - pub fn engine(mut self, engine: Option) -> Self { - self.engine = engine; - self - } - - pub fn comment(mut self, comment: Option) -> Self { - self.comment = comment; - self - } - - pub fn auto_increment_offset(mut self, offset: Option) -> Self { - self.auto_increment_offset = offset; - self - } - - pub fn default_charset(mut self, default_charset: Option) -> Self { - self.default_charset = default_charset; - self - } - - pub fn collation(mut self, collation: Option) -> Self { - self.collation = collation; + pub fn comment_after_column_def(mut self, comment: Option) -> Self { + self.comment_after_column_def = comment; self } @@ -326,11 +287,6 @@ impl CreateTableBuilder { self } - pub fn options(mut self, options: Option>) -> Self { - self.options = options; - self - } - pub fn strict(mut self, strict: bool) -> Self { self.strict = strict; self @@ -415,6 +371,11 @@ impl CreateTableBuilder { self } + pub fn table_options(mut self, table_options: CreateTableOptions) -> Self { + self.table_options = table_options; + self + } + pub fn build(self) -> Statement { Statement::CreateTable(CreateTable { or_replace: self.or_replace, @@ -430,19 +391,13 @@ impl 
CreateTableBuilder { constraints: self.constraints, hive_distribution: self.hive_distribution, hive_formats: self.hive_formats, - table_properties: self.table_properties, - with_options: self.with_options, file_format: self.file_format, location: self.location, query: self.query, without_rowid: self.without_rowid, like: self.like, clone: self.clone, - engine: self.engine, - comment: self.comment, - auto_increment_offset: self.auto_increment_offset, - default_charset: self.default_charset, - collation: self.collation, + comment_after_column_def: self.comment_after_column_def, on_commit: self.on_commit, on_cluster: self.on_cluster, primary_key: self.primary_key, @@ -450,7 +405,6 @@ impl CreateTableBuilder { partition_by: self.partition_by, cluster_by: self.cluster_by, clustered_by: self.clustered_by, - options: self.options, strict: self.strict, copy_grants: self.copy_grants, enable_schema_evolution: self.enable_schema_evolution, @@ -466,6 +420,7 @@ impl CreateTableBuilder { catalog: self.catalog, catalog_sync: self.catalog_sync, storage_serialization_policy: self.storage_serialization_policy, + table_options: self.table_options, }) } } @@ -491,19 +446,13 @@ impl TryFrom<Statement> for CreateTableBuilder { constraints, hive_distribution, hive_formats, - table_properties, - with_options, file_format, location, query, without_rowid, like, clone, - engine, - comment, - auto_increment_offset, - default_charset, - collation, + comment_after_column_def: comment, on_commit, on_cluster, primary_key, @@ -511,7 +460,6 @@ impl TryFrom<Statement> for CreateTableBuilder { partition_by, cluster_by, clustered_by, - options, strict, copy_grants, enable_schema_evolution, @@ -527,6 +475,7 @@ impl TryFrom<Statement> for CreateTableBuilder { catalog, catalog_sync, storage_serialization_policy, + table_options, }) => Ok(Self { or_replace, temporary, @@ -539,19 +488,13 @@ impl TryFrom<Statement> for CreateTableBuilder { constraints, hive_distribution, hive_formats, - table_properties, - with_options, file_format, location, query, without_rowid, like, clone, - engine, - comment, - auto_increment_offset, - default_charset, - collation, + comment_after_column_def: comment, on_commit, on_cluster, primary_key, @@ -559,7 +502,6 @@ impl TryFrom<Statement> for CreateTableBuilder { partition_by, cluster_by, clustered_by, - options, strict, iceberg, copy_grants, @@ -577,6 +519,7 @@ impl TryFrom<Statement> for CreateTableBuilder { catalog, catalog_sync, storage_serialization_policy, + table_options, }), _ => Err(ParserError::ParserError(format!( "Expected create table statement, but received: {stmt}" @@ -590,7 +533,7 @@ impl TryFrom<Statement> for CreateTableBuilder { pub(crate) struct CreateTableConfiguration { pub partition_by: Option<Box<Expr>>, pub cluster_by: Option<WrappedCollection<Vec<Ident>>>, - pub options: Option<Vec<SqlOption>>, + pub table_options: CreateTableOptions, } #[cfg(test)] diff --git a/src/ast/helpers/stmt_data_loading.rs b/src/ast/helpers/stmt_data_loading.rs index cc4fa12fa..0481b9ed4 100644 --- a/src/ast/helpers/stmt_data_loading.rs +++ b/src/ast/helpers/stmt_data_loading.rs @@ -21,8 +21,6 @@ #[cfg(not(feature = "std"))] use alloc::string::String; -#[cfg(not(feature = "std"))] -use alloc::vec::Vec; use core::fmt; #[cfg(feature = "serde")] diff --git a/src/ast/mod.rs b/src/ast/mod.rs index f187df995..efdf33e09 100644 --- a/src/ast/mod.rs +++ b/src/ast/mod.rs @@ -2557,6 +2557,18 @@ pub enum CreateTableOptions { /// /// Options(Vec<SqlOption>), + + /// Plain options, i.e. options that are not part of any declarative clause, e.g. WITH/OPTIONS/... 
+ /// + Plain(Vec), + + TableProperties(Vec), +} + +impl Default for CreateTableOptions { + fn default() -> Self { + Self::None + } } impl fmt::Display for CreateTableOptions { @@ -2568,6 +2580,12 @@ impl fmt::Display for CreateTableOptions { CreateTableOptions::Options(options) => { write!(f, "OPTIONS({})", display_comma_separated(options)) } + CreateTableOptions::TableProperties(options) => { + write!(f, "TBLPROPERTIES ({})", display_comma_separated(options)) + } + CreateTableOptions::Plain(options) => { + write!(f, "{}", display_separated(options, " ")) + } CreateTableOptions::None => Ok(()), } } @@ -7277,7 +7295,10 @@ pub enum SqlOption { /// Any option that consists of a key value pair where the value is an expression. e.g. /// /// WITH(DISTRIBUTION = ROUND_ROBIN) - KeyValue { key: Ident, value: Expr }, + KeyValue { + key: Ident, + value: Expr, + }, /// One or more table partitions and represents which partition the boundary values belong to, /// e.g. /// @@ -7289,6 +7310,16 @@ pub enum SqlOption { range_direction: Option, for_values: Vec, }, + + Comment(CommentDef), + + TableSpace(TablespaceOption), + + // Advanced parameter formations + // UNION = (tbl_name[,tbl_name]...) + // ENGINE = ReplicatedMergeTree('/table_name','{replica}', ver) + // ENGINE = SummingMergeTree([columns]) + NamedParenthesizedList(NamedParenthesizedList), } impl fmt::Display for SqlOption { @@ -7320,10 +7351,53 @@ impl fmt::Display for SqlOption { display_comma_separated(for_values) ) } + SqlOption::TableSpace(tablespace_option) => { + write!(f, "TABLESPACE {}", tablespace_option.name)?; + match tablespace_option.storage { + Some(StorageType::Disk) => write!(f, " STORAGE DISK"), + Some(StorageType::Memory) => write!(f, " STORAGE MEMORY"), + _ => Ok(()), + } + } + + SqlOption::Comment(comment) => match comment { + CommentDef::WithEq(comment) => { + write!(f, "COMMENT = '{comment}'") + } + CommentDef::WithoutEq(comment) => { + write!(f, "COMMENT '{comment}'") + } + }, + SqlOption::NamedParenthesizedList(value) => { + write!(f, "{} = ", value.key)?; + if let Some(key) = &value.value { + write!(f, "{}", key)?; + } + if !value.parameters.is_empty() { + write!(f, "({})", display_comma_separated(&value.parameters))? + } + Ok(()) + } } } } +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum StorageType { + Disk, + Memory, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct TablespaceOption { + pub name: String, + pub storage: Option, +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -8543,27 +8617,13 @@ impl Display for CreateViewParams { } } -/// Engine of DB. Some warehouse has parameters of engine, e.g. 
[clickhouse] -/// -/// [clickhouse]: https://clickhouse.com/docs/en/engines/table-engines #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableEngine { - pub name: String, - pub parameters: Option>, -} - -impl Display for TableEngine { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.name)?; - - if let Some(parameters) = self.parameters.as_ref() { - write!(f, "({})", display_comma_separated(parameters))?; - } - - Ok(()) - } +pub struct NamedParenthesizedList { + pub key: Ident, + pub value: Option, + pub parameters: Vec, } /// Snowflake `WITH ROW ACCESS POLICY policy_name ON (identifier, ...)` @@ -8627,18 +8687,12 @@ pub enum CommentDef { /// Does not include `=` when printing the comment, as `COMMENT 'comment'` WithEq(String), WithoutEq(String), - // For Hive dialect, the table comment is after the column definitions without `=`, - // so we need to add an extra variant to allow to identify this case when displaying. - // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) - AfterColumnDefsWithoutEq(String), } impl Display for CommentDef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - CommentDef::WithEq(comment) - | CommentDef::WithoutEq(comment) - | CommentDef::AfterColumnDefsWithoutEq(comment) => write!(f, "{comment}"), + CommentDef::WithEq(comment) | CommentDef::WithoutEq(comment) => write!(f, "{comment}"), } } } diff --git a/src/ast/spans.rs b/src/ast/spans.rs index 11770d1bc..d51526f23 100644 --- a/src/ast/spans.rs +++ b/src/ast/spans.rs @@ -30,13 +30,14 @@ use super::{ FunctionArgumentClause, FunctionArgumentList, FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, Insert, Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView, LimitClause, MatchRecognizePattern, Measure, - NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, - OnInsert, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, ProjectionSelect, - Query, RaiseStatement, RaiseStatementValue, ReferentialAction, RenameSelectItem, - ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem, SetExpr, SqlOption, - Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef, TableConstraint, - TableFactor, TableObject, TableOptionsClustered, TableWithJoins, UpdateTableFromKind, Use, - Value, Values, ViewColumnDef, WildcardAdditionalOptions, With, WithFill, + NamedParenthesizedList, NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, + OnConflictAction, OnInsert, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, + ProjectionSelect, Query, RaiseStatement, RaiseStatementValue, ReferentialAction, + RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem, + SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef, + TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins, + UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WildcardAdditionalOptions, With, + WithFill, }; /// Given an iterator of spans, return the [Span::union] of all spans. 
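As a quick check of the Display rules added to SqlOption in src/ast/mod.rs above, a small sketch (not part of the patch) that builds the ENGINE option with the generic node replacing TableEngine; the field names follow the NamedParenthesizedList struct shown there.

use sqlparser::ast::{Ident, NamedParenthesizedList, SqlOption};

fn main() {
    // ENGINE = ReplicatedMergeTree('/table_name', '{replica}') modeled as a key,
    // an optional value, and a parenthesized identifier list.
    let engine = SqlOption::NamedParenthesizedList(NamedParenthesizedList {
        key: Ident::new("ENGINE"),
        value: Some(Ident::new("ReplicatedMergeTree")),
        parameters: vec![
            Ident::with_quote('\'', "/table_name"),
            Ident::with_quote('\'', "{replica}"),
        ],
    });
    assert_eq!(
        engine.to_string(),
        "ENGINE = ReplicatedMergeTree('/table_name', '{replica}')"
    );
}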
@@ -558,27 +559,20 @@ impl Spanned for CreateTable { constraints, hive_distribution: _, // hive specific hive_formats: _, // hive specific - table_properties, - with_options, - file_format: _, // enum - location: _, // string, no span + file_format: _, // enum + location: _, // string, no span query, without_rowid: _, // bool like, clone, - engine: _, // todo - comment: _, // todo, no span - auto_increment_offset: _, // u32, no span - default_charset: _, // string, no span - collation: _, // string, no span - on_commit: _, // enum + comment_after_column_def: _, // todo, no span + on_commit: _, on_cluster: _, // todo, clickhouse specific primary_key: _, // todo, clickhouse specific order_by: _, // todo, clickhouse specific partition_by: _, // todo, BigQuery specific cluster_by: _, // todo, BigQuery specific clustered_by: _, // todo, Hive specific - options: _, // todo, BigQuery specific strict: _, // bool copy_grants: _, // bool enable_schema_evolution: _, // bool @@ -593,15 +587,15 @@ impl Spanned for CreateTable { base_location: _, // todo, Snowflake specific catalog: _, // todo, Snowflake specific catalog_sync: _, // todo, Snowflake specific - storage_serialization_policy: _, // todo, Snowflake specific + storage_serialization_policy: _, + table_options, } = self; union_spans( core::iter::once(name.span()) + .chain(core::iter::once(table_options.span())) .chain(columns.iter().map(|i| i.span())) .chain(constraints.iter().map(|i| i.span())) - .chain(table_properties.iter().map(|i| i.span())) - .chain(with_options.iter().map(|i| i.span())) .chain(query.iter().map(|i| i.span())) .chain(like.iter().map(|i| i.span())) .chain(clone.iter().map(|i| i.span())), @@ -972,6 +966,14 @@ impl Spanned for SqlOption { } => union_spans( core::iter::once(column_name.span).chain(for_values.iter().map(|i| i.span())), ), + SqlOption::TableSpace(_) => Span::empty(), + SqlOption::Comment(_) => Span::empty(), + SqlOption::NamedParenthesizedList(NamedParenthesizedList { + key: name, + value, + parameters: values, + }) => union_spans(core::iter::once(name.span).chain(values.iter().map(|i| i.span))) + .union_opt(&value.as_ref().map(|i| i.span)), } } } @@ -1009,6 +1011,8 @@ impl Spanned for CreateTableOptions { CreateTableOptions::None => Span::empty(), CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())), CreateTableOptions::Options(vec) => union_spans(vec.iter().map(|i| i.span())), + CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())), + CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())), } } } diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs index d1a696a0b..f5056b313 100644 --- a/src/dialect/snowflake.rs +++ b/src/dialect/snowflake.rs @@ -25,8 +25,8 @@ use crate::ast::helpers::stmt_data_loading::{ use crate::ast::{ ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident, IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, - IdentityPropertyOrder, ObjectName, RowAccessPolicy, ShowObjects, Statement, TagsColumnOption, - WrappedCollection, + IdentityPropertyOrder, ObjectName, RowAccessPolicy, ShowObjects, SqlOption, Statement, + TagsColumnOption, WrappedCollection, }; use crate::dialect::{Dialect, Precedence}; use crate::keywords::Keyword; @@ -411,6 +411,8 @@ pub fn parse_create_table( // "CREATE TABLE x COPY GRANTS (c INT)" and "CREATE TABLE x (c INT) COPY GRANTS" are both // accepted by Snowflake + let mut plain_options = vec![]; + loop { let next_token = 
parser.next_token(); match &next_token.token { @@ -422,7 +424,9 @@ pub fn parse_create_table( Keyword::COMMENT => { // Rewind the COMMENT keyword parser.prev_token(); - builder = builder.comment(parser.parse_optional_inline_comment()?); + if let Some(comment_def) = parser.parse_optional_inline_comment()? { + plain_options.push(SqlOption::Comment(comment_def)) + } } Keyword::AS => { let query = parser.parse_query()?; @@ -583,6 +587,13 @@ pub fn parse_create_table( } } } + let table_options = if !plain_options.is_empty() { + crate::ast::CreateTableOptions::Plain(plain_options) + } else { + crate::ast::CreateTableOptions::None + }; + + builder = builder.table_options(table_options); if iceberg && builder.base_location.is_none() { return Err(ParserError::ParserError( diff --git a/src/keywords.rs b/src/keywords.rs index 8609ec43d..4e343b6c2 100644 --- a/src/keywords.rs +++ b/src/keywords.rs @@ -115,9 +115,11 @@ define_keywords!( AUTHENTICATION, AUTHORIZATION, AUTO, + AUTOEXTEND_SIZE, AUTOINCREMENT, AUTO_INCREMENT, AVG, + AVG_ROW_LENGTH, AVRO, BACKWARD, BASE64, @@ -179,6 +181,7 @@ define_keywords!( CHARSET, CHAR_LENGTH, CHECK, + CHECKSUM, CIRCLE, CLEAR, CLOB, @@ -267,6 +270,7 @@ define_keywords!( DEFINED, DEFINER, DELAYED, + DELAY_KEY_WRITE, DELETE, DELIMITED, DELIMITER, @@ -310,6 +314,7 @@ define_keywords!( END_PARTITION, ENFORCED, ENGINE, + ENGINE_ATTRIBUTE, ENUM, ENUM16, ENUM8, @@ -438,6 +443,7 @@ define_keywords!( INPUTFORMAT, INSENSITIVE, INSERT, + INSERT_METHOD, INSTALL, INSTANT, INSTEAD, @@ -474,6 +480,7 @@ define_keywords!( JULIAN, KEY, KEYS, + KEY_BLOCK_SIZE, KILL, LAG, LANGUAGE, @@ -527,6 +534,7 @@ define_keywords!( MAX, MAXVALUE, MAX_DATA_EXTENSION_TIME_IN_DAYS, + MAX_ROWS, MEASURES, MEDIUMBLOB, MEDIUMINT, @@ -548,6 +556,7 @@ define_keywords!( MINUTE, MINUTES, MINVALUE, + MIN_ROWS, MOD, MODE, MODIFIES, @@ -644,6 +653,7 @@ define_keywords!( OWNERSHIP, PACKAGE, PACKAGES, + PACK_KEYS, PARALLEL, PARAMETER, PARQUET, @@ -763,6 +773,7 @@ define_keywords!( ROW, ROWID, ROWS, + ROW_FORMAT, ROW_NUMBER, RULE, RUN, @@ -777,6 +788,7 @@ define_keywords!( SEARCH, SECOND, SECONDARY, + SECONDARY_ENGINE_ATTRIBUTE, SECONDS, SECRET, SECURITY, @@ -828,12 +840,16 @@ define_keywords!( STATEMENT, STATIC, STATISTICS, + STATS_AUTO_RECALC, + STATS_PERSISTENT, + STATS_SAMPLE_PAGES, STATUS, STDDEV_POP, STDDEV_SAMP, STDIN, STDOUT, STEP, + STORAGE, STORAGE_INTEGRATION, STORAGE_SERIALIZATION_POLICY, STORED, @@ -859,6 +875,7 @@ define_keywords!( TABLE, TABLES, TABLESAMPLE, + TABLESPACE, TAG, TARGET, TASK, diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 2b61529ff..0cf9075ae 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -4217,7 +4217,7 @@ impl<'a> Parser<'a> { } } - fn parse(s: String, loc: Location) -> Result + pub fn parse(s: String, loc: Location) -> Result where ::Err: Display, { @@ -5401,12 +5401,17 @@ impl<'a> Parser<'a> { }; let location = hive_formats.location.clone(); let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; + let table_options = if !table_properties.is_empty() { + CreateTableOptions::TableProperties(table_properties) + } else { + CreateTableOptions::None + }; Ok(CreateTableBuilder::new(table_name) .columns(columns) .constraints(constraints) .hive_distribution(hive_distribution) .hive_formats(Some(hive_formats)) - .table_properties(table_properties) + .table_options(table_options) .or_replace(or_replace) .if_not_exists(if_not_exists) .external(true) @@ -6879,17 +6884,16 @@ impl<'a> Parser<'a> { // parse optional column list (schema) let (columns, constraints) = 
self.parse_columns()?; - let mut comment = if dialect_of!(self is HiveDialect) - && self.parse_keyword(Keyword::COMMENT) - { - let next_token = self.next_token(); - match next_token.token { - Token::SingleQuotedString(str) => Some(CommentDef::AfterColumnDefsWithoutEq(str)), - _ => self.expected("comment", next_token)?, - } - } else { - None - }; + let comment_after_column_def = + if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) { + let next_token = self.next_token(); + match next_token.token { + Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)), + _ => self.expected("comment", next_token)?, + } + } else { + None + }; // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE` let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]); @@ -6897,39 +6901,8 @@ impl<'a> Parser<'a> { let hive_distribution = self.parse_hive_distribution()?; let clustered_by = self.parse_optional_clustered_by()?; let hive_formats = self.parse_hive_formats()?; - // PostgreSQL supports `WITH ( options )`, before `AS` - let with_options = self.parse_options(Keyword::WITH)?; - let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; - - let engine = if self.parse_keyword(Keyword::ENGINE) { - self.expect_token(&Token::Eq)?; - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => { - let name = w.value; - let parameters = if self.peek_token() == Token::LParen { - Some(self.parse_parenthesized_identifiers()?) - } else { - None - }; - Some(TableEngine { name, parameters }) - } - _ => self.expected("identifier", next_token)?, - } - } else { - None - }; - let auto_increment_offset = if self.parse_keyword(Keyword::AUTO_INCREMENT) { - let _ = self.consume_token(&Token::Eq); - let next_token = self.next_token(); - match next_token.token { - Token::Number(s, _) => Some(Self::parse::(s, next_token.span.start)?), - _ => self.expected("literal int", next_token)?, - } - } else { - None - }; + let create_table_config = self.parse_optional_create_table_config()?; // ClickHouse supports `PRIMARY KEY`, before `ORDER BY` // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key @@ -6957,30 +6930,6 @@ impl<'a> Parser<'a> { None }; - let create_table_config = self.parse_optional_create_table_config()?; - - let default_charset = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) { - self.expect_token(&Token::Eq)?; - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => Some(w.value), - _ => self.expected("identifier", next_token)?, - } - } else { - None - }; - - let collation = if self.parse_keywords(&[Keyword::COLLATE]) { - self.expect_token(&Token::Eq)?; - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => Some(w.value), - _ => self.expected("identifier", next_token)?, - } - } else { - None - }; - let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) { Some(self.parse_create_table_on_commit()?) } else { @@ -6989,13 +6938,6 @@ impl<'a> Parser<'a> { let strict = self.parse_keyword(Keyword::STRICT); - // Excludes Hive dialect here since it has been handled after table column definitions. - if !dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) { - // rewind the COMMENT keyword - self.prev_token(); - comment = self.parse_optional_inline_comment()? - }; - // Parse optional `AS ( query )` let query = if self.parse_keyword(Keyword::AS) { Some(self.parse_query()?) 
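The net effect on the MySQL path is that these clauses are now parsed generically into plain options and re-serialized with a space around `=`; a round-trip sketch (not part of the patch) mirroring the updated tests further down:

use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Canonical form asserted by the updated tests: `ENGINE = InnoDB`, `DEFAULT CHARSET = utf8mb3`.
    let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3";
    let stmt = Parser::parse_sql(&MySqlDialect {}, sql).unwrap().remove(0);
    assert_eq!(stmt.to_string(), sql);
}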
@@ -7012,8 +6954,6 @@ impl<'a> Parser<'a> { .temporary(temporary) .columns(columns) .constraints(constraints) - .with_options(with_options) - .table_properties(table_properties) .or_replace(or_replace) .if_not_exists(if_not_exists) .transient(transient) @@ -7024,18 +6964,14 @@ impl<'a> Parser<'a> { .without_rowid(without_rowid) .like(like) .clone_clause(clone) - .engine(engine) - .comment(comment) - .auto_increment_offset(auto_increment_offset) + .comment_after_column_def(comment_after_column_def) .order_by(order_by) - .default_charset(default_charset) - .collation(collation) .on_commit(on_commit) .on_cluster(on_cluster) .clustered_by(clustered_by) .partition_by(create_table_config.partition_by) .cluster_by(create_table_config.cluster_by) - .options(create_table_config.options) + .table_options(create_table_config.table_options) .primary_key(primary_key) .strict(strict) .build()) @@ -7060,9 +6996,22 @@ impl<'a> Parser<'a> { /// /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2) /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html) + /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html) fn parse_optional_create_table_config( &mut self, ) -> Result { + let mut table_options = CreateTableOptions::None; + + // PostgreSQL supports `WITH ( options )`, before `AS` + let with_options = self.parse_options(Keyword::WITH)?; + if !with_options.is_empty() { + table_options = CreateTableOptions::With(with_options) + } + + let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; + if !table_properties.is_empty() { + table_options = CreateTableOptions::TableProperties(table_properties); + } let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect) && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { @@ -7072,7 +7021,6 @@ impl<'a> Parser<'a> { }; let mut cluster_by = None; - let mut options = None; if dialect_of!(self is BigQueryDialect | GenericDialect) { if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) { cluster_by = Some(WrappedCollection::NoWrapping( @@ -7082,18 +7030,252 @@ impl<'a> Parser<'a> { if let Token::Word(word) = self.peek_token().token { if word.keyword == Keyword::OPTIONS { - options = Some(self.parse_options(Keyword::OPTIONS)?); + table_options = + CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?) 
} }; } + if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None { + let plain_options = self.parse_plain_options()?; + if !plain_options.is_empty() { + table_options = CreateTableOptions::Plain(plain_options) + } + }; + Ok(CreateTableConfiguration { partition_by, cluster_by, - options, + table_options, }) } + fn parse_plain_option(&mut self) -> Result, ParserError> { + // Single parameter option + if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) { + return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION")))); + } + + // Custom option + if self.parse_keywords(&[Keyword::COMMENT]) { + let has_eq = self.consume_token(&Token::Eq); + let value = self.next_token(); + + let comment = match (has_eq, value.token) { + (true, Token::SingleQuotedString(s)) => { + Ok(Some(SqlOption::Comment(CommentDef::WithEq(s)))) + } + (false, Token::SingleQuotedString(s)) => { + Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s)))) + } + (_, token) => { + self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token)) + } + }; + return comment; + } + + if self.parse_keywords(&[Keyword::ENGINE]) { + let _ = self.consume_token(&Token::Eq); + let value = self.next_token(); + + let engine = match value.token { + Token::Word(w) => { + let parameters = if self.peek_token() == Token::LParen { + self.parse_parenthesized_identifiers()? + } else { + vec![] + }; + + Ok(Some(SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + value: Some(Ident::new(w.value)), + parameters, + }, + ))) + } + _ => { + return self.expected("Token::Word", value)?; + } + }; + + return engine; + } + + if self.parse_keywords(&[Keyword::TABLESPACE]) { + let _ = self.consume_token(&Token::Eq); + let value = self.next_token(); + + let tablespace = match value.token { + // TABLESPACE tablespace_name [STORAGE DISK] | [TABLESPACE tablespace_name] STORAGE MEMORY + Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => { + let storage = match self.parse_keyword(Keyword::STORAGE) { + true => { + let _ = self.consume_token(&Token::Eq); + let storage_token = self.next_token(); + match &storage_token.token { + Token::Word(w) => match w.value.to_uppercase().as_str() { + "DISK" => Some(StorageType::Disk), + "MEMORY" => Some(StorageType::Memory), + _ => self + .expected("Storage type (DISK or MEMORY)", storage_token)?, + }, + _ => self.expected("Token::Word", storage_token)?, + } + } + false => None, + }; + + Ok(Some(SqlOption::TableSpace(TablespaceOption { + name, + storage, + }))) + } + _ => { + return self.expected("Token::Word", value)?; + } + }; + + return tablespace; + } + + if self.parse_keyword(Keyword::UNION) { + let _ = self.consume_token(&Token::Eq); + let value = self.next_token(); + + match value.token { + // UNION [=] (tbl_name[,tbl_name]...) 
+ Token::LParen => { + let tables: Vec = + self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?; + self.expect_token(&Token::RParen)?; + + return Ok(Some(SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("UNION"), + value: None, + parameters: tables, + }, + ))); + } + _ => { + return self.expected("Token::LParen", value)?; + } + } + } + + // Key/Value parameter option + let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) { + // [DEFAULT] CHARACTER SET [=] charset_name + Ident::new("DEFAULT CHARSET") + } else if self.parse_keyword(Keyword::CHARSET) { + // [DEFAULT] CHARACTER SET [=] charset_name + Ident::new("CHARSET") + } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) { + // [DEFAULT] CHARACTER SET [=] charset_name + Ident::new("DEFAULT CHARACTER SET") + } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) { + // [DEFAULT] CHARACTER SET [=] charset_name + Ident::new("CHARACTER SET") + } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) { + // [DEFAULT] COLLATE [=] collation_name + Ident::new("DEFAULT COLLATE") + } else if self.parse_keyword(Keyword::COLLATE) { + // [DEFAULT] COLLATE [=] collation_name + Ident::new("COLLATE") + } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) { + // {DATA | INDEX} DIRECTORY [=] 'absolute path to directory' + Ident::new("DATA DIRECTORY") + } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) { + // {DATA | INDEX} DIRECTORY [=] 'absolute path to directory' + Ident::new("INDEX DIRECTORY") + } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) { + // KEY_BLOCK_SIZE [=] value + Ident::new("KEY_BLOCK_SIZE") + } else if self.parse_keyword(Keyword::ROW_FORMAT) { + // ROW_FORMAT [=] {DEFAULT | DYNAMIC | FIXED | COMPRESSED | REDUNDANT | COMPACT} + Ident::new("ROW_FORMAT") + } else if self.parse_keyword(Keyword::PACK_KEYS) { + // PACK_KEYS [=] {0 | 1 | DEFAULT} + Ident::new("PACK_KEYS") + } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) { + // STATS_AUTO_RECALC [=] {DEFAULT | 0 | 1} + Ident::new("STATS_AUTO_RECALC") + } else if self.parse_keyword(Keyword::STATS_PERSISTENT) { + //STATS_PERSISTENT [=] {DEFAULT | 0 | 1} + Ident::new("STATS_PERSISTENT") + } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) { + // STATS_SAMPLE_PAGES [=] value + Ident::new("STATS_SAMPLE_PAGES") + } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) { + // DELAY_KEY_WRITE [=] {0 | 1} + Ident::new("DELAY_KEY_WRITE") + } else if self.parse_keyword(Keyword::COMPRESSION) { + // COMPRESSION [=] {'ZLIB' | 'LZ4' | 'NONE'} + Ident::new("COMPRESSION") + } else if self.parse_keyword(Keyword::ENCRYPTION) { + // ENCRYPTION [=] {'Y' | 'N'} + Ident::new("ENCRYPTION") + } else if self.parse_keyword(Keyword::MAX_ROWS) { + // MAX_ROWS [=] value + Ident::new("MAX_ROWS") + } else if self.parse_keyword(Keyword::MIN_ROWS) { + // MIN_ROWS [=] value + Ident::new("MIN_ROWS") + } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) { + // AUTOEXTEND_SIZE [=] value + Ident::new("AUTOEXTEND_SIZE") + } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) { + // AVG_ROW_LENGTH [=] value + Ident::new("AVG_ROW_LENGTH") + } else if self.parse_keyword(Keyword::CHECKSUM) { + // CHECKSUM [=] {0 | 1} + Ident::new("CHECKSUM") + } else if self.parse_keyword(Keyword::CONNECTION) { + // CONNECTION [=] 'connect_string' + Ident::new("CONNECTION") + } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) { + // ENGINE_ATTRIBUTE [=] 'string' + 
Ident::new("ENGINE_ATTRIBUTE") + } else if self.parse_keyword(Keyword::PASSWORD) { + // PASSWORD [=] 'string' + Ident::new("PASSWORD") + } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) { + // SECONDARY_ENGINE_ATTRIBUTE [=] 'string' + Ident::new("SECONDARY_ENGINE_ATTRIBUTE") + } else if self.parse_keyword(Keyword::INSERT_METHOD) { + // INSERT_METHOD [=] { NO | FIRST | LAST } + Ident::new("INSERT_METHOD") + } else if self.parse_keyword(Keyword::AUTO_INCREMENT) { + Ident::new("AUTO_INCREMENT") + } else { + return Ok(None); + }; + + let _ = self.consume_token(&Token::Eq); + + let value = match self + .maybe_parse(|parser| parser.parse_value())? + .map(Expr::Value) + { + Some(expr) => expr, + None => Expr::Identifier(self.parse_identifier()?), + }; + + Ok(Some(SqlOption::KeyValue { key, value })) + } + + pub fn parse_plain_options(&mut self) -> Result, ParserError> { + let mut options = Vec::new(); + + while let Some(option) = self.parse_plain_option()? { + options.push(option); + } + + Ok(options) + } + pub fn parse_optional_inline_comment(&mut self) -> Result, ParserError> { let comment = if self.parse_keyword(Keyword::COMMENT) { let has_eq = self.consume_token(&Token::Eq); diff --git a/tests/sqlparser_bigquery.rs b/tests/sqlparser_bigquery.rs index 3037d4ae5..3cee2c007 100644 --- a/tests/sqlparser_bigquery.rs +++ b/tests/sqlparser_bigquery.rs @@ -484,7 +484,7 @@ fn parse_create_table_with_options() { columns, partition_by, cluster_by, - options, + table_options, .. }) => { assert_eq!( @@ -539,7 +539,7 @@ fn parse_create_table_with_options() { Ident::new("userid"), Ident::new("age"), ])), - Some(vec![ + CreateTableOptions::Options(vec![ SqlOption::KeyValue { key: Ident::new("partition_expiration_days"), value: Expr::Value( @@ -561,7 +561,7 @@ fn parse_create_table_with_options() { }, ]) ), - (partition_by, cluster_by, options) + (partition_by, cluster_by, table_options) ) } _ => unreachable!(), diff --git a/tests/sqlparser_clickhouse.rs b/tests/sqlparser_clickhouse.rs index c56f98860..38e10f1ea 100644 --- a/tests/sqlparser_clickhouse.rs +++ b/tests/sqlparser_clickhouse.rs @@ -219,10 +219,10 @@ fn parse_delimited_identifiers() { #[test] fn parse_create_table() { - clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY ("x")"#); - clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x""#); + clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY ("x")"#); + clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x""#); clickhouse().verified_stmt( - r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#, + r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#, ); } @@ -589,7 +589,7 @@ fn parse_clickhouse_data_types() { #[test] fn parse_create_table_with_nullable() { - let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE=MergeTree ORDER BY (`k`)"#; + let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE = MergeTree ORDER BY (`k`)"#; // ClickHouse has a case-sensitive definition of data type, but canonical representation is not let canonical_sql = sql.replace("String", "STRING"); @@ -714,14 +714,14 @@ fn parse_create_table_with_nested_data_types() { fn 
parse_create_table_with_primary_key() { match clickhouse_and_generic().verified_stmt(concat!( r#"CREATE TABLE db.table (`i` INT, `k` INT)"#, - " ENGINE=SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')", + " ENGINE = SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')", " PRIMARY KEY tuple(i)", " ORDER BY tuple(i)", )) { Statement::CreateTable(CreateTable { name, columns, - engine, + table_options, primary_key, order_by, .. @@ -742,16 +742,23 @@ fn parse_create_table_with_primary_key() { ], columns ); - assert_eq!( - engine, - Some(TableEngine { - name: "SharedMergeTree".to_string(), - parameters: Some(vec![ + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + value: Some(Ident::new("SharedMergeTree")), + parameters: vec![ Ident::with_quote('\'', "/clickhouse/tables/{uuid}/{shard}"), Ident::with_quote('\'', "{replica}"), - ]), - }) - ); + ] + } + ))); + fn assert_function(actual: &Function, name: &str, arg: &str) -> bool { assert_eq!(actual.name, ObjectName::from(vec![Ident::new(name)])); assert_eq!( @@ -798,7 +805,7 @@ fn parse_create_table_with_variant_default_expressions() { " b DATETIME EPHEMERAL now(),", " c DATETIME EPHEMERAL,", " d STRING ALIAS toString(c)", - ") ENGINE=MergeTree" + ") ENGINE = MergeTree" ); match clickhouse_and_generic().verified_stmt(sql) { Statement::CreateTable(CreateTable { columns, .. }) => { diff --git a/tests/sqlparser_common.rs b/tests/sqlparser_common.rs index 795dae4b3..80185115b 100644 --- a/tests/sqlparser_common.rs +++ b/tests/sqlparser_common.rs @@ -3642,7 +3642,7 @@ fn parse_create_table() { name, columns, constraints, - with_options, + table_options, if_not_exists: false, external: false, file_format: None, @@ -3780,7 +3780,7 @@ fn parse_create_table() { }, ] ); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); } _ => unreachable!(), } @@ -3825,7 +3825,7 @@ fn parse_create_table_with_constraint_characteristics() { name, columns, constraints, - with_options, + table_options, if_not_exists: false, external: false, file_format: None, @@ -3919,7 +3919,7 @@ fn parse_create_table_with_constraint_characteristics() { }, ] ); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); } _ => unreachable!(), } @@ -4404,7 +4404,11 @@ fn parse_create_table_with_options() { let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)"; match generic.verified_stmt(sql) { - Statement::CreateTable(CreateTable { with_options, .. }) => { + Statement::CreateTable(CreateTable { table_options, .. 
}) => { + let with_options = match table_options { + CreateTableOptions::With(options) => options, + _ => unreachable!(), + }; assert_eq!( vec![ SqlOption::KeyValue { @@ -4465,7 +4469,7 @@ fn parse_create_external_table() { name, columns, constraints, - with_options, + table_options, if_not_exists, external, file_format, @@ -4508,7 +4512,7 @@ fn parse_create_external_table() { assert_eq!(FileFormat::TEXTFILE, file_format.unwrap()); assert_eq!("/tmp/example.csv", location.unwrap()); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); assert!(!if_not_exists); } _ => unreachable!(), @@ -4533,7 +4537,7 @@ fn parse_create_or_replace_external_table() { name, columns, constraints, - with_options, + table_options, if_not_exists, external, file_format, @@ -4562,7 +4566,7 @@ fn parse_create_or_replace_external_table() { assert_eq!(FileFormat::TEXTFILE, file_format.unwrap()); assert_eq!("/tmp/example.csv", location.unwrap()); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); assert!(!if_not_exists); assert!(or_replace); } @@ -11369,7 +11373,9 @@ fn test_parse_inline_comment() { // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) match all_dialects_except(|d| d.is::()).verified_stmt(sql) { Statement::CreateTable(CreateTable { - columns, comment, .. + columns, + table_options, + .. }) => { assert_eq!( columns, @@ -11383,8 +11389,10 @@ fn test_parse_inline_comment() { }] ); assert_eq!( - comment.unwrap(), - CommentDef::WithEq("comment with equal".to_string()) + table_options, + CreateTableOptions::Plain(vec![SqlOption::Comment(CommentDef::WithEq( + "comment with equal".to_string() + ))]) ); } _ => unreachable!(), @@ -12359,21 +12367,6 @@ fn parse_select_wildcard_with_except() { ); } -#[test] -fn parse_auto_increment_too_large() { - let dialect = GenericDialect {}; - let u64_max = u64::MAX; - let sql = - format!("CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) AUTO_INCREMENT=1{u64_max}"); - - let res = Parser::new(&dialect) - .try_with_sql(&sql) - .expect("tokenize to work") - .parse_statements(); - - assert!(res.is_err(), "{res:?}"); -} - #[test] fn test_group_by_nothing() { let Select { group_by, .. 
} = all_dialects_where(|d| d.supports_group_by_expr()) diff --git a/tests/sqlparser_duckdb.rs b/tests/sqlparser_duckdb.rs index bed02428d..481b0ab24 100644 --- a/tests/sqlparser_duckdb.rs +++ b/tests/sqlparser_duckdb.rs @@ -709,19 +709,13 @@ fn test_duckdb_union_datatype() { storage: Default::default(), location: Default::default() }), - table_properties: Default::default(), - with_options: Default::default(), file_format: Default::default(), location: Default::default(), query: Default::default(), without_rowid: Default::default(), like: Default::default(), clone: Default::default(), - engine: Default::default(), - comment: Default::default(), - auto_increment_offset: Default::default(), - default_charset: Default::default(), - collation: Default::default(), + comment_after_column_def: Default::default(), on_commit: Default::default(), on_cluster: Default::default(), primary_key: Default::default(), @@ -729,7 +723,6 @@ fn test_duckdb_union_datatype() { partition_by: Default::default(), cluster_by: Default::default(), clustered_by: Default::default(), - options: Default::default(), strict: Default::default(), copy_grants: Default::default(), enable_schema_evolution: Default::default(), @@ -745,6 +738,7 @@ fn test_duckdb_union_datatype() { catalog: Default::default(), catalog_sync: Default::default(), storage_serialization_policy: Default::default(), + table_options: CreateTableOptions::None }), stmt ); diff --git a/tests/sqlparser_hive.rs b/tests/sqlparser_hive.rs index 2af93db7d..92a2e2f85 100644 --- a/tests/sqlparser_hive.rs +++ b/tests/sqlparser_hive.rs @@ -130,12 +130,13 @@ fn create_table_with_comment() { " INTO 4 BUCKETS" ); match hive().verified_stmt(sql) { - Statement::CreateTable(CreateTable { comment, .. }) => { + Statement::CreateTable(CreateTable { + comment_after_column_def: comment, + .. 
+ }) => { assert_eq!( comment, - Some(CommentDef::AfterColumnDefsWithoutEq( - "table comment".to_string() - )) + Some(CommentDef::WithoutEq("table comment".to_string())) ) } _ => unreachable!(), diff --git a/tests/sqlparser_mssql.rs b/tests/sqlparser_mssql.rs index 2bfc38a6a..5798065ba 100644 --- a/tests/sqlparser_mssql.rs +++ b/tests/sqlparser_mssql.rs @@ -1551,7 +1551,6 @@ fn parse_create_table_with_valid_options() { span: Span::empty(), }, data_type: Int(None,), - options: vec![], }, ColumnDef { @@ -1561,7 +1560,6 @@ fn parse_create_table_with_valid_options() { span: Span::empty(), }, data_type: Int(None,), - options: vec![], }, ], @@ -1573,19 +1571,13 @@ fn parse_create_table_with_valid_options() { storage: None, location: None, },), - table_properties: vec![], - with_options, file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, - engine: None, - comment: None, - auto_increment_offset: None, - default_charset: None, - collation: None, + comment_after_column_def: None, on_commit: None, on_cluster: None, primary_key: None, @@ -1593,7 +1585,6 @@ fn parse_create_table_with_valid_options() { partition_by: None, cluster_by: None, clustered_by: None, - options: None, strict: false, iceberg: false, copy_grants: false, @@ -1610,6 +1601,7 @@ fn parse_create_table_with_valid_options() { catalog: None, catalog_sync: None, storage_serialization_policy: None, + table_options: CreateTableOptions::With(with_options) }) ); } @@ -1743,19 +1735,13 @@ fn parse_create_table_with_identity_column() { storage: None, location: None, },), - table_properties: vec![], - with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, - engine: None, - comment: None, - auto_increment_offset: None, - default_charset: None, - collation: None, + comment_after_column_def: None, on_commit: None, on_cluster: None, primary_key: None, @@ -1763,7 +1749,6 @@ fn parse_create_table_with_identity_column() { partition_by: None, cluster_by: None, clustered_by: None, - options: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -1779,6 +1764,7 @@ fn parse_create_table_with_identity_column() { catalog: None, catalog_sync: None, storage_serialization_policy: None, + table_options: CreateTableOptions::None }), ); } diff --git a/tests/sqlparser_mysql.rs b/tests/sqlparser_mysql.rs index 1d4fd6a0d..6c71f4e16 100644 --- a/tests/sqlparser_mysql.rs +++ b/tests/sqlparser_mysql.rs @@ -25,9 +25,9 @@ use matches::assert_matches; use sqlparser::ast::MysqlInsertPriority::{Delayed, HighPriority, LowPriority}; use sqlparser::ast::*; use sqlparser::dialect::{GenericDialect, MySqlDialect}; -use sqlparser::parser::{ParserError, ParserOptions}; -use sqlparser::tokenizer::Span; +use sqlparser::parser::{Parser, ParserError, ParserOptions}; use sqlparser::tokenizer::Token; +use sqlparser::tokenizer::{Location, Span}; use test_utils::*; #[macro_use] @@ -848,9 +848,23 @@ fn parse_create_table_comment() { for sql in [without_equal, with_equal] { match mysql().verified_stmt(sql) { - Statement::CreateTable(CreateTable { name, comment, .. }) => { + Statement::CreateTable(CreateTable { + name, + table_options, + .. 
+ }) => { assert_eq!(name.to_string(), "foo"); - assert_eq!(comment.expect("Should exist").to_string(), "baz"); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + let comment = match plain_options.first().unwrap() { + SqlOption::Comment(CommentDef::WithEq(c)) + | SqlOption::Comment(CommentDef::WithoutEq(c)) => c, + _ => unreachable!(), + }; + assert_eq!(comment, "baz"); } _ => unreachable!(), } @@ -859,29 +873,276 @@ fn parse_create_table_comment() { #[test] fn parse_create_table_auto_increment_offset() { - let canonical = - "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT 123"; - let with_equal = - "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=123"; + let sql = + "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123"; + + match mysql().verified_stmt(sql) { + Statement::CreateTable(CreateTable { + name, + table_options, + .. + }) => { + assert_eq!(name.to_string(), "foo"); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("AUTO_INCREMENT"), + value: Expr::Value( + Value::Number( + Parser::parse("123".to_owned(), Location::empty()).unwrap(), + false + ) + .into() + ) + })); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_create_table_multiple_options_order_independent() { + let sql1 = "CREATE TABLE mytable (id INT) ENGINE=InnoDB ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc'"; + let sql2 = "CREATE TABLE mytable (id INT) KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB ROW_FORMAT=DYNAMIC"; + let sql3 = "CREATE TABLE mytable (id INT) ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB"; - for sql in [canonical, with_equal] { - match mysql().one_statement_parses_to(sql, canonical) { + for sql in [sql1, sql2, sql3] { + match mysql().parse_sql_statements(sql).unwrap().pop().unwrap() { Statement::CreateTable(CreateTable { name, - auto_increment_offset, + table_options, .. 
}) => { - assert_eq!(name.to_string(), "foo"); - assert_eq!( - auto_increment_offset.expect("Should exist").to_string(), - "123" - ); + assert_eq!(name.to_string(), "mytable"); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + value: Some(Ident::new("InnoDB")), + parameters: vec![] + } + ))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("KEY_BLOCK_SIZE"), + value: Expr::Value( + Value::Number( + Parser::parse("8".to_owned(), Location::empty()).unwrap(), + false + ) + .into() + ) + })); + + assert!(plain_options + .contains(&SqlOption::Comment(CommentDef::WithEq("abc".to_owned())))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ROW_FORMAT"), + value: Expr::Identifier(Ident::new("DYNAMIC".to_owned())) + })); } _ => unreachable!(), } } } +#[test] +fn parse_create_table_with_all_table_options() { + let sql = + "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123 DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci INSERT_METHOD = FIRST KEY_BLOCK_SIZE = 8 ROW_FORMAT = DYNAMIC DATA DIRECTORY = '/var/lib/mysql/data' INDEX DIRECTORY = '/var/lib/mysql/index' PACK_KEYS = 1 STATS_AUTO_RECALC = 1 STATS_PERSISTENT = 0 STATS_SAMPLE_PAGES = 128 DELAY_KEY_WRITE = 1 COMPRESSION = 'ZLIB' ENCRYPTION = 'Y' MAX_ROWS = 10000 MIN_ROWS = 10 AUTOEXTEND_SIZE = 64 AVG_ROW_LENGTH = 128 CHECKSUM = 1 CONNECTION = 'mysql://localhost' ENGINE_ATTRIBUTE = 'primary' PASSWORD = 'secure_password' SECONDARY_ENGINE_ATTRIBUTE = 'secondary_attr' START TRANSACTION TABLESPACE my_tablespace STORAGE DISK UNION = (table1, table2, table3)"; + + let x = mysql().verified_stmt(sql); + println!("{x:?}"); + match mysql().verified_stmt(sql) { + Statement::CreateTable(CreateTable { + name, + table_options, + .. 
+ }) => { + assert_eq!(name, vec![Ident::new("foo".to_owned())].into()); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + value: Some(Ident::new("InnoDB")), + parameters: vec![] + } + ))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("COLLATE"), + value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("DEFAULT CHARSET"), + value: Expr::Identifier(Ident::new("utf8mb4".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("AUTO_INCREMENT"), + value: Expr::value(Value::Number( + Parser::parse("123".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("KEY_BLOCK_SIZE"), + value: Expr::value(Value::Number( + Parser::parse("8".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ROW_FORMAT"), + value: Expr::Identifier(Ident::new("DYNAMIC".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("PACK_KEYS"), + value: Expr::value(Value::Number( + Parser::parse("1".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("STATS_AUTO_RECALC"), + value: Expr::value(Value::Number( + Parser::parse("1".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("STATS_PERSISTENT"), + value: Expr::value(Value::Number( + Parser::parse("0".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("STATS_SAMPLE_PAGES"), + value: Expr::value(Value::Number( + Parser::parse("128".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("STATS_SAMPLE_PAGES"), + value: Expr::value(Value::Number( + Parser::parse("128".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("INSERT_METHOD"), + value: Expr::Identifier(Ident::new("FIRST".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("COMPRESSION"), + value: Expr::value(Value::SingleQuotedString("ZLIB".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ENCRYPTION"), + value: Expr::value(Value::SingleQuotedString("Y".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("MAX_ROWS"), + value: Expr::value(Value::Number( + Parser::parse("10000".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("MIN_ROWS"), + value: Expr::value(Value::Number( + Parser::parse("10".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("AUTOEXTEND_SIZE"), + value: Expr::value(Value::Number( + Parser::parse("64".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: 
Ident::new("AVG_ROW_LENGTH"), + value: Expr::value(Value::Number( + Parser::parse("128".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("CHECKSUM"), + value: Expr::value(Value::Number( + Parser::parse("1".to_owned(), Location::empty()).unwrap(), + false + )) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("CONNECTION"), + value: Expr::value(Value::SingleQuotedString("mysql://localhost".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ENGINE_ATTRIBUTE"), + value: Expr::value(Value::SingleQuotedString("primary".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("PASSWORD"), + value: Expr::value(Value::SingleQuotedString("secure_password".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("SECONDARY_ENGINE_ATTRIBUTE"), + value: Expr::value(Value::SingleQuotedString("secondary_attr".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::Ident(Ident::new( + "START TRANSACTION".to_owned() + )))); + assert!( + plain_options.contains(&SqlOption::TableSpace(TablespaceOption { + name: "my_tablespace".to_string(), + storage: Some(StorageType::Disk), + })) + ); + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("UNION"), + value: None, + parameters: vec![ + Ident::new("table1".to_string()), + Ident::new("table2".to_string()), + Ident::new("table3".to_string()) + ] + } + ))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("DATA DIRECTORY"), + value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/data".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("INDEX DIRECTORY"), + value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/index".to_owned())) + })); + } + _ => unreachable!(), + } +} + #[test] fn parse_create_table_set_enum() { let sql = "CREATE TABLE foo (bar SET('a', 'b'), baz ENUM('a', 'b'))"; @@ -916,13 +1177,12 @@ fn parse_create_table_set_enum() { #[test] fn parse_create_table_engine_default_charset() { - let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3"; + let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3"; match mysql().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, columns, - engine, - default_charset, + table_options, .. 
         }) => {
             assert_eq!(name.to_string(), "foo");
@@ -934,14 +1194,24 @@ fn parse_create_table_engine_default_charset() {
                 },],
                 columns
             );
-            assert_eq!(
-                engine,
-                Some(TableEngine {
-                    name: "InnoDB".to_string(),
-                    parameters: None
-                })
-            );
-            assert_eq!(default_charset, Some("utf8mb3".to_string()));
+
+            let plain_options = match table_options {
+                CreateTableOptions::Plain(options) => options,
+                _ => unreachable!(),
+            };
+
+            assert!(plain_options.contains(&SqlOption::KeyValue {
+                key: Ident::new("DEFAULT CHARSET"),
+                value: Expr::Identifier(Ident::new("utf8mb3".to_owned()))
+            }));
+
+            assert!(plain_options.contains(&SqlOption::NamedParenthesizedList(
+                NamedParenthesizedList {
+                    key: Ident::new("ENGINE"),
+                    value: Some(Ident::new("InnoDB")),
+                    parameters: vec![]
+                }
+            )));
         }
         _ => unreachable!(),
     }
@@ -949,12 +1219,12 @@
 
 #[test]
 fn parse_create_table_collate() {
-    let sql = "CREATE TABLE foo (id INT(11)) COLLATE=utf8mb4_0900_ai_ci";
+    let sql = "CREATE TABLE foo (id INT(11)) COLLATE = utf8mb4_0900_ai_ci";
     match mysql().verified_stmt(sql) {
         Statement::CreateTable(CreateTable {
             name,
             columns,
-            collation,
+            table_options,
             ..
         }) => {
             assert_eq!(name.to_string(), "foo");
@@ -966,7 +1236,16 @@
                 },],
                 columns
             );
-            assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string()));
+
+            let plain_options = match table_options {
+                CreateTableOptions::Plain(options) => options,
+                _ => unreachable!(),
+            };
+
+            assert!(plain_options.contains(&SqlOption::KeyValue {
+                key: Ident::new("COLLATE"),
+                value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned()))
+            }));
         }
         _ => unreachable!(),
     }
@@ -974,16 +1253,26 @@
 
 #[test]
 fn parse_create_table_both_options_and_as_query() {
-    let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb4_0900_ai_ci AS SELECT 1";
+    let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3 COLLATE = utf8mb4_0900_ai_ci AS SELECT 1";
     match mysql_and_generic().verified_stmt(sql) {
         Statement::CreateTable(CreateTable {
             name,
-            collation,
             query,
+            table_options,
             ..
         }) => {
             assert_eq!(name.to_string(), "foo");
-            assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string()));
+
+            let plain_options = match table_options {
+                CreateTableOptions::Plain(options) => options,
+                _ => unreachable!(),
+            };
+
+            assert!(plain_options.contains(&SqlOption::KeyValue {
+                key: Ident::new("COLLATE"),
+                value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned()))
+            }));
+
             assert_eq!(
                 query.unwrap().body.as_select().unwrap().projection,
                 vec![SelectItem::UnnamedExpr(Expr::Value(
@@ -994,7 +1283,8 @@
         _ => unreachable!(),
     }
 
-    let sql = r"CREATE TABLE foo (id INT(11)) ENGINE=InnoDB AS SELECT 1 DEFAULT CHARSET=utf8mb3";
+    let sql =
+        r"CREATE TABLE foo (id INT(11)) ENGINE = InnoDB AS SELECT 1 DEFAULT CHARSET = utf8mb3";
     assert!(matches!(
         mysql_and_generic().parse_sql_statements(sql),
         Err(ParserError::ParserError(_))
diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs
index a6d65ec75..45a9f569a 100644
--- a/tests/sqlparser_postgres.rs
+++ b/tests/sqlparser_postgres.rs
@@ -348,7 +348,7 @@ fn parse_create_table_with_defaults() {
             name,
             columns,
             constraints,
-            with_options,
+            table_options,
             if_not_exists: false,
             external: false,
             file_format: None,
@@ -485,6 +485,11 @@ fn parse_create_table_with_defaults() {
            ]
        );
        assert!(constraints.is_empty());
+
+        let with_options = match table_options {
+            CreateTableOptions::With(options) => options,
+            _ => unreachable!(),
+        };
        assert_eq!(
            with_options,
            vec![
@@ -4570,7 +4575,6 @@ fn parse_create_table_with_alias() {
             name,
             columns,
             constraints,
-            with_options: _with_options,
             if_not_exists: false,
             external: false,
             file_format: None,
@@ -4977,7 +4981,11 @@ fn parse_at_time_zone() {
 fn parse_create_table_with_options() {
     let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)";
     match pg().verified_stmt(sql) {
-        Statement::CreateTable(CreateTable { with_options, .. }) => {
+        Statement::CreateTable(CreateTable { table_options, .. }) => {
+            let with_options = match table_options {
+                CreateTableOptions::With(options) => options,
+                _ => unreachable!(),
+            };
             assert_eq!(
                 vec![
                     SqlOption::KeyValue {
@@ -5405,19 +5413,13 @@ fn parse_trigger_related_functions() {
                     storage: None,
                     location: None
                 }),
-                table_properties: vec![],
-                with_options: vec![],
                 file_format: None,
                 location: None,
                 query: None,
                 without_rowid: false,
                 like: None,
                 clone: None,
-                engine: None,
-                comment: None,
-                auto_increment_offset: None,
-                default_charset: None,
-                collation: None,
+                comment_after_column_def: None,
                 on_commit: None,
                 on_cluster: None,
                 primary_key: None,
@@ -5425,7 +5427,6 @@
                 partition_by: None,
                 cluster_by: None,
                 clustered_by: None,
-                options: None,
                 strict: false,
                 copy_grants: false,
                 enable_schema_evolution: None,
@@ -5441,6 +5442,7 @@
                 catalog: None,
                 catalog_sync: None,
                 storage_serialization_policy: None,
+                table_options: CreateTableOptions::None
             }
         );
diff --git a/tests/sqlparser_snowflake.rs b/tests/sqlparser_snowflake.rs
index 097af3466..e915c868e 100644
--- a/tests/sqlparser_snowflake.rs
+++ b/tests/sqlparser_snowflake.rs
@@ -470,9 +470,22 @@ fn test_snowflake_create_table_cluster_by() {
 #[test]
 fn test_snowflake_create_table_comment() {
     match snowflake().verified_stmt("CREATE TABLE my_table (a INT) COMMENT = 'some comment'") {
-        Statement::CreateTable(CreateTable { name, comment, .. }) => {
+        Statement::CreateTable(CreateTable {
+            name,
+            table_options,
+            ..
+        }) => {
             assert_eq!("my_table", name.to_string());
-            assert_eq!("some comment", comment.unwrap().to_string());
+            let plain_options = match table_options {
+                CreateTableOptions::Plain(options) => options,
+                _ => unreachable!(),
+            };
+            let comment = match plain_options.first().unwrap() {
+                SqlOption::Comment(CommentDef::WithEq(c))
+                | SqlOption::Comment(CommentDef::WithoutEq(c)) => c,
+                _ => unreachable!(),
+            };
+            assert_eq!("some comment", comment);
         }
         _ => unreachable!(),
     }
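For downstream code, the net effect exercised by these tests is that trailing CREATE TABLE clauses (ENGINE, DEFAULT CHARSET, COLLATE, WITH (...), and so on) all arrive through the single `table_options: CreateTableOptions` field rather than per-dialect struct fields. Below is a minimal sketch, not part of this patch, of consuming that field; it assumes the MySQL dialect classifies these trailing options as `CreateTableOptions::Plain` as the tests above show, and the `find_option` helper, the table name `t`, and the sample SQL are illustrative only.

```rust
use sqlparser::ast::{CreateTableOptions, SqlOption, Statement};
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

/// Look up a table option by key, regardless of which list-carrying
/// `CreateTableOptions` variant the dialect produced.
fn find_option<'a>(opts: &'a CreateTableOptions, name: &str) -> Option<&'a SqlOption> {
    let list: &[SqlOption] = match opts {
        CreateTableOptions::Plain(v)
        | CreateTableOptions::With(v)
        | CreateTableOptions::TableProperties(v)
        | CreateTableOptions::Options(v) => v,
        _ => &[],
    };
    list.iter().find(|opt| match opt {
        // ENGINE = InnoDB parses as a named parenthesized list, plain
        // `KEY = value` options as KeyValue, bare keywords as Ident.
        SqlOption::KeyValue { key, .. } => key.value.eq_ignore_ascii_case(name),
        SqlOption::NamedParenthesizedList(named) => named.key.value.eq_ignore_ascii_case(name),
        SqlOption::Ident(ident) => ident.value.eq_ignore_ascii_case(name),
        _ => false,
    })
}

fn main() {
    let sql = "CREATE TABLE t (id INT) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4";
    let stmts = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    if let Statement::CreateTable(create) = &stmts[0] {
        // MySQL's trailing options land in CreateTableOptions::Plain(..),
        // so both lookups should succeed for this statement.
        assert!(find_option(&create.table_options, "ENGINE").is_some());
        assert!(find_option(&create.table_options, "DEFAULT CHARSET").is_some());
    }
}
```

Dialects that keep a `WITH (...)` list (as in the Postgres tests above) surface the same `SqlOption` values through the `With` variant, which is why the helper matches every list-carrying variant rather than `Plain` alone.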