Skip to content

Commit

Permalink
[AMORO-3389][improvement] Support adding a comment to mixed_hive tables (#3389)
Browse files Browse the repository at this point in the history
* [improvement] Support adding a comment to Hive tables

* add ut
  • Loading branch information
Aireed authored Jan 20, 2025
1 parent eba34b1 commit decd55d
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 15 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;

public class MixedHiveTables {

Expand Down Expand Up @@ -510,6 +511,11 @@ private org.apache.hadoop.hive.metastore.api.Table newHiveTable(
TableMeta meta, Schema schema, PartitionSpec partitionSpec) {
final long currentTimeMillis = System.currentTimeMillis();

// set table comment here!
Map<String, String> parameters = new HashMap<>();
Optional<String> comment = Optional.ofNullable(meta.getProperties().get("comment"));
comment.ifPresent(val -> parameters.put("comment", val));

org.apache.hadoop.hive.metastore.api.Table newTable =
new org.apache.hadoop.hive.metastore.api.Table(
meta.getTableIdentifier().getTableName(),
Expand All @@ -521,7 +527,7 @@ private org.apache.hadoop.hive.metastore.api.Table newHiveTable(
Integer.MAX_VALUE,
null,
HiveSchemaUtil.hivePartitionFields(schema, partitionSpec),
new HashMap<>(),
parameters,
null,
null,
TableType.EXTERNAL_TABLE.toString());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@

import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Stream;

public class TestMixedFormatSessionCatalog extends MixedTableTestBase {
Expand All @@ -57,16 +58,16 @@ public Dataset<Row> sql(String sqlText) {

public static Stream<Arguments> testCreateTable() {
return Stream.of(
Arguments.arguments("arctic", true, ""),
Arguments.arguments("arctic", false, "pt"),
Arguments.arguments("arctic", true, "pt"),
Arguments.arguments("parquet", false, "pt"),
Arguments.arguments("parquet", false, "dt string"));
Arguments.arguments("arctic", true, "", "hive comment"),
Arguments.arguments("arctic", false, "pt", "hive comment"),
Arguments.arguments("arctic", true, "pt", "hive comment"),
Arguments.arguments("parquet", false, "pt", null),
Arguments.arguments("parquet", false, "dt string", null));
}

@ParameterizedTest(name = "{index} USING {0} WITH PK {1} PARTITIONED BY ({2})")
@ParameterizedTest(name = "{index} USING {0} WITH PK {1} PARTITIONED BY ({2}) COMMENT {3}")
@MethodSource
public void testCreateTable(String provider, boolean pk, String pt) {
public void testCreateTable(String provider, boolean pk, String pt, String comment) {

String sqlText = "CREATE TABLE " + target() + "(" + " id INT, data string, pt string ";
if (pk) {
Expand All @@ -78,6 +79,10 @@ public void testCreateTable(String provider, boolean pk, String pt) {
sqlText += " PARTITIONED BY (" + pt + ")";
}

if (StringUtils.isNotBlank(comment)) {
sqlText += " COMMENT '" + comment + "'";
}

sql(sqlText);

if ("arctic".equalsIgnoreCase(provider)) {
Expand All @@ -86,6 +91,9 @@ public void testCreateTable(String provider, boolean pk, String pt) {

Table hiveTable = loadHiveTable();
Assertions.assertNotNull(hiveTable);

String hiveComment = hiveTable.getParameters().get("comment");
Assertions.assertTrue(Objects.equals(hiveComment, comment));
}

static final Schema SCHEMA =
Expand All @@ -105,17 +113,17 @@ public void testCreateTable(String provider, boolean pk, String pt) {

public static Stream<Arguments> testCreateTableAsSelect() {
return Stream.of(
Arguments.arguments("arctic", true, "", true),
Arguments.arguments("arctic", false, "pt", true),
Arguments.arguments("arctic", true, "pt", false),
Arguments.arguments("parquet", false, "pt", false),
Arguments.arguments("parquet", false, "", false));
Arguments.arguments("arctic", true, "", true, "hive comment"),
Arguments.arguments("arctic", false, "pt", true, "hive comment"),
Arguments.arguments("arctic", true, "pt", false, "hive comment"),
Arguments.arguments("parquet", false, "pt", false, "hive comment"),
Arguments.arguments("parquet", false, "", false, "hive comment"));
}

@ParameterizedTest(name = "{index} USING {0} WITH PK {1} PARTITIONED BY ({2})")
@ParameterizedTest(name = "{index} USING {0} WITH PK {1} PARTITIONED BY ({2}) COMMENT {3}")
@MethodSource
public void testCreateTableAsSelect(
String provider, boolean pk, String pt, boolean duplicateCheck) {
String provider, boolean pk, String pt, boolean duplicateCheck, String comment) {
spark().conf().set(SparkSQLProperties.CHECK_SOURCE_DUPLICATES_ENABLE, duplicateCheck);
createViewSource(SCHEMA, source);
String sqlText = "CREATE TABLE " + target();
Expand All @@ -126,6 +134,11 @@ public void testCreateTableAsSelect(
if (StringUtils.isNotBlank(pt)) {
sqlText += " PARTITIONED BY (" + pt + ")";
}

if (StringUtils.isNotBlank(comment)) {
sqlText += " COMMENT '" + comment + "'";
}

sqlText += " AS SELECT * FROM " + source();

sql(sqlText);
Expand All @@ -135,6 +148,9 @@ public void testCreateTableAsSelect(

Table hiveTable = loadHiveTable();
Assertions.assertNotNull(hiveTable);

String hiveComment = hiveTable.getParameters().get("comment");
Assertions.assertTrue(Objects.equals(hiveComment, comment));
}

@Test
Expand Down

0 comments on commit decd55d

Please sign in to comment.