Skip to content

Commit

Permalink
[fix] Fix support for recreating a Hive table; add unit test (apache#3083)
Browse files Browse the repository at this point in the history
  • Loading branch information
liuw529 committed Mar 27, 2024
1 parent d3d9047 commit 2b942f7
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -129,12 +129,10 @@ public List<Long> listAllIds() {
public void checkTableSchema(TableSchema oldSchema, TableSchema newSchema) {
boolean isCommon =
oldSchema.version() == newSchema.version()
&& oldSchema.id() == newSchema.id()
&& Objects.equals(oldSchema.fields(), newSchema.fields())
&& oldSchema.highestFieldId() == newSchema.highestFieldId()
&& Objects.equals(oldSchema.partitionKeys(), newSchema.partitionKeys())
&& Objects.equals(oldSchema.primaryKeys(), newSchema.primaryKeys())
&& Objects.equals(oldSchema.options(), newSchema.options());
&& Objects.equals(oldSchema.primaryKeys(), newSchema.primaryKeys());

if (!isCommon) {
throw new IllegalStateException(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,15 @@

package org.apache.paimon.hive;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.paimon.catalog.AbstractCatalog;
import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.catalog.CatalogFactory;
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.fs.hadoop.HadoopFileIO;
import org.apache.paimon.fs.local.LocalFileIO;
import org.apache.paimon.options.CatalogOptions;
import org.apache.paimon.options.Options;
Expand Down Expand Up @@ -120,6 +123,45 @@ public void testCreateExternalTableWithPaimonTable() throws Exception {
" 'org.apache.paimon.hive.PaimonStorageHandler' ");
}

@Test
public void testCallCreateTableToCreatHiveExternalTable() throws Exception {
    // Regression test for apache#3083: a Hive external table backed by a Paimon
    // table must be re-creatable after the Hive-side table is dropped. Dropping
    // an EXTERNAL table leaves the underlying Paimon data/schema files in place,
    // so a second createTable with the same identifier should succeed rather
    // than fail on the leftover schema.
    String tableName = "with_paimon_table";
    String hadoopConfDir = "";

    // Paimon schema; mixed-case column names ("Col2", "COL3") exercise Hive's
    // case-insensitive metastore handling. No partition keys, no primary keys.
    Schema schema =
            new Schema(
                    Lists.newArrayList(
                            new DataField(0, "col1", DataTypes.INT(), "first comment"),
                            new DataField(1, "Col2", DataTypes.STRING(), "second comment"),
                            new DataField(2, "COL3", DataTypes.DECIMAL(5, 3), "last comment")),
                    Collections.emptyList(),
                    Collections.emptyList(),
                    Maps.newHashMap(),
                    "");
    Identifier identifier = Identifier.create(DATABASE_TEST, tableName);

    // Create the table through a Hive-metastore-backed catalog so it is
    // registered in Hive as an EXTERNAL table.
    Options options = new Options();
    options.set("warehouse", path);
    options.set("metastore", "hive");
    options.set("table.type", "external");
    options.set("hadoop-conf-dir", hadoopConfDir);
    CatalogContext context = CatalogContext.create(options);
    Catalog hiveCatalog = CatalogFactory.createCatalog(context);
    hiveCatalog.createTable(identifier, schema, false);

    // Drop the table on the Hive side only; the Paimon files remain.
    // (The original String.join over a single-element list was a no-op wrapper.)
    String hiveSql = "DROP TABLE " + tableName;
    assertThatCode(() -> hiveShell.execute(hiveSql)).doesNotThrowAnyException();

    // Recreating the same table must succeed — this is the fixed behavior.
    assertThatCode(() -> hiveCatalog.createTable(identifier, schema, false))
            .doesNotThrowAnyException();
}

@Test
public void testCreateTableUsePartitionedBy() {
// Use `partitioned by` to create hive partition table
Expand Down

0 comments on commit 2b942f7

Please sign in to comment.