Skip to content

Commit

Permalink
[hive] Drop a Hive external table only deletes metadata but not data …
Browse files Browse the repository at this point in the history
…files. (#2979)
  • Loading branch information
zhuangchong authored Mar 12, 2024
1 parent 8f8091f commit 58113a7
Show file tree
Hide file tree
Showing 7 changed files with 24 additions and 31 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
<tbody>
<tr>
<td><h5>catalog-key</h5></td>
<td style="word-wrap: break-word;">(none)</td>
<td style="word-wrap: break-word;">"jdbc"</td>
<td>String</td>
<td>Custom jdbc catalog store key.</td>
</tr>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
import org.apache.paimon.schema.SchemaManager;
import org.apache.paimon.schema.TableSchema;
import org.apache.paimon.utils.Preconditions;
import org.apache.paimon.utils.StringUtils;

import org.apache.paimon.shade.guava30.com.google.common.collect.ImmutableMap;
import org.apache.paimon.shade.guava30.com.google.common.collect.Lists;
Expand Down Expand Up @@ -78,7 +77,7 @@ public class JdbcCatalog extends AbstractCatalog {
protected JdbcCatalog(
FileIO fileIO, String catalogKey, Map<String, String> config, String warehouse) {
super(fileIO);
this.catalogKey = StringUtils.isBlank(catalogKey) ? "jdbc" : catalogKey;
this.catalogKey = catalogKey;
this.options = config;
this.warehouse = warehouse;
Preconditions.checkNotNull(options, "Invalid catalog properties: null");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ public final class JdbcCatalogOptions {
public static final ConfigOption<String> CATALOG_KEY =
ConfigOptions.key("catalog-key")
.stringType()
.defaultValue(null)
.defaultValue("jdbc")
.withDescription("Custom jdbc catalog store key.");

private JdbcCatalogOptions() {}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -140,15 +140,15 @@ public RecordReader<InternalRow> createReader(DataSplit split) throws IOExceptio

Pair<int[], RowType> partitionPair = null;
if (!dataSchema.partitionKeys().isEmpty()) {
Pair<int[], int[][]> partitionMappping =
Pair<int[], int[][]> partitionMapping =
PartitionUtils.constructPartitionMapping(
dataSchema, dataProjection);
// if partition fields are not selected, we just do nothing
if (partitionMappping != null) {
dataProjection = partitionMappping.getRight();
if (partitionMapping != null) {
dataProjection = partitionMapping.getRight();
partitionPair =
Pair.of(
partitionMappping.getLeft(),
partitionMapping.getLeft(),
dataSchema.projectedLogicalRowType(
dataSchema.partitionKeys()));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ public CastFieldGetter[] getCastMapping() {
return castMapping;
}

@Nullable
public Pair<int[], RowType> getPartitionPair() {
    // Returns the partition mapping pair, or null when none was set
    // (i.e. no partition fields were selected).
    return this.partitionPair;
}
Expand Down Expand Up @@ -166,15 +167,15 @@ public BulkFormatMapping build(

Pair<int[], RowType> partitionPair = null;
if (!dataSchema.partitionKeys().isEmpty()) {
Pair<int[], int[][]> partitionMappping =
Pair<int[], int[][]> partitionMapping =
PartitionUtils.constructPartitionMapping(
dataRecordType, dataSchema.partitionKeys(), dataProjection);
// if partition fields are not selected, we just do nothing.
if (partitionMappping != null) {
dataProjection = partitionMappping.getRight();
if (partitionMapping != null) {
dataProjection = partitionMapping.getRight();
partitionPair =
Pair.of(
partitionMappping.getLeft(),
partitionMapping.getLeft(),
dataSchema.projectedLogicalRowType(dataSchema.partitionKeys()));
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,6 @@
import static org.apache.paimon.options.CatalogOptions.TABLE_TYPE;
import static org.apache.paimon.options.OptionsUtils.convertToPropertiesPrefixKey;
import static org.apache.paimon.utils.Preconditions.checkArgument;
import static org.apache.paimon.utils.Preconditions.checkState;
import static org.apache.paimon.utils.StringUtils.isNullOrWhitespaceOnly;

/** A catalog implementation for Hive. */
Expand Down Expand Up @@ -349,6 +348,17 @@ protected void dropTableImpl(Identifier identifier) {
try {
client.dropTable(
identifier.getDatabaseName(), identifier.getObjectName(), true, false, true);

// When dropping a Hive external table, only the Hive metadata is deleted; the data
// files are not deleted.
TableType tableType =
OptionsUtils.convertToEnum(
hiveConf.get(TABLE_TYPE.key(), TableType.MANAGED.toString()),
TableType.class);
if (TableType.EXTERNAL.equals(tableType)) {
return;
}

// Deletes table directory to avoid schema in filesystem exists after dropping hive
// table successfully to keep the table consistency between which in filesystem and
// which in Hive metastore.
Expand Down Expand Up @@ -468,19 +478,6 @@ public String warehouse() {
return warehouse;
}

private void checkIdentifierUpperCase(Identifier identifier) {
    // Hive stores database and table names in lower case, so reject any
    // identifier that contains upper-case characters.
    String databaseName = identifier.getDatabaseName();
    String objectName = identifier.getObjectName();
    checkState(
            databaseName.toLowerCase().equals(databaseName),
            String.format(
                    "Database name[%s] cannot contain upper case in hive catalog",
                    databaseName));
    checkState(
            objectName.toLowerCase().equals(objectName),
            String.format(
                    "Table name[%s] cannot contain upper case in hive catalog",
                    objectName));
}

private Table newHmsTable(Identifier identifier, Map<String, String> tableParameters) {
long currentTimeMillis = System.currentTimeMillis();
TableType tableType =
Expand Down Expand Up @@ -587,10 +584,6 @@ private FieldSchema convertToFieldSchema(DataField dataField) {
dataField.description());
}

private boolean schemaFileExists(Identifier identifier) {
    // The table exists on the filesystem iff a latest schema file is present
    // under its data table location.
    SchemaManager manager = new SchemaManager(fileIO, getDataTableLocation(identifier));
    return manager.latest().isPresent();
}

private SchemaManager schemaManager(Identifier identifier) {
return new SchemaManager(fileIO, getDataTableLocation(identifier))
.withLock(lock(identifier));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ public void testCreateExternalTable() throws Exception {
.isTrue();
tEnv.executeSql("DROP TABLE t").await();
Path tablePath = new Path(path, "test_db.db/t");
assertThat(tablePath.getFileSystem().exists(tablePath)).isFalse();
assertThat(tablePath.getFileSystem().exists(tablePath)).isTrue();
}

@Test
Expand Down

0 comments on commit 58113a7

Please sign in to comment.