
Commit

move paimon schema cache to ExternalSchemaCache
zddr committed Dec 10, 2024
1 parent 1de646f commit b1da138
Showing 3 changed files with 9 additions and 5 deletions.
@@ -503,6 +503,10 @@ public Set<String> getPartitionNames() {
 
     @Override
     public Optional<SchemaCacheValue> initSchemaAndUpdateTime(SchemaCacheKey key) {
+        return initSchemaAndUpdateTime();
+    }
+
+    public Optional<SchemaCacheValue> initSchemaAndUpdateTime() {
         org.apache.hadoop.hive.metastore.api.Table table = ((HMSExternalCatalog) catalog).getClient()
                 .getTable(dbName, name);
         // try to use transient_lastDdlTime from hms client
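For context, here is a minimal, self-contained sketch of the pattern this hunk introduces: the shared schema cache hands the table a SchemaCacheKey, and the HMS table ignores it by delegating to a keyless overload. All class and field names below (ExternalTableSketch, HmsTableSketch, schemaUpdateTime, DelegationDemo) are illustrative stand-ins, not the real Doris ExternalSchemaCache types.

import java.util.Optional;

// Illustrative stand-ins for Doris' SchemaCacheKey / SchemaCacheValue.
class SchemaCacheKey {
    final String dbName;
    final String tblName;
    SchemaCacheKey(String dbName, String tblName) {
        this.dbName = dbName;
        this.tblName = tblName;
    }
}

class SchemaCacheValue {
    final long schemaUpdateTime;
    SchemaCacheValue(long schemaUpdateTime) {
        this.schemaUpdateTime = schemaUpdateTime;
    }
}

// The shared cache loader calls the key-taking method on every external table.
abstract class ExternalTableSketch {
    public abstract Optional<SchemaCacheValue> initSchemaAndUpdateTime(SchemaCacheKey key);
}

class HmsTableSketch extends ExternalTableSketch {
    @Override
    public Optional<SchemaCacheValue> initSchemaAndUpdateTime(SchemaCacheKey key) {
        // HMS tables do not need the key, so delegate to the keyless overload,
        // mirroring the change in this hunk.
        return initSchemaAndUpdateTime();
    }

    public Optional<SchemaCacheValue> initSchemaAndUpdateTime() {
        // The real method fetches the table from HMS and reads transient_lastDdlTime;
        // a placeholder timestamp stands in for that here.
        return Optional.of(new SchemaCacheValue(System.currentTimeMillis()));
    }
}

public class DelegationDemo {
    public static void main(String[] args) {
        HmsTableSketch tbl = new HmsTableSketch();
        // The key-taking entry point works, even though the key is unused.
        System.out.println(tbl.initSchemaAndUpdateTime(new SchemaCacheKey("db", "tbl")).isPresent());
    }
}

Table implementations that do need the key (presumably the Paimon side that this commit moves onto ExternalSchemaCache) can override the key-taking variant directly, which is why the shared signature carries the key at all.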
@@ -138,7 +138,7 @@ private void createDbAndTableForHmsCatalog(HMSExternalCatalog hmsCatalog) {
                 result = TableIf.TableType.HMS_EXTERNAL_TABLE;
 
                 // mock initSchemaAndUpdateTime and do nothing
-                tbl.initSchemaAndUpdateTime(null);
+                tbl.initSchemaAndUpdateTime();
                 minTimes = 0;
 
                 tbl.getDatabase();
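The result = ... and minTimes = 0 lines in this and the following test hunks are JMockit recording syntax. Below is a minimal sketch of how such an expectation block is typically written, assuming JMockit's Expectations API and reusing the hypothetical HmsTableSketch class from the sketch above as the mocked type; the test class name and result value are illustrative.

import java.util.Optional;

import mockit.Expectations;
import mockit.Mocked;
import org.junit.Test;

public class SchemaMockSketchTest {
    @Mocked
    HmsTableSketch tbl;   // hypothetical stand-in for the mocked external table

    @Test
    public void mockSchemaInit() {
        new Expectations() {{
            // Record the keyless overload that the updated tests now call.
            // minTimes = 0 makes the call optional: "mock it and do nothing".
            tbl.initSchemaAndUpdateTime();
            result = Optional.empty();
            minTimes = 0;
        }};
        // The test body would then drive planning/caching code that may
        // (or may not) trigger schema loading on the mocked table.
    }
}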
@@ -158,7 +158,7 @@ private void init(HMSExternalCatalog hmsCatalog) {
                 result = DLAType.HIVE;
 
                 // mock initSchemaAndUpdateTime and do nothing
-                tbl.initSchemaAndUpdateTime(null);
+                tbl.initSchemaAndUpdateTime();
                 minTimes = 0;
 
                 tbl.getDatabase();
@@ -208,7 +208,7 @@ private void init(HMSExternalCatalog hmsCatalog) {
                 result = DLAType.HIVE;
 
                 // mock initSchemaAndUpdateTime and do nothing
-                tbl2.initSchemaAndUpdateTime(null);
+                tbl2.initSchemaAndUpdateTime();
                 minTimes = 0;
 
                 tbl2.getDatabase();
@@ -386,7 +386,7 @@ public void testHitSqlCacheAfterPartitionUpdateTimeChanged() throws Exception {
         List<ScanNode> scanNodes = Arrays.asList(hiveScanNode4);
 
         // invoke initSchemaAndUpdateTime first and init schemaUpdateTime
-        tbl2.initSchemaAndUpdateTime(null);
+        tbl2.initSchemaAndUpdateTime();
 
         CacheAnalyzer ca = new CacheAnalyzer(connectContext, parseStmt, scanNodes);
         ca.checkCacheMode(System.currentTimeMillis() + Config.cache_last_version_interval_second * 1000L * 2);
@@ -434,7 +434,7 @@ public void testHitSqlCacheByNereidsAfterPartitionUpdateTimeChanged() {
         List<ScanNode> scanNodes = Arrays.asList(hiveScanNode4);
 
         // invoke initSchemaAndUpdateTime first and init schemaUpdateTime
-        tbl2.initSchemaAndUpdateTime(null);
+        tbl2.initSchemaAndUpdateTime();
 
         CacheAnalyzer ca = new CacheAnalyzer(connectContext, parseStmt, scanNodes);
         ca.checkCacheModeForNereids(System.currentTimeMillis() + Config.cache_last_version_interval_second * 1000L * 2);
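In both cache tests above, the timestamp passed to checkCacheMode / checkCacheModeForNereids is deliberately pushed two full cache windows past "now". A rough sketch of that unit arithmetic is below; Config.cache_last_version_interval_second is the real Doris config knob seen in the diff, while the value 30 and the class name ProbeTimeSketch are assumed placeholders.

public class ProbeTimeSketch {
    public static void main(String[] args) {
        // Assumed placeholder; the real value comes from
        // Config.cache_last_version_interval_second.
        long cacheLastVersionIntervalSecond = 30;

        // seconds -> milliseconds, doubled so that, relative to the table's
        // schema update time, the probe lies well outside the configured interval.
        long probeTimeMs = System.currentTimeMillis()
                + cacheLastVersionIntervalSecond * 1000L * 2;

        System.out.println("probe time (ms since epoch): " + probeTimeMs);
    }
}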
