1
0

[MINOR] Reuse deleteMetadataTable for disabling metadata table (#5217)

This commit is contained in:
Ethan Guo
2022-04-03 03:42:14 -07:00
committed by GitHub
parent 84064a9b08
commit c34eb07598
2 changed files with 22 additions and 19 deletions

View File

@@ -66,9 +66,11 @@ import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.exception.HoodieIOException;
 import org.apache.hudi.exception.HoodieInsertException;
+import org.apache.hudi.exception.HoodieMetadataException;
 import org.apache.hudi.exception.HoodieUpsertException;
 import org.apache.hudi.index.HoodieIndex;
 import org.apache.hudi.metadata.HoodieTableMetadata;
+import org.apache.hudi.metadata.HoodieTableMetadataUtil;
 import org.apache.hudi.metadata.HoodieTableMetadataWriter;
 import org.apache.hudi.metadata.MetadataPartitionType;
 import org.apache.hudi.table.action.HoodieWriteMetadata;
@@ -811,15 +813,11 @@ public abstract class HoodieTable<T extends HoodieRecordPayload, I, K, O> implem
/**
 * Deletes the metadata table from storage when it has been disabled in the writer config.
 *
 * <p>Delegates the file-system removal to
 * {@code HoodieTableMetadataUtil.deleteMetadataTable(basePath, context)}, which performs its
 * own existence check, so no redundant exists/delete logic is kept here.
 *
 * @throws HoodieException if the underlying deletion fails (wraps the
 *         {@code HoodieMetadataException} raised by the utility).
 */
public void maybeDeleteMetadataTable() {
  if (shouldExecuteMetadataTableDeletion()) {
    try {
      LOG.info("Deleting metadata table because it is disabled in writer.");
      HoodieTableMetadataUtil.deleteMetadataTable(config.getBasePath(), context);
      // Keep the table config consistent: the metadata partitions no longer exist on storage.
      clearMetadataTablePartitionsConfig();
    } catch (HoodieMetadataException e) {
      // deleteMetadataTable already wraps IO failures in HoodieMetadataException.
      throw new HoodieException("Failed to delete metadata table.", e);
    }
  }
}

View File

@@ -18,13 +18,6 @@
 
 package org.apache.hudi.metadata;
 
-import org.apache.avro.AvroTypeException;
-import org.apache.avro.LogicalTypes;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.generic.IndexedRecord;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hudi.avro.ConvertingGenericData;
 import org.apache.hudi.avro.model.HoodieCleanMetadata;
 import org.apache.hudi.avro.model.HoodieMetadataColumnStats;
@@ -63,10 +56,19 @@ import org.apache.hudi.exception.HoodieIOException;
 import org.apache.hudi.exception.HoodieMetadataException;
 import org.apache.hudi.io.storage.HoodieFileReader;
 import org.apache.hudi.io.storage.HoodieFileReaderFactory;
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.LogicalTypes;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
 import javax.annotation.Nonnull;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.RoundingMode;
@@ -210,12 +212,15 @@ public class HoodieTableMetadataUtil {
* @param context instance of {@link HoodieEngineContext}.
*/
public static void deleteMetadataTable(String basePath, HoodieEngineContext context) {
  // Resolve the metadata table location relative to the data table's base path.
  final String metadataTablePathStr = HoodieTableMetadata.getMetadataTableBasePath(basePath);
  FileSystem fs = FSUtils.getFs(metadataTablePathStr, context.getHadoopConf().get());
  try {
    Path metadataTablePath = new Path(metadataTablePathStr);
    // Guard with exists() so deleting an already-absent metadata table is a no-op
    // instead of depending on delete()'s return-value semantics.
    if (fs.exists(metadataTablePath)) {
      // Recursive delete: removes the entire metadata table directory tree.
      fs.delete(metadataTablePath, true);
    }
  } catch (Exception e) {
    // Wrap any failure (IO or otherwise) in the metadata-specific exception callers expect.
    throw new HoodieMetadataException("Failed to remove metadata table from path " + metadataTablePathStr, e);
  }
}