Using BufferedFsInputStream to wrap FSInputStream for FSDataInputStream
This commit is contained in:
committed by
vinoth chandar
parent
720e42f52a
commit
c3c205fc02
@@ -45,6 +45,9 @@ public class HoodieMemoryConfig extends DefaultHoodieConfig {
|
||||
public static final String MAX_MEMORY_FOR_MERGE_PROP = "hoodie.memory.merge.max.size";
|
||||
// Property to set the max memory for compaction
|
||||
public static final String MAX_MEMORY_FOR_COMPACTION_PROP = "hoodie.memory.compaction.max.size";
|
||||
// Property to set the max memory for dfs inputstream buffer size
|
||||
public static final String MAX_DFS_STREAM_BUFFER_SIZE_PROP = "hoodie.memory.dfs.buffer.max.size";
|
||||
public static final int DEFAULT_MAX_DFS_STREAM_BUFFER_SIZE = 16 * 1024 * 1024; // 16MB
|
||||
|
||||
|
||||
private HoodieMemoryConfig(Properties props) {
|
||||
@@ -86,6 +89,12 @@ public class HoodieMemoryConfig extends DefaultHoodieConfig {
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder withMaxDFSStreamBufferSize(int maxStreamBufferSize) {
|
||||
props.setProperty(MAX_DFS_STREAM_BUFFER_SIZE_PROP,
|
||||
String.valueOf(maxStreamBufferSize));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Dynamic calculation of max memory to use for spillable map. user.available.memory = spark.executor.memory *
|
||||
* (1 - spark.memory.fraction) spillable.available.memory = user.available.memory * hoodie.memory.fraction. Anytime
|
||||
@@ -143,6 +152,9 @@ public class HoodieMemoryConfig extends DefaultHoodieConfig {
|
||||
!props.containsKey(MAX_MEMORY_FOR_COMPACTION_PROP),
|
||||
MAX_MEMORY_FOR_COMPACTION_PROP, String.valueOf(
|
||||
getMaxMemoryAllowedForMerge(props.getProperty(MAX_MEMORY_FRACTION_FOR_COMPACTION_PROP))));
|
||||
setDefaultOnCondition(props,
|
||||
!props.containsKey(MAX_DFS_STREAM_BUFFER_SIZE_PROP),
|
||||
MAX_DFS_STREAM_BUFFER_SIZE_PROP, String.valueOf(DEFAULT_MAX_DFS_STREAM_BUFFER_SIZE));
|
||||
return config;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -369,6 +369,12 @@ public class HoodieWriteConfig extends DefaultHoodieConfig {
|
||||
props.getProperty(HoodieMemoryConfig.MAX_MEMORY_FOR_COMPACTION_PROP));
|
||||
}
|
||||
|
||||
public int getMaxDFSStreamBufferSize() {
|
||||
return Integer
|
||||
.valueOf(
|
||||
props.getProperty(HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE_PROP));
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
|
||||
private final Properties props = new Properties();
|
||||
@@ -469,6 +475,12 @@ public class HoodieWriteConfig extends DefaultHoodieConfig {
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder withMemoryConfig(HoodieMemoryConfig memoryConfig) {
|
||||
props.putAll(memoryConfig.getProps());
|
||||
isMemoryConfigSet = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder withAutoCommit(boolean autoCommit) {
|
||||
props.setProperty(HOODIE_AUTO_COMMIT_PROP, String.valueOf(autoCommit));
|
||||
return this;
|
||||
|
||||
@@ -105,7 +105,7 @@ public class HoodieRealtimeTableCompactor implements HoodieCompactor {
|
||||
HoodieCompactedLogRecordScanner scanner = new HoodieCompactedLogRecordScanner(fs,
|
||||
metaClient.getBasePath(), operation.getDeltaFilePaths(), readerSchema, maxInstantTime,
|
||||
config.getMaxMemoryPerCompaction(), config.getCompactionLazyBlockReadEnabled(),
|
||||
config.getCompactionReverseLogReadEnabled());
|
||||
config.getCompactionReverseLogReadEnabled(), config.getMaxDFSStreamBufferSize());
|
||||
if (!scanner.iterator().hasNext()) {
|
||||
return Lists.<WriteStatus>newArrayList();
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user