1
0

[HUDI-995] Use Transformations, Assertions and SchemaTestUtil (#1884)

- Consolidate transform functions for tests in Transformations.java
- Consolidate assertion functions for tests in Assertions.java
- Make use of SchemaTestUtil for loading schema from resource
This commit is contained in:
Raymond Xu
2020-08-01 05:57:18 -07:00
committed by GitHub
parent e79fbc07fe
commit 10e4268792
23 changed files with 302 additions and 277 deletions

View File

@@ -42,6 +42,7 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapreduce.Job;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -52,6 +53,7 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import static org.apache.hudi.common.testutils.SchemaTestUtil.getSchemaFromResource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -316,10 +318,11 @@ public class TestHoodieParquetInputFormat {
ensureFilesInCommit("Pulling all commits from 100, should get us the 1 files from 200 commit", files, "200", 1);
}
// TODO enable this after enabling predicate pushdown
@Disabled("enable this after enabling predicate pushdown")
@Test
public void testPredicatePushDown() throws IOException {
// initial commit
Schema schema = InputFormatTestUtil.readSchema("/sample1.avsc");
Schema schema = getSchemaFromResource(getClass(), "/sample1.avsc");
String commit1 = "20160628071126";
File partitionDir = InputFormatTestUtil.prepareParquetTable(basePath, schema, 1, 10, commit1);
InputFormatTestUtil.commit(basePath, commit1);

View File

@@ -123,10 +123,6 @@ public class InputFormatTestUtil {
jobConf.setInt(maxCommitPulls, numberOfCommitsToPull);
}
/**
 * Loads an Avro {@link Schema} from a classpath resource.
 *
 * @param location classpath-relative resource path (e.g. {@code "/sample1.avsc"});
 *                 resolved via {@code InputFormatTestUtil.class.getResourceAsStream},
 *                 so a leading slash makes it root-relative
 * @return the parsed Avro schema
 * @throws IOException if the resource cannot be read or parsed
 */
// NOTE(review): getResourceAsStream returns null for a missing resource, which would
// surface here as an NPE from the parser rather than an IOException — callers should
// pass known-good resource paths.
public static Schema readSchema(String location) throws IOException {
return new Schema.Parser().parse(InputFormatTestUtil.class.getResourceAsStream(location));
}
public static File prepareParquetTable(java.nio.file.Path basePath, Schema schema, int numberOfFiles,
int numberOfRecords, String commitNumber) throws IOException {
HoodieTestUtils.init(HoodieTestUtils.getDefaultHadoopConf(), basePath.toString());