1
0

[HUDI-1950] Fix Azure CI failure in TestParquetUtils (#2984)

* fix azure pipeline configs

* add pentaho.org in maven repositories

* Make sure file paths include scheme in TestParquetUtils

* add azure build status to README
This commit is contained in:
Raymond Xu
2021-06-15 03:45:17 -07:00
committed by GitHub
parent 515ce8eb36
commit f922837064
4 changed files with 19 additions and 47 deletions

View File

@@ -22,6 +22,7 @@ Hudi manages the storage of large analytical datasets on DFS (Cloud stores, HDFS
<https://hudi.apache.org/>
[![Build Status](https://travis-ci.com/apache/hudi.svg?branch=master)](https://travis-ci.com/apache/hudi)
[![Build Status](https://dev.azure.com/apache-hudi-ci-org/apache-hudi-ci/_apis/build/status/apachehudi-ci.hudi-mirror?branchName=master)](https://dev.azure.com/apache-hudi-ci-org/apache-hudi-ci/_build/latest?definitionId=3&branchName=master)
[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.apache.hudi/hudi/badge.svg)](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.hudi%22)
[![Join on Slack](https://img.shields.io/badge/slack-%23hudi-72eff8?logo=slack&color=48c628&label=Join%20on%20Slack)](https://join.slack.com/t/apache-hudi/shared_invite/enQtODYyNDAxNzc5MTg2LTE5OTBlYmVhYjM0N2ZhOTJjOWM4YzBmMWU2MjZjMGE4NDc5ZDFiOGQ2N2VkYTVkNzU3ZDQ4OTI1NmFmYWQ0NzE)

View File

@@ -34,16 +34,8 @@ stages:
jobs:
- job: unit_tests_spark_client
steps:
- task: Maven@3
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -DskipTests
publishJUnitResults: false
javaHomeOption: 'JDKVersion'
jdkVersionOption: '1.8'
jdkArchitectureOption: 'x64'
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- script: |
mvn $(MAVEN_OPTS) clean install -DskipTests
- task: Cache@2
inputs:
key: 'maven | "$(Agent.OS)" | **/pom.xml'
@@ -66,16 +58,8 @@ stages:
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- job: unit_tests_utilities
steps:
- task: Maven@3
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -DskipTests
publishJUnitResults: false
javaHomeOption: 'JDKVersion'
jdkVersionOption: '1.8'
jdkArchitectureOption: 'x64'
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- script: |
mvn $(MAVEN_OPTS) clean install -DskipTests
- task: Cache@2
inputs:
key: 'maven | "$(Agent.OS)" | **/pom.xml'
@@ -98,16 +82,8 @@ stages:
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- job: unit_tests_other_modules
steps:
- task: Maven@3
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: -DskipTests
publishJUnitResults: false
javaHomeOption: 'JDKVersion'
jdkVersionOption: '1.8'
jdkArchitectureOption: 'x64'
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- script: |
mvn $(MAVEN_OPTS) clean install -DskipTests
- task: Cache@2
inputs:
key: 'maven | "$(Agent.OS)" | **/pom.xml'
@@ -130,18 +106,8 @@ stages:
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- job: functional_tests
steps:
- task: Maven@3
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: -Pfunctional-tests
publishJUnitResults: false
testResultsFiles: '**/surefire-reports/TEST-*.xml'
testRunTitle: 'functional tests'
javaHomeOption: 'JDKVersion'
jdkVersionOption: '1.8'
jdkArchitectureOption: 'x64'
mavenOptions: '-Xmx2g $(MAVEN_OPTS)'
- script: |
mvn $(MAVEN_OPTS) -Pfunctional-tests test
- job: integration_tests
steps:
- script: |

View File

@@ -58,7 +58,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
*/
public class TestParquetUtils extends HoodieCommonTestHarness {
private ParquetUtils parquetUtils = new ParquetUtils();
private ParquetUtils parquetUtils;
public static List<Arguments> bloomFilterTypeCodes() {
return Arrays.asList(
@@ -70,6 +70,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
@BeforeEach
public void setup() {
initPath();
parquetUtils = new ParquetUtils();
}
@ParameterizedTest
@@ -80,7 +81,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
rowKeys.add(UUID.randomUUID().toString());
}
String filePath = Paths.get(basePath, "test.parquet").toString();
String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
writeParquetFile(typeCode, filePath, rowKeys);
// Read and verify
@@ -110,7 +111,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
}
}
String filePath = Paths.get(basePath, "test.parquet").toString();
String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
writeParquetFile(typeCode, filePath, rowKeys);
// Read and verify
@@ -136,7 +137,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
expected.add(new HoodieKey(rowKey, partitionPath));
}
String filePath = basePath + "/test.parquet";
String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
Schema schema = HoodieAvroUtils.getRecordKeyPartitionPathSchema();
writeParquetFile(typeCode, filePath, rowKeys, schema, true, partitionPath);
@@ -152,7 +153,7 @@ public class TestParquetUtils extends HoodieCommonTestHarness {
@Test
public void testReadCounts() throws Exception {
String filePath = basePath + "/test.parquet";
String filePath = Paths.get(basePath, "test.parquet").toUri().toString();
List<String> rowKeys = new ArrayList<>();
for (int i = 0; i < 123; i++) {
rowKeys.add(UUID.randomUUID().toString());

View File

@@ -1051,6 +1051,10 @@
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
</repository>
<repository>
<id>pentaho.org</id>
<url>https://public.nexus.pentaho.org/repository/proxy-public-3rd-party-release/</url>
</repository>
</repositories>
<profiles>