1
0

Fix failing build issues

This commit is contained in:
Nishith Agarwal
2017-08-14 11:57:38 -07:00
committed by vinoth chandar
parent 63f1b12355
commit e2d13c6305
3 changed files with 6 additions and 6 deletions

View File

@@ -90,15 +90,15 @@ public class TestMergeOnReadTable {
dfsCluster.shutdown();; dfsCluster.shutdown();;
} }
FSUtils.setFs(null); FSUtils.setFs(null);
// TEMPFIX(vc): Fix failing build // Need to closeAll to clear FileSystem.Cache, required because DFS and LocalFS used in the same JVM
//FileSystem.closeAll(); FileSystem.closeAll();
HoodieTestUtils.resetFS(); HoodieTestUtils.resetFS();
} }
@BeforeClass @BeforeClass
public static void setUpDFS() throws IOException { public static void setUpDFS() throws IOException {
// TEMPFIX(vc): Fix failing build // Need to closeAll to clear FileSystem.Cache, required because DFS and LocalFS used in the same JVM
//FileSystem.closeAll(); FileSystem.closeAll();
if (hdfsTestService == null) { if (hdfsTestService == null) {
hdfsTestService = new HdfsTestService(); hdfsTestService = new HdfsTestService();
dfsCluster = hdfsTestService.start(true); dfsCluster = hdfsTestService.start(true);

View File

@@ -87,7 +87,7 @@ public class HoodieClientTestUtils {
SparkConf sparkConf = new SparkConf() SparkConf sparkConf = new SparkConf()
.setAppName(appName) .setAppName(appName)
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.setMaster("local[4]"); .setMaster("local[1]");
return HoodieReadClient.addHoodieSupport(sparkConf); return HoodieReadClient.addHoodieSupport(sparkConf);
} }
} }

View File

@@ -284,7 +284,7 @@ public class TestHDFSParquetImporter implements Serializable {
private JavaSparkContext getJavaSparkContext() { private JavaSparkContext getJavaSparkContext() {
// Initialize a local spark env // Initialize a local spark env
SparkConf sparkConf = new SparkConf().setAppName("TestConversionCommand").setMaster("local[4]"); SparkConf sparkConf = new SparkConf().setAppName("TestConversionCommand").setMaster("local[1]");
sparkConf = HoodieWriteClient.registerClasses(sparkConf); sparkConf = HoodieWriteClient.registerClasses(sparkConf);
return new JavaSparkContext(HoodieReadClient.addHoodieSupport(sparkConf)); return new JavaSparkContext(HoodieReadClient.addHoodieSupport(sparkConf));
} }