1
0

[MINOR] Fix CI by ignoring SparkContext error (#5468)

Sets `spark.driver.allowMultipleContexts = true` when constructing the Spark conf in `UtilHelpers`, so that CI runs do not fail when a prior test leaves a SparkContext alive.
This commit is contained in:
Y Ethan Guo
2022-04-29 11:19:07 -07:00
committed by GitHub
parent e421d536ea
commit a1d82b4dc5

View File

@@ -279,6 +279,7 @@ public class UtilHelpers {
sparkConf.set("spark.hadoop.mapred.output.compression.codec", "true");
sparkConf.set("spark.hadoop.mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
sparkConf.set("spark.hadoop.mapred.output.compression.type", "BLOCK");
sparkConf.set("spark.driver.allowMultipleContexts", "true");
additionalConfigs.forEach(sparkConf::set);
return SparkRDDWriteClient.registerClasses(sparkConf);