[MINOR] Fix CI by ignoring SparkContext error (#5468)
Sets spark.driver.allowMultipleContexts = true when constructing Spark conf in UtilHelpers
This commit is contained in:
@@ -279,6 +279,7 @@ public class UtilHelpers {
|
||||
sparkConf.set("spark.hadoop.mapred.output.compress", "true");
|
||||
sparkConf.set("spark.hadoop.mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
|
||||
sparkConf.set("spark.hadoop.mapred.output.compression.type", "BLOCK");
|
||||
sparkConf.set("spark.driver.allowMultipleContexts", "true");
|
||||
|
||||
additionalConfigs.forEach(sparkConf::set);
|
||||
return SparkRDDWriteClient.registerClasses(sparkConf);
|
||||
|
||||
Reference in New Issue
Block a user