
Migrate integration tests to spark 2.4.4

Mehrotra
2019-11-12 16:16:32 -08:00
committed by Balaji Varadarajan
parent 045fa87a3d
commit 92c69f5703
9 changed files with 19 additions and 19 deletions

View File

@@ -22,7 +22,7 @@ services:
    hostname: namenode
    container_name: namenode
    environment:
-      - CLUSTER_NAME=hudi_hadoop284_hive232_spark231
+      - CLUSTER_NAME=hudi_hadoop284_hive232_spark244
    ports:
      - "50070:50070"
      - "8020:8020"
@@ -39,7 +39,7 @@ services:
    container_name: datanode1
    hostname: datanode1
    environment:
-      - CLUSTER_NAME=hudi_hadoop284_hive232_spark231
+      - CLUSTER_NAME=hudi_hadoop284_hive232_spark244
    env_file:
      - ./hadoop.env
    ports:
@@ -61,7 +61,7 @@ services:
    hostname: historyserver
    container_name: historyserver
    environment:
-      - CLUSTER_NAME=hudi_hadoop284_hive232_spark231
+      - CLUSTER_NAME=hudi_hadoop284_hive232_spark244
    depends_on:
      - "namenode"
    links:
@@ -128,7 +128,7 @@ services:
      - ${HUDI_WS}:/var/hoodie/ws
  sparkmaster:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkmaster_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkmaster_2.4.4:latest
    hostname: sparkmaster
    container_name: sparkmaster
    env_file:
@@ -145,7 +145,7 @@ services:
      - "namenode"
  spark-worker-1:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkworker_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkworker_2.4.4:latest
    hostname: spark-worker-1
    container_name: spark-worker-1
    env_file:
@@ -218,7 +218,7 @@ services:
    command: worker
  adhoc-1:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.4.4:latest
    hostname: adhoc-1
    container_name: adhoc-1
    env_file:
@@ -239,7 +239,7 @@ services:
      - ${HUDI_WS}:/var/hoodie/ws
  adhoc-2:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.4.4:latest
    hostname: adhoc-2
    container_name: adhoc-2
    env_file:

View File

@@ -50,7 +50,7 @@
  <properties>
    <skipITs>false</skipITs>
    <docker.build.skip>true</docker.build.skip>
-    <docker.spark.version>2.3.1</docker.spark.version>
+    <docker.spark.version>2.4.4</docker.spark.version>
    <docker.hive.version>2.3.3</docker.hive.version>
    <docker.hadoop.version>2.8.4</docker.hadoop.version>
    <docker.presto.version>0.217</docker.presto.version>

View File

@@ -24,7 +24,7 @@ ENV ENABLE_INIT_DAEMON true
ENV INIT_DAEMON_BASE_URI http://identifier/init-daemon
ENV INIT_DAEMON_STEP spark_master_init
-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
ARG SPARK_HADOOP_VERSION=2.7
ENV SPARK_VERSION ${SPARK_VERSION}

View File

@@ -18,7 +18,7 @@
ARG HADOOP_VERSION=2.8.4
ARG HIVE_VERSION=2.3.3
-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
FROM apachehudi/hudi-hadoop_${HADOOP_VERSION}-hive_${HIVE_VERSION}-sparkbase_${SPARK_VERSION}
ARG PRESTO_VERSION=0.217
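
With both defaults updated, the FROM line above resolves to apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkbase_2.4.4. Because ARG values declared before FROM are consumed by the FROM instruction, the Spark version can also be overridden at build time without editing the Dockerfile; a minimal sketch, assuming it is run from this image's build context (the tag name here is illustrative):

  docker build --build-arg SPARK_VERSION=2.4.4 -t hudi-presto-test .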

View File

@@ -18,7 +18,7 @@
ARG HADOOP_VERSION=2.8.4
ARG HIVE_VERSION=2.3.3
-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
FROM apachehudi/hudi-hadoop_${HADOOP_VERSION}-hive_${HIVE_VERSION}-sparkbase_${SPARK_VERSION}
COPY master.sh /opt/spark

View File

@@ -18,7 +18,7 @@
ARG HADOOP_VERSION=2.8.4
ARG HIVE_VERSION=2.3.3
-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
FROM apachehudi/hudi-hadoop_${HADOOP_VERSION}-hive_${HIVE_VERSION}-sparkbase_${SPARK_VERSION}
COPY worker.sh /opt/spark

View File

@@ -20,10 +20,10 @@
WS_ROOT=`dirname $PWD`
# restart cluster
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml down
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml pull
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml down
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml pull
sleep 5
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml up -d
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml up -d
sleep 15
docker exec -it adhoc-1 /bin/bash /var/hoodie/ws/docker/demo/setup_demo_container.sh
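
For reference, this restart script (presumably docker/setup_demo.sh; the filename is not shown in this view) is meant to be run from the docker directory of a Hudi checkout, so that WS_ROOT (`dirname $PWD`) resolves to the workspace root mounted into the containers:

  cd docker
  ./setup_demo.sh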

View File

@@ -21,7 +21,7 @@
# set up root directory
WS_ROOT=`dirname $PWD`
# shut down cluster
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml down
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml down
# remove host mount directory
rm -rf /tmp/hadoop_data

View File

@@ -131,7 +131,7 @@
  <properties>
    <dockerCompose.envFile>${project.basedir}/compose_env</dockerCompose.envFile>
-    <dockerCompose.file>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark231.yml</dockerCompose.file>
+    <dockerCompose.file>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark244.yml</dockerCompose.file>
    <skipITs>false</skipITs>
    <docker.compose.skip>${skipITs}</docker.compose.skip>
    <checkstyle.skip>true</checkstyle.skip>
@@ -201,7 +201,7 @@
      <configuration>
        <skip>${docker.compose.skip}</skip>
        <host>unix:///var/run/docker.sock</host>
-        <composeFile>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark231.yml</composeFile>
+        <composeFile>${dockerCompose.file}</composeFile>
        <detachedMode>true</detachedMode>
        <envFile>${dockerCompose.envFile}</envFile>
      </configuration>
@@ -214,7 +214,7 @@
        </goals>
        <configuration>
          <skip>${docker.compose.skip}</skip>
-          <composeFile>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark231.yml</composeFile>
+          <composeFile>${dockerCompose.file}</composeFile>
          <removeVolumes>true</removeVolumes>
        </configuration>
      </execution>
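
After this refactor, both compose-plugin executions read the path from the single dockerCompose.file property defined in the first hunk, so the next stack upgrade only has to touch one line. Because it is an ordinary Maven property, an alternate compose file could presumably also be supplied on the command line; a hypothetical invocation (the path is illustrative):

  mvn verify -DdockerCompose.file=/path/to/alternate-compose.yml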