Migrate integration tests to spark 2.4.4

committed by Balaji Varadarajan
parent 045fa87a3d
commit 92c69f5703

This commit moves the dockerized integration-test stack from Spark 2.3.1 to Spark 2.4.4: the cluster name and image tags in the docker-compose definition, the SPARK_VERSION defaults in the Docker image build files, the compose file referenced by the demo scripts, and the compose wiring in the integration-test Maven pom.
@@ -22,7 +22,7 @@ services:
     hostname: namenode
     container_name: namenode
     environment:
-      - CLUSTER_NAME=hudi_hadoop284_hive232_spark231
+      - CLUSTER_NAME=hudi_hadoop284_hive232_spark244
     ports:
       - "50070:50070"
       - "8020:8020"

@@ -39,7 +39,7 @@ services:
     container_name: datanode1
     hostname: datanode1
     environment:
-      - CLUSTER_NAME=hudi_hadoop284_hive232_spark231
+      - CLUSTER_NAME=hudi_hadoop284_hive232_spark244
     env_file:
       - ./hadoop.env
     ports:

@@ -61,7 +61,7 @@ services:
     hostname: historyserver
     container_name: historyserver
     environment:
-      - CLUSTER_NAME=hudi_hadoop284_hive232_spark231
+      - CLUSTER_NAME=hudi_hadoop284_hive232_spark244
     depends_on:
       - "namenode"
     links:

@@ -128,7 +128,7 @@ services:
       - ${HUDI_WS}:/var/hoodie/ws

   sparkmaster:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkmaster_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkmaster_2.4.4:latest
     hostname: sparkmaster
     container_name: sparkmaster
     env_file:

@@ -145,7 +145,7 @@ services:
       - "namenode"

   spark-worker-1:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkworker_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkworker_2.4.4:latest
     hostname: spark-worker-1
     container_name: spark-worker-1
     env_file:

@@ -218,7 +218,7 @@ services:
     command: worker

   adhoc-1:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.4.4:latest
     hostname: adhoc-1
     container_name: adhoc-1
     env_file:

@@ -239,7 +239,7 @@ services:
       - ${HUDI_WS}:/var/hoodie/ws

   adhoc-2:
-    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.3.1:latest
+    image: apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkadhoc_2.4.4:latest
     hostname: adhoc-2
     container_name: adhoc-2
     env_file:

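All seven compose services above now reference the Spark 2.4.4 cluster name or image tags. Before bringing the cluster up, the renamed images can be pre-pulled so the first start does not block on downloads; a minimal sketch, using only the three image names that appear in the diff (the loop itself is illustrative):

# Pre-pull the Spark 2.4.4 images referenced by the compose file above.
for role in sparkmaster sparkworker sparkadhoc; do
  docker pull "apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-${role}_2.4.4:latest"
done
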
@@ -50,7 +50,7 @@
   <properties>
     <skipITs>false</skipITs>
     <docker.build.skip>true</docker.build.skip>
-    <docker.spark.version>2.3.1</docker.spark.version>
+    <docker.spark.version>2.4.4</docker.spark.version>
     <docker.hive.version>2.3.3</docker.hive.version>
     <docker.hadoop.version>2.8.4</docker.hadoop.version>
     <docker.presto.version>0.217</docker.presto.version>

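Since docker.spark.version is an ordinary Maven property, a one-off image build against a different Spark release does not require editing this pom; the property can be overridden on the command line. A hedged sketch, run from the module that owns this pom (the property names come from the block above; -Ddocker.build.skip=false re-enables the image build that the pom skips by default):

# Rebuild the Docker images with the Spark version pinned explicitly.
mvn package -Ddocker.build.skip=false -Ddocker.spark.version=2.4.4
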
@@ -24,7 +24,7 @@ ENV ENABLE_INIT_DAEMON true
 ENV INIT_DAEMON_BASE_URI http://identifier/init-daemon
 ENV INIT_DAEMON_STEP spark_master_init

-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
 ARG SPARK_HADOOP_VERSION=2.7

 ENV SPARK_VERSION ${SPARK_VERSION}

@@ -18,7 +18,7 @@

 ARG HADOOP_VERSION=2.8.4
 ARG HIVE_VERSION=2.3.3
-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
 FROM apachehudi/hudi-hadoop_${HADOOP_VERSION}-hive_${HIVE_VERSION}-sparkbase_${SPARK_VERSION}

 ARG PRESTO_VERSION=0.217

@@ -18,7 +18,7 @@

 ARG HADOOP_VERSION=2.8.4
 ARG HIVE_VERSION=2.3.3
-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
 FROM apachehudi/hudi-hadoop_${HADOOP_VERSION}-hive_${HIVE_VERSION}-sparkbase_${SPARK_VERSION}

 COPY master.sh /opt/spark

@@ -18,7 +18,7 @@

 ARG HADOOP_VERSION=2.8.4
 ARG HIVE_VERSION=2.3.3
-ARG SPARK_VERSION=2.3.1
+ARG SPARK_VERSION=2.4.4
 FROM apachehudi/hudi-hadoop_${HADOOP_VERSION}-hive_${HIVE_VERSION}-sparkbase_${SPARK_VERSION}

 COPY worker.sh /opt/spark

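In each of the four Dockerfiles above, 2.4.4 is only the ARG default, and the master, worker, and adhoc images resolve their sparkbase parent through the same args in the FROM line, so a standalone build can override the version without touching the files. A minimal sketch, where the build-context path and tag are assumptions for illustration:

# Build the spark master image, overriding the Dockerfile's SPARK_VERSION default.
# (Build-context path below is hypothetical; adjust to the repo layout.)
docker build \
  --build-arg SPARK_VERSION=2.4.4 \
  -t apachehudi/hudi-hadoop_2.8.4-hive_2.3.3-sparkmaster_2.4.4:latest \
  docker/hoodie/hadoop/sparkmaster
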
@@ -20,10 +20,10 @@

 WS_ROOT=`dirname $PWD`
 # restart cluster
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml down
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml pull
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml down
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml pull
 sleep 5
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml up -d
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml up -d
 sleep 15

 docker exec -it adhoc-1 /bin/bash /var/hoodie/ws/docker/demo/setup_demo_container.sh

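The script starts the stack detached and only waits on fixed sleeps, so it can be worth confirming the services actually came up before running the demo steps; a small sketch that reuses the script's own invocation pattern against the renamed compose file:

# List the state of every service defined in the renamed compose file.
WS_ROOT=`dirname $PWD`
HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml ps
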
@@ -21,7 +21,7 @@
 # set up root directory
 WS_ROOT=`dirname $PWD`
 # shut down cluster
-HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark231.yml down
+HUDI_WS=${WS_ROOT} docker-compose -f compose/docker-compose_hadoop284_hive233_spark244.yml down

 # remove houst mount directory
 rm -rf /tmp/hadoop_data

@@ -131,7 +131,7 @@

   <properties>
     <dockerCompose.envFile>${project.basedir}/compose_env</dockerCompose.envFile>
-    <dockerCompose.file>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark231.yml</dockerCompose.file>
+    <dockerCompose.file>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark244.yml</dockerCompose.file>
     <skipITs>false</skipITs>
     <docker.compose.skip>${skipITs}</docker.compose.skip>
     <checkstyle.skip>true</checkstyle.skip>

@@ -201,7 +201,7 @@
             <configuration>
               <skip>${docker.compose.skip}</skip>
               <host>unix:///var/run/docker.sock</host>
-              <composeFile>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark231.yml</composeFile>
+              <composeFile>${dockerCompose.file}</composeFile>
               <detachedMode>true</detachedMode>
               <envFile>${dockerCompose.envFile}</envFile>
             </configuration>

@@ -214,7 +214,7 @@
             </goals>
             <configuration>
               <skip>${docker.compose.skip}</skip>
-              <composeFile>${project.basedir}/../docker/compose/docker-compose_hadoop284_hive233_spark231.yml</composeFile>
+              <composeFile>${dockerCompose.file}</composeFile>
               <removeVolumes>true</removeVolumes>
             </configuration>
           </execution>

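With both the up and down executions now reading ${dockerCompose.file}, the compose file is named in exactly one place, so the next Spark bump touches a single property instead of three hard-coded paths. Kicking off the integration tests against the new stack would then look roughly like this, run from the module that owns this pom (skipITs is the property shown in the diff):

# Run the dockerized integration tests, overriding the skip flag explicitly.
mvn verify -DskipITs=false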