Skip to content

Commit

Permalink
add github action to build all images
Browse files Browse the repository at this point in the history
  • Loading branch information
thanh-nguyen-dang committed May 30, 2024
1 parent 3f6eb37 commit b073506
Show file tree
Hide file tree
Showing 5 changed files with 98 additions and 48 deletions.
99 changes: 95 additions & 4 deletions .github/workflows/image_build_push.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,104 @@ name: Build Image and Push to Quay
on: push

jobs:
ci:
name: Build Hadoop base image
build-namenode:
name: Build namenode image
uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
with:
OVERRIDE_REPO_NAME: hadoop-base
OVERRIDE_REPO_NAME: namenode
OVERRIDE_TAG_NAME: 3.3.0
DOCKERFILE_LOCATION: "./hadoop/base/Dockerfile"
DOCKERFILE_LOCATION: "./hadoop/namenode/Dockerfile"
USE_QUAY_ONLY: true
secrets:
ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
# Build and push the Hadoop datanode image to Quay via the shared
# uc-cdis reusable workflow (same pattern as the sibling build-* jobs).
build-datanode:
  name: Build datanode image
  uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
  with:
    # FIX: was "namenode" (copy-paste from the build-namenode job above).
    # This job builds ./hadoop/datanode/Dockerfile, so it must push to the
    # datanode repository, not overwrite the namenode image.
    OVERRIDE_REPO_NAME: datanode
    OVERRIDE_TAG_NAME: 3.3.0
    DOCKERFILE_LOCATION: "./hadoop/datanode/Dockerfile"
    USE_QUAY_ONLY: true
  secrets:
    ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
    ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
    QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
    QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
# Build and push the Hadoop nodemanager image via the shared uc-cdis
# reusable image_build_push workflow.
build-nodemanager:
name: Build nodemanager image
uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
with:
OVERRIDE_REPO_NAME: nodemanager
OVERRIDE_TAG_NAME: 3.3.0
DOCKERFILE_LOCATION: "./hadoop/nodemanager/Dockerfile"
USE_QUAY_ONLY: true
# Secrets forwarded to the reusable workflow for registry authentication.
secrets:
ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
# Build and push the Hadoop resourcemanager image via the shared uc-cdis
# reusable image_build_push workflow.
build-resourcemanager:
name: Build resourcemanager image
uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
with:
OVERRIDE_REPO_NAME: resourcemanager
OVERRIDE_TAG_NAME: 3.3.0
DOCKERFILE_LOCATION: "./hadoop/resourcemanager/Dockerfile"
USE_QUAY_ONLY: true
# Secrets forwarded to the reusable workflow for registry authentication.
secrets:
ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
# Build and push the Hadoop historyserver image via the shared uc-cdis
# reusable image_build_push workflow.
build-historyserver:
name: Build historyserver image
uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
with:
OVERRIDE_REPO_NAME: historyserver
# NOTE(review): this tag uses the "3.3.0-hadoop3.3" scheme of the Spark jobs
# below, while the other Hadoop jobs use plain "3.3.0" -- confirm this
# difference is intentional and not a copy-paste from a Spark stanza.
OVERRIDE_TAG_NAME: 3.3.0-hadoop3.3
DOCKERFILE_LOCATION: "./hadoop/historyserver/Dockerfile"
USE_QUAY_ONLY: true
# Secrets forwarded to the reusable workflow for registry authentication.
secrets:
ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
# Build and push the Spark master image via the shared uc-cdis
# reusable image_build_push workflow.
build-master:
name: Build Spark master image
uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
with:
OVERRIDE_REPO_NAME: spark-master
OVERRIDE_TAG_NAME: 3.3.0-hadoop3.3
DOCKERFILE_LOCATION: "./spark/master/Dockerfile"
USE_QUAY_ONLY: true
# Secrets forwarded to the reusable workflow for registry authentication.
secrets:
ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
# Build and push the Spark worker image via the shared uc-cdis
# reusable image_build_push workflow.
build-worker:
name: Build Spark worker image
uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
with:
OVERRIDE_REPO_NAME: spark-worker
OVERRIDE_TAG_NAME: 3.3.0-hadoop3.3
DOCKERFILE_LOCATION: "./spark/worker/Dockerfile"
USE_QUAY_ONLY: true
# Secrets forwarded to the reusable workflow for registry authentication.
secrets:
ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
build-submit:
name: Build Spark submit image
uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
with:
OVERRIDE_REPO_NAME: spark-submit
OVERRIDE_TAG_NAME: 3.3.0-hadoop3.3
DOCKERFILE_LOCATION: "./spark/submit/Dockerfile"
USE_QUAY_ONLY: true
secrets:
ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
Expand Down
41 changes: 0 additions & 41 deletions hadoop/base/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -50,47 +50,6 @@ RUN apt-get --only-upgrade install libpq-dev

ENV PATH="${PATH}:${SPARK_HOME}/bin:${SPARK_HOME}/sbin:${HADOOP_HOME}/sbin:${HADOOP_HOME}/bin:${JAVA_HOME}/bin:${SCALA_HOME}/bin"

ENV CORE_CONF_fs_defaultFS=hdfs://namenode:9000 \
CORE_CONF_hadoop_http_staticuser_user=root \
CORE_CONF_hadoop_proxyuser_hue_hosts=* \
CORE_CONF_hadoop_proxyuser_hue_groups=* \
CORE_CONF_io_compression_codecs=org.apache.hadoop.io.compress.SnappyCodec \
HDFS_CONF_dfs_webhdfs_enabled=true \
HDFS_CONF_dfs_permissions_enabled=false \
HDFS_CONF_dfs_namenode_datanode_registration_ip___hostname___check=false \
YARN_CONF_yarn_log___aggregation___enable=true \
YARN_CONF_yarn_log_server_url=http://historyserver:8188/applicationhistory/logs/ \
YARN_CONF_yarn_resourcemanager_recovery_enabled=true \
YARN_CONF_yarn_resourcemanager_store_class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore \
YARN_CONF_yarn_resourcemanager_scheduler_class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler \
YARN_CONF_yarn_scheduler_capacity_root_default_maximum___allocation___mb=8192 \
YARN_CONF_yarn_scheduler_capacity_root_default_maximum___allocation___vcores=4 \
YARN_CONF_yarn_resourcemanager_fs_state___store_uri=/rmstate \
YARN_CONF_yarn_resourcemanager_system___metrics___publisher_enabled=true \
YARN_CONF_yarn_resourcemanager_hostname=resourcemanager \
YARN_CONF_yarn_resourcemanager_address=resourcemanager:8032 \
YARN_CONF_yarn_resourcemanager_scheduler_address=resourcemanager:8030 \
YARN_CONF_yarn_resourcemanager_resource__tracker_address=resourcemanager:8031 \
YARN_CONF_yarn_timeline___service_enabled=true \
YARN_CONF_yarn_timeline___service_generic___application___history_enabled=true \
YARN_CONF_yarn_timeline___service_hostname=historyserver \
YARN_CONF_mapreduce_map_output_compress=true \
YARN_CONF_mapred_map_output_compress_codec=org.apache.hadoop.io.compress.SnappyCodec \
YARN_CONF_yarn_nodemanager_resource_memory___mb=16384 \
YARN_CONF_yarn_nodemanager_resource_cpu___vcores=8 \
YARN_CONF_yarn_nodemanager_disk___health___checker_max___disk___utilization___per___disk___percentage=98.5 \
YARN_CONF_yarn_nodemanager_remote___app___log___dir=/app-logs \
YARN_CONF_yarn_nodemanager_aux___services=mapreduce_shuffle \
MAPRED_CONF_mapreduce_framework_name=yarn \
MAPRED_CONF_mapred_child_java_opts=-Xmx4096m \
MAPRED_CONF_mapreduce_map_memory_mb=4096 \
MAPRED_CONF_mapreduce_reduce_memory_mb=8192 \
MAPRED_CONF_mapreduce_map_java_opts=-Xmx3072m \
MAPRED_CONF_mapreduce_reduce_java_opts=-Xmx6144m \
MAPRED_CONF_yarn_app_mapreduce_am_env=HADOOP_MAPRED_HOME=$HADOOP_HOME/ \
MAPRED_CONF_mapreduce_map_env=HADOOP_MAPRED_HOME=$HADOOP_HOME/ \
MAPRED_CONF_mapreduce_reduce_env=HADOOP_MAPRED_HOME=$HADOOP_HOME/

COPY . /gen3spark
WORKDIR /gen3spark

Expand Down
2 changes: 1 addition & 1 deletion spark/master/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ ENV SPARK_MASTER_LOG /spark/logs

EXPOSE 8080 7077 6066

COPY master.sh /gen3spark
COPY master.sh /gen3spark/
WORKDIR /gen3spark

CMD ["/bin/bash", "/master.sh"]
Expand Down
2 changes: 1 addition & 1 deletion spark/submit/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ LABEL maintainer="Gezim Sejdiu <[email protected]>, Giannis Mouchakis <gmouchak
ENV SPARK_MASTER_NAME spark-master
ENV SPARK_MASTER_PORT 7077

COPY submit.sh /gen3spark
COPY submit.sh /gen3spark/
WORKDIR /gen3spark

CMD ["/bin/bash", "/submit.sh"]
2 changes: 1 addition & 1 deletion spark/worker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,6 @@ ENV SPARK_MASTER "spark://spark-master:7077"

EXPOSE 8081

COPY worker.sh /gen3spark
COPY worker.sh /gen3spark/

CMD ["/bin/bash", "/worker.sh"]

0 comments on commit b073506

Please sign in to comment.