version: "2.1" services: namenode: image: bde2020/hadoop-namenode:1.1.0-hadoop2.8-java8 container_name: namenode volumes: - ./data/namenode:/hadoop/dfs/name environment: - CLUSTER_NAME=test - CORE_CONF_fs_defaultFS=hdfs://namenode:8020 healthcheck: interval: 5s retries: 100 networks: - spark-net datanode: image: bde2020/hadoop-datanode:1.1.0-hadoop2.8-java8 container_name: datanode volumes: - ./data/datanode:/hadoop/dfs/data environment: - CORE_CONF_fs_defaultFS=hdfs://namenode:8020 depends_on: namenode: condition: service_healthy healthcheck: interval: 5s retries: 100 networks: - spark-net spark-master: image: bde2020/spark-master:2.1.0-hadoop2.8-hive-java8 container_name: spark-master ports: - "8080:8080" - "7077:7077" environment: - CORE_CONF_fs_defaultFS=hdfs://namenode:8020 depends_on: namenode: condition: service_healthy datanode: condition: service_healthy healthcheck: interval: 5s retries: 100 networks: - spark-net spark-worker: image: bde2020/spark-worker:2.1.0-hadoop2.8-hive-java8 environment: - "SPARK_MASTER=spark://spark-master:7077" environment: - CORE_CONF_fs_defaultFS=hdfs://namenode:8020 depends_on: spark-master: condition: service_healthy healthcheck: interval: 5s retries: 100 networks: - spark-net networks: spark-net: external: name: spark-net