forked from SANSA-Stack/SANSA-Stack
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: docker-compose.yml
92 lines (88 loc) · 2.2 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
---
# Docker Compose (file format v2) for a small Hadoop + Spark development
# cluster: one HDFS NameNode, two DataNodes, a Spark master/worker pair,
# and a Hue HDFS file browser. Every service joins the pre-existing
# external "hadoop" network (create it first: `docker network create hadoop`).
version: '2'

services:
  # HDFS NameNode — holds filesystem metadata. Web UI on 50070, RPC on 8020.
  namenode:
    image: bde2020/hadoop-namenode:1.0.0
    hostname: namenode
    container_name: namenode
    domainname: hadoop
    networks:
      - hadoop
    volumes:
      # Persist NameNode metadata on the host so HDFS survives restarts.
      - ./data/namenode:/hadoop/dfs/name
    environment:
      - CLUSTER_NAME=test
    env_file:
      - ./hadoop.env
    ports:
      - "50070:50070"  # NameNode web UI
      - "8020:8020"    # HDFS RPC endpoint

  # First HDFS DataNode — stores block data; config comes from hadoop.env.
  datanode1:
    image: bde2020/hadoop-datanode:1.0.0
    hostname: datanode1
    container_name: datanode1
    domainname: hadoop
    networks:
      - hadoop
    volumes:
      # Per-datanode host directory so the two datanodes don't share state.
      - ./data/datanode1:/hadoop/dfs/data
    env_file:
      - ./hadoop.env

  # Second HDFS DataNode — identical to datanode1 apart from name/volume.
  datanode2:
    image: bde2020/hadoop-datanode:1.0.0
    hostname: datanode2
    container_name: datanode2
    domainname: hadoop
    networks:
      - hadoop
    volumes:
      - ./data/datanode2:/hadoop/dfs/data
    env_file:
      - ./hadoop.env

  # Spark master — web UI on 8080; event logs are written to HDFS so the
  # history server settings below can read them back.
  spark-master:
    image: bde2020/spark-master:1.6.2-hadoop2.6
    hostname: spark-master
    container_name: spark-master
    domainname: hadoop
    networks:
      - hadoop
    environment:
      - CORE_CONF_fs_defaultFS=hdfs://namenode:8020
      - YARN_CONF_yarn_resourcemanager_hostname=resourcemanager
      - SPARK_CONF_spark_eventLog_enabled=true
      - SPARK_CONF_spark_eventLog_dir=hdfs://namenode:8020/spark-logs
      - SPARK_CONF_spark_history_fs_logDirectory=hdfs://namenode:8020/spark-logs
    env_file:
      - ./hadoop.env
    ports:
      - "8080:8080"  # Spark master web UI

  # Spark worker — same HDFS/event-log settings as the master.
  spark-worker:
    image: bde2020/spark-worker:1.6.2-hadoop2.6
    hostname: spark-worker
    container_name: spark-worker
    domainname: hadoop
    networks:
      - hadoop
    environment:
      - CORE_CONF_fs_defaultFS=hdfs://namenode:8020
      - YARN_CONF_yarn_resourcemanager_hostname=resourcemanager
      - SPARK_CONF_spark_eventLog_enabled=true
      - SPARK_CONF_spark_eventLog_dir=hdfs://namenode:8020/spark-logs
      - SPARK_CONF_spark_history_fs_logDirectory=hdfs://namenode:8020/spark-logs
    env_file:
      - ./hadoop.env
    # NOTE(review): `links` is redundant when both services share a
    # user-defined network, but it is kept to preserve existing behavior
    # (it also adds a network alias on some Compose versions).
    links:
      - "spark-master"

  # Hue HDFS file browser — web UI on 8088, pointed at the NameNode.
  hue:
    image: bde2020/hdfs-filebrowser:3.9
    hostname: hdfsfb
    container_name: hdfsfb
    domainname: hadoop
    networks:
      - hadoop
    environment:
      - NAMENODE_HOST=namenode
    ports:
      - "8088:8088"  # Hue web UI

networks:
  # The "hadoop" network is managed outside this file and must already exist.
  hadoop:
    external: true