---
# Juju bundle: five-machine big data cluster (Hadoop 2.7.3 + Spark 2.1 from
# Apache Bigtop). All charms target the xenial series declared here.
series: xenial
description: >
  This is a five unit big data cluster that includes Hadoop 2.7.3 and Spark 2.1
  from Apache Bigtop. Use it to analyse batch data with MapReduce or streaming
  data with Spark. It will run on 5 machines in your cloud.
# Service definitions. Charms given as local filesystem paths are deployed
# from /home/ubuntu/charms/xenial; "cs:" URIs come from the charm store.
# Placement ("to") refers to the machine numbers declared in "machines" below.
services:
  namenode:
    charm: "/home/ubuntu/charms/xenial/hadoop-namenode"
    constraints: "mem=7G root-disk=32G"
    num_units: 1
    annotations:
      # gui-x / gui-y position the service icon on the Juju GUI canvas.
      gui-x: "500"
      gui-y: "800"
    to:
      - "0"
  resourcemanager:
    # Co-located with the namenode on machine 0.
    charm: "/home/ubuntu/charms/xenial/hadoop-resourcemanager"
    constraints: "mem=7G root-disk=32G"
    num_units: 1
    annotations:
      gui-x: "500"
      gui-y: "0"
    to:
      - "0"
  slave:
    # Three worker units, one per machine 1-3.
    charm: "/home/ubuntu/charms/xenial/hadoop-slave"
    constraints: "mem=7G root-disk=32G"
    num_units: 3
    annotations:
      gui-x: "0"
      gui-y: "400"
    to:
      - "1"
      - "2"
      - "3"
  plugin:
    # No num_units/placement — presumably a subordinate charm attached via
    # relations; confirm against the charm's metadata.
    charm: "/home/ubuntu/charms/xenial/hadoop-plugin"
    annotations:
      gui-x: "1000"
      gui-y: "400"
  client:
    charm: "cs:xenial/hadoop-client-8"
    constraints: "mem=7G root-disk=32G"
    num_units: 1
    annotations:
      gui-x: "1250"
      gui-y: "400"
    to:
      - "4"
  spark:
    # Runs Spark against YARN (see spark_execution_mode); shares machine 4
    # with the client, ganglia and rsyslog services.
    charm: "/home/ubuntu/charms/xenial/spark"
    constraints: "mem=7G root-disk=32G"
    num_units: 1
    options:
      spark_execution_mode: "yarn-client"
    annotations:
      gui-x: "1000"
      gui-y: "0"
    to:
      - "4"
  ganglia:
    charm: "cs:xenial/ganglia-12"
    num_units: 1
    annotations:
      gui-x: "0"
      gui-y: "800"
    to:
      - "4"
  ganglia-node:
    # No num_units/placement — presumably subordinate; related below to the
    # namenode, resourcemanager, slave and spark units it monitors.
    charm: "cs:xenial/ganglia-node-7"
    annotations:
      gui-x: "250"
      gui-y: "400"
  rsyslog:
    charm: "cs:~bigdata-dev/xenial/rsyslog-7"
    num_units: 1
    annotations:
      gui-x: "1000"
      gui-y: "800"
    to:
      - "4"
  rsyslog-forwarder-ha:
    # No num_units/placement — presumably subordinate; forwards to the
    # rsyslog aggregator via the syslog relation below.
    charm: "cs:~bigdata-dev/xenial/rsyslog-forwarder-ha-7"
    annotations:
      gui-x: "750"
      gui-y: "400"
# Relations wiring the cluster together. Bare service names use the charms'
# default endpoints; "service:endpoint" pairs name the endpoint explicitly.
# Short flow-style pairs are kept deliberately — each relation is atomic.
relations:
  - [resourcemanager, namenode]
  - [namenode, slave]
  - [resourcemanager, slave]
  - [plugin, namenode]
  - [plugin, resourcemanager]
  - [spark, plugin]
  - [client, plugin]
  # Ganglia monitoring: ganglia-node attaches to each monitored service.
  - ["ganglia-node:juju-info", "namenode:juju-info"]
  - ["ganglia-node:juju-info", "resourcemanager:juju-info"]
  - ["ganglia-node:juju-info", "slave:juju-info"]
  - ["ganglia-node:juju-info", "spark:juju-info"]
  - ["ganglia:node", "ganglia-node:node"]
  # Log aggregation: the forwarder attaches to each service and feeds rsyslog.
  - ["rsyslog-forwarder-ha:juju-info", "namenode:juju-info"]
  - ["rsyslog-forwarder-ha:juju-info", "resourcemanager:juju-info"]
  - ["rsyslog-forwarder-ha:juju-info", "slave:juju-info"]
  - ["rsyslog-forwarder-ha:juju-info", "spark:juju-info"]
  - ["rsyslog:aggregator", "rsyslog-forwarder-ha:syslog"]
# Machine definitions; the "to" placement directives above refer to these
# numbers. Keys are quoted so they stay strings rather than YAML integers.
machines:
  "0":
    series: "xenial"
    constraints: "mem=7G root-disk=32G"
  "1":
    series: "xenial"
    constraints: "mem=7G root-disk=32G"
  "2":
    series: "xenial"
    constraints: "mem=7G root-disk=32G"
  "3":
    series: "xenial"
    constraints: "mem=7G root-disk=32G"
  "4":
    series: "xenial"
    constraints: "mem=7G root-disk=32G"