# build_aws_nodes.yml (forked from hortonworks/ansible-hortonworks)
---
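# This task file is meant to be included once per node group. A (hypothetical)
# include from the parent playbook might look like:
#
#   - include_tasks: build_aws_nodes.yml
#     with_items: "{{ nodes }}"
#
# "item" is then one node-group definition; pin it under its own name so the
# tasks below can keep referencing it, even inside their own loops.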
- set_fact: outer_loop="{{ item }}"
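
# Look up the AMI so its root device name can be used when defining the
# EBS root volume further below.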
- name: Get AMI information
  ec2_ami_facts:
    region: "{{ cloud_config.region }}"
    image_ids: "{{ outer_loop.image }}"
  register: current_ami
- name: Fail if the AMI does not provide any information
  fail:
    msg: "The AMI {{ outer_loop.image }} does not exist in region {{ cloud_config.region }} or it does not provide any information."
  when: current_ami['images'] | length == 0
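
# The create task below expects node-group entries with (at least) the fields
# referenced here. An illustrative sketch only; the actual variable files may
# differ:
#
#   nodes:
#     - host_group: "master"
#       image: "ami-0123456789abcdef0"   # hypothetical AMI id
#       type: "m5.xlarge"
#       count: 1
#       public_ip: true
#       security_groups: "default"
#       spot: false
#       spot_price: "0.10"
#       root_volume:
#         ebs: true
#         type: "gp2"
#         size: 100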
- name: Create {{ outer_loop.host_group }} node(s) (EBS root volume)
  ec2:
    region: "{{ cloud_config.region }}"
    zone: "{{ cloud_config.zone }}"
    image: "{{ outer_loop.image }}"
    instance_type: "{{ outer_loop.type }}"
    spot_price: "{{ (outer_loop.spot|default(false) == true) | ternary(outer_loop.spot_price, omit) }}"
    vpc_subnet_id: "{{ cluster_subnet.subnet.id }}"
    groups: "{{ outer_loop.security_groups }}"
    assign_public_ip: "{{ outer_loop.public_ip }}"
    key_name: "{{ cloud_config.ssh.keyname }}"
    exact_count: "{{ outer_loop.count }}"
    volumes:
      - device_name: "{{ current_ami['images'][0]['root_device_name'] }}"
        volume_type: "{{ outer_loop.root_volume.type }}"
        volume_size: "{{ outer_loop.root_volume.size }}"
        delete_on_termination: true
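      # Map every possible instance-store (ephemeral) volume up front; EC2
      # ignores mappings beyond what the chosen instance type actually provides.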
      - { device_name: /dev/xvdb, ephemeral: ephemeral0 }
      - { device_name: /dev/xvdc, ephemeral: ephemeral1 }
      - { device_name: /dev/xvdd, ephemeral: ephemeral2 }
      - { device_name: /dev/xvde, ephemeral: ephemeral3 }
      - { device_name: /dev/xvdf, ephemeral: ephemeral4 }
      - { device_name: /dev/xvdg, ephemeral: ephemeral5 }
      - { device_name: /dev/xvdh, ephemeral: ephemeral6 }
      - { device_name: /dev/xvdi, ephemeral: ephemeral7 }
      - { device_name: /dev/xvdj, ephemeral: ephemeral8 }
      - { device_name: /dev/xvdk, ephemeral: ephemeral9 }
      - { device_name: /dev/xvdl, ephemeral: ephemeral10 }
      - { device_name: /dev/xvdm, ephemeral: ephemeral11 }
      - { device_name: /dev/xvdn, ephemeral: ephemeral12 }
      - { device_name: /dev/xvdo, ephemeral: ephemeral13 }
      - { device_name: /dev/xvdp, ephemeral: ephemeral14 }
      - { device_name: /dev/xvdq, ephemeral: ephemeral15 }
      - { device_name: /dev/xvdr, ephemeral: ephemeral16 }
      - { device_name: /dev/xvds, ephemeral: ephemeral17 }
      - { device_name: /dev/xvdt, ephemeral: ephemeral18 }
      - { device_name: /dev/xvdu, ephemeral: ephemeral19 }
      - { device_name: /dev/xvdv, ephemeral: ephemeral20 }
      - { device_name: /dev/xvdw, ephemeral: ephemeral21 }
      - { device_name: /dev/xvdx, ephemeral: ephemeral22 }
      - { device_name: /dev/xvdy, ephemeral: ephemeral23 }
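    # exact_count + count_tag make this task idempotent: instances are only
    # created (or terminated) until the number of running instances carrying
    # the Group tag matches "count". Caveat: "default(omit)" only omits
    # top-level module parameters, so if cluster_owner is undefined the Owner
    # tag may receive the omit placeholder string rather than being dropped.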
    count_tag:
      Group: "{{ cluster_name }}-{{ outer_loop.host_group }}"
    instance_tags:
      Cluster: "{{ cluster_name }}"
      Role: "{{ outer_loop.host_group }}"
      Group: "{{ cluster_name }}-{{ outer_loop.host_group }}"
      Name: "{{ cluster_name }}-{{ outer_loop.host_group }}"
      Owner: "{{ cluster_owner | default(omit) }}"
    wait: yes
    wait_timeout: 600
    spot_wait_timeout: 600
    user_data: |
      #!/bin/sh
      [ -e /bin/yum-config-manager ] && yum-config-manager --enable rhui-REGION-rhel-server-optional
  register: current_ec2
  when: outer_loop.root_volume.ebs
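
# Wait on the public or private IP of each new instance, depending on whether
# the node group requested a public address.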
- name: Wait for SSH to start
  wait_for:
    host: "{{ (outer_loop.public_ip|default(false) == true) | ternary(local_loop.public_ip, local_loop.private_ip) }}"
    port: 22
    state: started
    delay: 30
    timeout: 300
  loop_control:
    loop_var: local_loop
  with_items: '{{ current_ec2.instances }}'