---
#
# INSTANCES
#
# This section fires up a set of guests dynamically,
# registering them in the inventory under a relevant group
#
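#
# The included task file below is not shown here; a minimal sketch of the
# usual launch-and-register pattern, assuming the ec2 and add_host modules
# and variables (${keypair}, ${instance_type}, ...) from the vars files:
#
# - name: launch the gluster guests (hypothetical sketch)
#   local_action: ec2 keypair=${keypair} instance_type=${instance_type} image=${image} count=${instance_count} wait=yes
#   register: ec2_launch
# - name: register each new guest under the gluster_nodes group
#   local_action: add_host hostname=${item.public_dns_name} groupname=gluster_nodes
#   with_items: ${ec2_launch.instances}
#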
- hosts:
- localhost
connection: local
gather_facts: False
vars_files:
- iaas/vars/ec2-vars.yaml
- gluster/vars/gluster-vars.yaml
tags:
- vms
- gluster_volume
- gluster_volume_cleanup
- brick
- swift
tasks:
- include: gluster/tasks/gluster-nodes-ec2.yaml
# Dump data on these hosts.
- hosts: gluster_nodes
vars_files:
- iaas/vars/ec2-vars.yaml
- gluster/vars/gluster-vars.yaml
user: '${remote_user}'
sudo: False
gather_facts: True
tasks:
- name: ec2 facts
action: ec2_facts
register: ec2_facts
- name: dump everything
local_action: template src=common/templates/dump.j2 dest=./ansible_data_dump.txt
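#
# dump.j2 is not part of this file; a minimal sketch of such a template,
# assuming it simply serializes everything Ansible knows about the host:
#
# {# common/templates/dump.j2 (hypothetical sketch) #}
# Host: {{ inventory_hostname }}
# {{ hostvars[inventory_hostname] }}
#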
#----------------------------
#
# Setup gluster
#
#----------------------------
- hosts: gluster_nodes
vars_files:
- iaas/vars/ec2-vars.yaml
- gluster/vars/gluster-vars.yaml
user: '${remote_user}'
sudo: True
gather_facts: True
tasks:
- name: dummy task to force reading of facts
action: debug msg="null"
tags:
- gluster_volume
- gluster_volume_cleanup
- brick
# - include: gluster/tasks/uninstall-gluster.yaml
- include: gluster/tasks/install-gluster.yaml
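#
# The next play targets gluster_nodes[0:0], a host slice that selects only
# the first node, so the cluster-wide gluster commands below run exactly once.
#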
- hosts: gluster_nodes[0:0]
vars_files:
- iaas/vars/ec2-vars.yaml
- gluster/vars/gluster-vars.yaml
user: '${remote_user}'
sudo: True
gather_facts: True
tasks:
- include: gluster/tasks/clean-gluster.yaml
- include: gluster/tasks/configure-gluster.yaml
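#
# configure-gluster.yaml is not shown here; a hypothetical sketch of the
# usual shape of such a file (the volume name, replica count and brick list
# are assumptions, not values from this repo):
#
# - name: probe every node in the group from the first one
#   action: shell gluster peer probe ${item}
#   with_items: ${groups.gluster_nodes}
# - name: create and start a replicated volume
#   action: shell gluster volume create gv0 replica 2 ${brick_list} && gluster volume start gv0
#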
#----------------------------
#
# Setup swift
#
#----------------------------
- hosts: gluster_nodes
vars_files:
- iaas/vars/ec2-vars.yaml
- gluster/vars/gluster-vars.yaml
- swift/vars/swift-vars.yaml
user: '${remote_user}'
sudo: True
gather_facts: True
tasks:
- name: ec2 facts
action: ec2_facts
# - name: dump everything
# local_action: template src=common/templates/dump.j2 dest=./ansible_data_dump_ec2.txt
- include: swift/tasks/install-swift.yaml
- include: swift/tasks/setup-swift.yaml
- include: swift/tasks/install-ufo-pilot.yaml
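#
# "UFO" above is GlusterFS Unified File and Object, the glusterfs-swift
# layer that exposes the gluster volume through the Swift object API which
# the two preceding includes install and configure.
#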
#----------------------------
#
# System-level tweaks on the headnode
#
# Run as root via sudo
#
#----------------------------
- hosts: openstack_heads
# vars_files:
# - vars/global-vars.yaml
user: '${remote_user}'
sudo: True
gather_facts: True
tasks:
- name: wait for system to settle
action: command sleep 5
- name: turn off the firewall so we can talk to the services
action: service name=firewalld state=stopped
- name: disable SELinux
action: selinux policy=targeted state=permissive
#
# Fix for NOVA
#
# https://github.com/openstack-dev/devstack/commit/a534e0bf1a1ec0abc1d1f673af1b70fbf8239350
- name: add vagrant user to policy kit
action: template src=templates/50-libvirt-vagrant.rules dest=/etc/polkit-1/rules.d/50-libvirt-vagrant.rules owner=root group=root mode=0664
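  #
  # The rules file itself lives in templates/; per the devstack commit linked
  # above, such a polkit rule typically looks like this (a sketch, not the
  # verbatim file from this repo):
  #
  # polkit.addRule(function(action, subject) {
  #     if (action.id == "org.libvirt.unix.manage" &&
  #         subject.user == "vagrant") {
  #         return polkit.Result.YES;
  #     }
  # });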
#
# Setup /opt/stack for tweaks
#
- name: setup /opt/stack ahead of time
action: file path=/opt/stack state=directory owner=vagrant group=vagrant mode=0775
#----------------------------
#
# DEVSTACK
#
#----------------------------
- hosts: openstack_heads
# vars_files:
# - vars/global-vars.yaml
user: '${remote_user}'
sudo: False
gather_facts: True
# tags:
# - devstack
tasks:
#
# Get Devstack
#
- name: checkout devstack
action: git repo=${devstack_url} dest=/home/vagrant/devstack version=${devstack_branch}
- name: create localrc
action: template src=templates/localrc dest=/home/vagrant/devstack/localrc owner=vagrant group=vagrant mode=0664
- name: create local.sh
action: template src=templates/local.sh dest=/home/vagrant/devstack/local.sh owner=vagrant group=vagrant mode=0775
#
# Setup /opt/stack for tweaks
#
- name: setup /opt/stack ahead of time
action: file path=/opt/stack state=directory owner=vagrant group=vagrant mode=0775
  #
  # Put NodeJS in place (devstack's Horizon setup expects a node binary
  # on the path, so we pull one in via nvm)
  #
- name: pull nodejs
action: git repo=git://github.com/creationix/nvm.git dest=/opt/stack/nvm
- name: install nodejs
    action: shell cd /opt/stack/nvm && source ./nvm.sh && nvm install 0.8.14 && nvm alias default 0.8.14 executable=/bin/bash
- name: put node stuff into /usr/local/bin
action: shell sudo ln -sf /opt/stack/nvm/v0.8.14/bin/* /usr/local/bin/
- name: put node stuff into /usr/local/lib
    action: shell sudo ln -sf /opt/stack/nvm/v0.8.14/lib/* /usr/local/lib/
#
# Fix horizon.conf for apache on F18
#
- name: fix the horizon.conf file
action: template
src=templates/apache-horizon.template
dest=/home/vagrant/devstack/files/apache-horizon.template
owner=vagrant
group=vagrant
mode=0664
#
  # Fix cinder to make sure that tgtd can find cinder volume config files
#
# - name: make sure tgtd can find cinder config files
# action: shell sudo echo "include /opt/stack/data/cinder/volumes/*" >> /etc/tgt/tgtd.conf
#
  # Run Devstack (3 times! stack.sh can fail partway through on a fresh
  # host, so the with_items loop plus ignore_errors simply retries it)
#
- name: run devstack
action: shell cd /home/vagrant/devstack && FORCE=yes ./stack.sh | tee devstack.log
ignore_errors: yes
with_items:
- 1
- 2
- 3
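#
# A hypothetical follow-up task (not in the original playbook): once
# stack.sh has survived a pass, one could confirm that keystone answers
# on its usual port.
#
# - name: sanity-check that keystone is up
#   action: shell curl -sf http://localhost:5000/v2.0/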