# sonar_single_account_cli.yml
name: 'Sonar Single Account CLI'
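# Allow only one run of this workflow at a time; every run uses the same "single_account" Terraform workspace and jump server directory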
concurrency:
  group: single_account
on:
  workflow_dispatch:
    inputs:
      branch:
        required: true
        type: string
      delay_destroy:
        description: 'Delay the destroy step and subsequent steps to allow investigation'
        type: boolean
        default: false
        required: true
  workflow_call:
    inputs:
      branch:
        required: true
        type: string
    secrets:
      AWS_ACCESS_KEY_ID_STAGE:
        required: true
      AWS_SECRET_ACCESS_KEY_STAGE:
        required: true
      SLACK_WEBHOOK_URL:
        required: true
      JUMP_SERVER_KEY:
        required: true
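# Stage-account AWS credentials, Terraform CLI defaults, and the delay applied before destroying a failed environment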
env:
  TF_CLI_ARGS: "-no-color"
  TF_INPUT: 0
  TF_VAR_gw_count: 1
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_STAGE }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_STAGE }}
  JUMP_SERVER_KEY: ${{ secrets.JUMP_SERVER_KEY }}
  AWS_REGION: ap-southeast-1
  TF_WORKSPACE: single_account
  DESTROY_DELAY_SECONDS: 1800
permissions:
  contents: read
jobs:
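  # Terraform itself runs on a jump server (54.179.25.83) over SSH; the runner checks out the code, uploads it, and manages temporary SSH access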
  terraform:
    strategy:
      max-parallel: 1
      matrix:
        include:
          - name: single account
            example_dir: examples/aws/installation/sonar_single_account_deployment
            target_dir: single_account
    name: '${{ matrix.name }} ${{ inputs.branch }}'
    runs-on: ubuntu-latest
    env:
      EXAMPLE_DIR: ./${{ matrix.example_dir }}
      REMOTE_EXAMPLE_DIR: ./${{ matrix.target_dir }}/dsfkit/${{ matrix.example_dir }}
      TARGET_DIR: ${{ matrix.target_dir }}
    environment: test
    # Use the Bash shell regardless of whether the GitHub Actions runner is ubuntu-latest, macos-latest, or windows-latest
    defaults:
      run:
        shell: bash
    steps:
      # Checkout the repository to the GitHub Actions runner
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.branch }}
      - name: Setup jq
        uses: sergeysova/jq-action@v2
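      # The next two steps open temporary SSH access from the runner to the jump server; the rule is revoked in "Delete Security Group" at the end of the run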
      - name: Get The Public IP
        run: echo curr_ip=$(curl -s https://ipinfo.io/ip) >> $GITHUB_ENV
      - name: Set IP in AWS Security Group
        run: |
          aws_sg=$(aws ec2 authorize-security-group-ingress --group-id ${{ vars.JUMP_SERVER_SG_ID }} --protocol tcp --port 22 --cidr $curr_ip/32)
          echo sg_id=$(echo $aws_sg | jq '.SecurityGroupRules[0].SecurityGroupRuleId') >> $GITHUB_ENV
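      # Rewrite the examples' module sources to local paths using the sed expression file (sed.expr) in the repository root, so the checked-out modules are the ones under test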
      - name: Change the modules source to local
        run: |
          find ./examples/ -type f -exec sed -i -f sed.expr {} \;
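      # Best-effort destroy of anything a previous run may have left behind on the jump server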
      - name: Cleaning environment
        continue-on-error: true
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: REMOTE_EXAMPLE_DIR,TF_WORKSPACE
          script: |
            terraform -chdir=$REMOTE_EXAMPLE_DIR destroy -auto-approve
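      # Remove the previous copy of the code from the jump server before uploading the new one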
      - name: Delete Old Environment
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: TARGET_DIR
          script: |
            cd $TARGET_DIR
            rm -rf dsfkit
            rm -rf dsfkit.zip
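      # Store the Terraform state in S3 with DynamoDB locking, so that state is shared between runs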
      - name: Create terraform backend file
        run: |
          cat << EOF > $EXAMPLE_DIR/backend.tf
          terraform {
            backend "s3" {
              bucket = "terraform-state-bucket-dsfkit-github-tests"
              key = "states/terraform.tfstate"
              dynamodb_table = "terraform-state-lock"
              region = "us-east-1"
            }
          }
          EOF
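      # The deployment variables come from the TFVAR_PARAMETERS_SINGLE_ACCOUNT_AUTOMATION_V1 GitHub Actions variable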
      - name: Create tfvars File
        run: |
          cat << EOF > $EXAMPLE_DIR/terraform.tfvars
          ${{ vars.TFVAR_PARAMETERS_SINGLE_ACCOUNT_AUTOMATION_V1 }}
          EOF
      - name: View The Vars
        run: cat $EXAMPLE_DIR/terraform.tfvars
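      # Package the checked-out repository, including the generated backend.tf and terraform.tfvars, and copy it to the jump server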
      - name: Create a ZIP File
        run: zip -r dsfkit.zip ../dsfkit
      - name: View The ZIP File
        run: |
          ls -l
          pwd
      - name: SCP the ZIP File
        uses: appleboy/scp-action@master
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          source: "dsfkit.zip"
          timeout: "10m"
          target: ${{ matrix.target_dir }}
          overwrite: true
      - name: Unzip
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: TARGET_DIR
          script: |
            cd $TARGET_DIR
            unzip -uq dsfkit.zip
      # Initialize a new or existing Terraform working directory by creating initial files, loading any remote state, downloading modules, etc.
      - name: Terraform Init
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: REMOTE_EXAMPLE_DIR,TF_WORKSPACE
          script: terraform -chdir=$REMOTE_EXAMPLE_DIR init
      - name: Terraform Validate
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: REMOTE_EXAMPLE_DIR,TF_WORKSPACE
          script: terraform -chdir=$REMOTE_EXAMPLE_DIR validate
      # Generates an execution plan for Terraform
      - name: Terraform Plan
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: REMOTE_EXAMPLE_DIR,TF_WORKSPACE
          script: |
            printenv
            terraform -chdir=$REMOTE_EXAMPLE_DIR plan
      - name: Terraform Apply
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: REMOTE_EXAMPLE_DIR,TF_WORKSPACE
          script: terraform -chdir=$REMOTE_EXAMPLE_DIR apply -auto-approve
      - name: Terraform Output
        if: always()
        uses: appleboy/[email protected]
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: REMOTE_EXAMPLE_DIR,TF_WORKSPACE
          script: terraform -chdir=$REMOTE_EXAMPLE_DIR output -json
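      # Keep any SSH keys generated in the example directory as a workflow artifact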
      - name: Collect Artifacts
        id: collect-artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: collected-keys
          path: |
            ${{ env.EXAMPLE_DIR }}/ssh_keys
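      # Record who or what triggered the run, for the Slack notifications below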
      - name: Check how the workflow was run
        if: ${{ failure() }}
        id: check-trigger
        run: |
          if [ "${{ github.event_name }}" == "schedule" ]; then
            echo "run-by=Automation" >> $GITHUB_OUTPUT
          else
            echo "run-by=${{ github.actor }}" >> $GITHUB_OUTPUT
          fi
      # This step allows time for investigation of the failed resources before destroying them
      - name: Conditional Delay
        if: ${{ failure() }}
        run: |
          echo "delay_destroy: ${{ inputs.delay_destroy }}"
          if [ "${{ inputs.delay_destroy }}" == "true" ]; then
            echo "Terraform workspace: $TF_WORKSPACE"
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*${{ github.workflow }} ${{ env.TF_WORKSPACE }} automation Failed*\n You have ${{ env.DESTROY_DELAY_SECONDS }} seconds to investigate the environment before it is destroyed :alarm_clock:\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
            echo ""
            echo "Sleeping for $((DESTROY_DELAY_SECONDS / 60)) minutes before destroying the environment"
            sleep $DESTROY_DELAY_SECONDS
          fi
      - name: Terraform Destroy
        # if: always()
        uses: appleboy/[email protected]
        id: test_audit
        with:
          host: 54.179.25.83
          username: ec2-user
          key: ${{ env.JUMP_SERVER_KEY }}
          port: 22
          command_timeout: "2h"
          envs: REMOTE_EXAMPLE_DIR,TF_WORKSPACE
          script: terraform -chdir=$REMOTE_EXAMPLE_DIR destroy -auto-approve
      - name: Delete Security Group
        if: always()
        run: aws ec2 revoke-security-group-ingress --group-id ${{ vars.JUMP_SERVER_SG_ID }} --security-group-rule-ids ${{ env.sg_id }}
      # Send a Slack notification when the job fails
      - name: Send Slack When Failure
        if: ${{ failure() }}
        run: |
          if [ "${{ inputs.branch }}" == 'master' ]; then
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*Prod ${{ matrix.name }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#dsfkit-prod"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          else
            curl -X POST -H 'Content-type: application/json' --data '{"text":":exclamation: :exclamation: :exclamation:\n*Dev nightly ${{ matrix.name }} automation Failed*\n<https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}|Please check the job!>\nRun by: ${{ steps.check-trigger.outputs.run-by }}", "channel": "#edsf_automation"}' ${{ secrets.SLACK_WEBHOOK_URL }}
          fi