Merge pull request #15798 from CDCgov/deployment/2024-09-05
Deployment of 2024-09-05
victor-chaparro authored Sep 5, 2024
2 parents 12ab4a2 + 851a1da commit c09ae52
Showing 30 changed files with 1,064 additions and 66 deletions.
9 changes: 4 additions & 5 deletions .environment/frontend/run-frontend.sh
@@ -19,13 +19,12 @@ function modified_check() {
cd "$DIR/../../frontend-react";
FRONTEND_DIR=$(pwd);

if [ ! -f "$HOOK_FILE" ]; then
echo "$FRONTEND_DIR/$HOOK_FILE does not exist. Please make sure you run yarn first."
exit 1
fi

modified_check
if [[ ${isModified} == 1 ]]; then
if [ ! -f "$HOOK_FILE" ]; then
echo "$FRONTEND_DIR/$HOOK_FILE does not exist. Please make sure you run yarn first."
exit 1
fi
"./$HOOK_FILE"
fi
RC=$?
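Because this view drops the +/- diff markers, the hook-file check appears twice above: once at its old location and once at its new location inside the modified_check branch. A rough sketch of how the script reads after this change (inferred from the hunk; not the verbatim file):

```bash
# Sketch of the post-change flow in run-frontend.sh (inferred from the diff above).
cd "$DIR/../../frontend-react"
FRONTEND_DIR=$(pwd)

modified_check                      # sets isModified (defined earlier in the script)
if [[ ${isModified} == 1 ]]; then
    # The yarn hook-file check now only runs when a rebuild is actually needed.
    if [ ! -f "$HOOK_FILE" ]; then
        echo "$FRONTEND_DIR/$HOOK_FILE does not exist. Please make sure you run yarn first."
        exit 1
    fi
    "./$HOOK_FILE"
fi
RC=$?
```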
1 change: 1 addition & 0 deletions .environment/gitleaks/gitleaks-config.toml
@@ -204,6 +204,7 @@ title = "PRIME ReportStream Gitleaks Configuration"
'ApiKeyCredential\(\"flexion\"',
'authType: \"two-legged\"',
'Authorization-Type: \"username/password\"',
'cdctiautomated_sa'
]
paths = [
'.terraform/modules/',
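The one-line change above adds 'cdctiautomated_sa' to the allowlisted regexes so this service-account name no longer trips secret scanning. To sanity-check the allowlist locally, a scan against this config can be run along these lines (a sketch; flag names are the standard gitleaks v8 options, adjust the source path as needed):

```bash
# Run gitleaks from the repository root using the project's config file.
gitleaks detect \
  --source . \
  --config .environment/gitleaks/gitleaks-config.toml \
  --verbose
```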
14 changes: 12 additions & 2 deletions .github/actions/az-cost/action.yml
@@ -25,7 +25,7 @@ runs:
- name: Install azure-cost-cli
shell: bash
run: |
dotnet new tool-manifest
dotnet new tool-manifest --force
dotnet tool install azure-cost-cli
- name: Run azure-cost-cli for daily totals
@@ -38,6 +38,16 @@
dotnet azure-cost accumulatedCost -g ${{ inputs.rg }} -t custom --from $(date --date='${{ inputs.days_ago }} days ago' '+%m/%d/%Y') --to $(date --date='1 days ago' '+%m/%d/%Y') -o csv >> $GITHUB_OUTPUT
echo "$EOF" >> $GITHUB_OUTPUT
- name: Run azure-cost-cli for daily totals by resource
if: inputs.total_format == 'dailyByResource'
id: run_azure_cost_daily_by_resource
shell: bash
run: |
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
echo "RESULT<<$EOF" >> $GITHUB_OUTPUT
dotnet azure-cost dailyCosts -g ${{ inputs.rg }} -t custom --from $(date --date='${{ inputs.days_ago }} days ago' '+%m/%d/%Y') --to $(date --date='1 days ago' '+%m/%d/%Y') -o csv --exclude-meter-details --dimension ConsumedService >> $GITHUB_OUTPUT
echo "$EOF" >> $GITHUB_OUTPUT
- name: Run azure-cost-cli for total
if: inputs.total_format == 'summary'
id: run_azure_cost_summary
@@ -52,5 +62,5 @@
run: |
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
echo "RESULT<<$EOF" >> $GITHUB_OUTPUT
echo -e "${{ steps.run_azure_cost_daily.outputs.RESULT }}${{ steps.run_azure_cost_summary.outputs.RESULT }}" >> $GITHUB_OUTPUT
echo -e "${{ steps.run_azure_cost_daily.outputs.RESULT }}${{ steps.run_azure_cost_daily_by_resource.outputs.RESULT }}${{ steps.run_azure_cost_summary.outputs.RESULT }}" >> $GITHUB_OUTPUT
echo "$EOF" >> $GITHUB_OUTPUT
6 changes: 3 additions & 3 deletions .github/actions/build-backend/action.yml
@@ -100,7 +100,7 @@ runs:
shell: bash

- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action/linux@567cc7f8dcea3eba5da355f6ebc95663310d8a07
uses: EnricoMi/publish-unit-test-result-action/linux@82082dac68ad6a19d980f8ce817e108b9f496c2a
if: >
always() &&
github.event.sender.login != 'dependabot[bot]' &&
@@ -151,7 +151,7 @@
shell: bash

- name: Publish Integration Test Results
uses: EnricoMi/publish-unit-test-result-action/linux@567cc7f8dcea3eba5da355f6ebc95663310d8a07
uses: EnricoMi/publish-unit-test-result-action/linux@82082dac68ad6a19d980f8ce817e108b9f496c2a
if: >
always() &&
github.event.sender.login != 'dependabot[bot]' &&
@@ -194,7 +194,7 @@

- name: Upload Artifact
if: inputs.upload-build == 'true'
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
name: prime-router-build-${{ inputs.version }}
path: prime-router/prime-router-build.tar.gz
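Most of the remaining files in this deployment simply move pinned GitHub Actions (upload-artifact, download-artifact, publish-unit-test-result-action, and others below) to newer commit SHAs. If you need to confirm which release a pin corresponds to, one option is to list the remote tags and look for the hash (this only matches when a tag points directly at that commit; annotated tags also show a peeled ^{} entry):

```bash
# List tags with their commit SHAs for the pinned action and search for the pin.
git ls-remote --tags https://github.com/actions/upload-artifact \
  | grep 50769540e7f4bd5e21e526ee35c689e35e0d6874
```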
4 changes: 2 additions & 2 deletions .github/actions/build-frontend/action.yml
@@ -113,7 +113,7 @@ runs:
shell: bash

- name: Store E2E Results
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
if: always() && ${{ !startsWith(inputs.env-name, 'trialfrontend') }}
with:
name: e2e-data
@@ -182,7 +182,7 @@
run: tar -czf static-website-react.tar.gz build

- name: Upload frontend artifact
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
name: static-website-react-${{ inputs.version }}
path: frontend-react/static-website-react.tar.gz
2 changes: 1 addition & 1 deletion .github/actions/build-submissions/action.yml
@@ -39,7 +39,7 @@ runs:
distribution: "temurin"
cache: "gradle"

- uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582
- uses: gradle/actions/setup-gradle@16bf8bc8fe830fa669c3c9f914d3eb147c629707

- name: Lint
if: inputs.run-integration-tests == 'true'
4 changes: 4 additions & 0 deletions .github/dependabot.yml
@@ -115,6 +115,10 @@ updates:
directory: "/.github/actions/az-cost"
schedule:
interval: "daily"
- package-ecosystem: "github-actions"
directory: "/.github/actions/build-submissions"
schedule:
interval: "daily"

# Frontend
- package-ecosystem: "npm"
60 changes: 48 additions & 12 deletions .github/workflows/export_cost_data.yml
@@ -4,33 +4,57 @@ on:
schedule:
# The workflow runs every day at 3PM
- cron: "0 19 * * *" #UTC-5

jobs:
export_cost_data:
name: Export Azure Cost Data to Storage
strategy:
fail-fast: false
matrix:
include:
- rg: "prime-data-hub-staging"
env: "staging"
pwd_key: "POSTGRESQL_STAGING_PWD"
db: "pdhstaging-pgsql"
- rg: "prime-data-hub-prod"
env: "prod"
pwd_key: "POSTGRESQL_PROD_PWD"
db: "pdhprod-pgsql"
runs-on: ubuntu-latest
steps:
- name: "Check out changes"
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- name: Connect to VPN and login to Azure
uses: ./.github/actions/vpn-azure
with:
env-name: prod
env-name: ${{ matrix.env }}
sp-creds: ${{ secrets.SERVICE_PRINCIPAL_CREDS }}
tf-auth: false
- name: Run Az Cost CLI
id: az-cost
uses: ./.github/actions/az-cost
with:
rg: prime-data-hub-prod
rg: ${{ matrix.rg }}
days_ago: 180
total_format: daily
- name: Get Az Cost Result
run: |
echo "${{ steps.az-cost.outputs.result }}" > costs_raw.csv
touch costs.csv
sed 's/\r//' costs_raw.csv > costs.csv
cat ./costs.csv
sed 's/\r//' costs_raw.csv > costs_total.csv
cat ./costs_total.csv
- name: Run Az Cost CLI By Resource
id: az-cost-by-resource
uses: ./.github/actions/az-cost
with:
rg: ${{ matrix.rg }}
days_ago: 180
total_format: dailyByResource
- name: Get Az Cost Result By Resource
run: |
echo "${{ steps.az-cost-by-resource.outputs.result }}" > costs_by_res_raw.csv
touch costs_by_res.csv
sed 's/\r//' costs_by_res_raw.csv > costs_by_res.csv
cat ./costs_by_res.csv
- name: Get runner ip
id: runner_ip
uses: ./.github/actions/runner-ip
@@ -39,15 +63,15 @@
id: add-db-access
shell: bash
run: |
az postgres server update -g prime-data-hub-prod -n pdhprod-pgsql --public-network-access "Enabled"
az postgres server update -g ${{ matrix.rg }} -n ${{ matrix.db }} --public-network-access "Enabled"
sleep 10;
az postgres server firewall-rule create -g prime-data-hub-prod -s pdhprod-pgsql -n github_actions_runner \
az postgres server firewall-rule create -g ${{ matrix.rg }} -s ${{ matrix.db }} -n github_actions_runner \
--start-ip-address ${{ steps.runner_ip.outputs.ip-address }} --end-ip-address ${{ steps.runner_ip.outputs.ip-address }}
- name: Store cost data in DB
uses: ./.github/actions/db-query
with:
pass: ${{ secrets['POSTGRESQL_PROD_PWD'] }}
host: pdhprod-pgsql
pass: ${{ secrets[matrix.pwd_key] }}
host: ${{ matrix.db }}
port: 5432
user: prime
database: prime_data_hub
@@ -60,12 +84,24 @@
Currency varchar(3)
);
TRUNCATE Azure_Costs;
\copy Azure_Costs (Date, Cost, CostUsd, Currency) FROM './costs.csv' WITH DELIMITER ',' CSV HEADER;
\copy Azure_Costs (Date, Cost, CostUsd, Currency) FROM './costs_total.csv' WITH DELIMITER ',' CSV HEADER;
CREATE TABLE IF NOT EXISTS Azure_Resource_Costs (
Date date,
Name varchar(128),
Cost NUMERIC(13, 8),
CostUsd NUMERIC(13, 8),
Currency varchar(3),
Tags varchar(256)
);
TRUNCATE Azure_Resource_Costs;
\copy Azure_Resource_Costs (Date, Name, Cost, CostUsd, Currency, Tags) FROM './costs_by_res.csv' WITH DELIMITER ',' CSV HEADER;
# Remove runner db access
- name: Remove DB access
if: always() && steps.add-db-access.outcome == 'success'
shell: bash
run: |
az postgres server firewall-rule delete -g prime-data-hub-prod -s pdhprod-pgsql -n github_actions_runner --yes
az postgres server firewall-rule delete -g ${{ matrix.rg }} -s ${{ matrix.db }} -n github_actions_runner --yes
sleep 10;
az postgres server update -g prime-data-hub-prod -n pdhprod-pgsql --public-network-access "Disabled"
az postgres server update -g ${{ matrix.rg }} -n ${{ matrix.db }} --public-network-access "Disabled"
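Once the workflow has loaded the per-resource CSV into the new Azure_Resource_Costs table, per-resource spend can be inspected directly in the database. An illustrative ad-hoc query (connection details are simplified and mirror the staging matrix entry; the workflow itself goes through the db-query action):

```bash
# Hypothetical manual check against the staging database after a run.
PGPASSWORD="$POSTGRESQL_STAGING_PWD" psql \
  -h pdhstaging-pgsql -p 5432 -U prime -d prime_data_hub \
  -c "SELECT Date, Name, SUM(CostUsd) AS total_usd
        FROM Azure_Resource_Costs
       GROUP BY Date, Name
       ORDER BY Date DESC, total_usd DESC
       LIMIT 20;"
```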
6 changes: 3 additions & 3 deletions .github/workflows/frontend_ci.yml
@@ -114,7 +114,7 @@ jobs:
run: yarn run test:e2e --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
- name: Upload blob report to GitHub Actions Artifacts
if: ${{ !cancelled() }}
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
name: frontend-e2e-data--shard-${{ matrix.shardIndex }}
path: frontend-react/e2e-data
@@ -158,7 +158,7 @@ jobs:
cache-dependency-path: frontend-react/yarn.lock

- name: Download blob reports from GitHub Actions Artifacts
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
with:
path: frontend-react/e2e-data
pattern: frontend-e2e-data--shard-*
@@ -170,7 +170,7 @@
- name: Merge folders
run: mv ./playwright-report/* ./e2e-data/report && rmdir ./playwright-report && rm ./e2e-data/report/*.zip && rm -rf ./e2e-data/report/resources
- name: Upload final e2e-data
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
name: frontend-e2e-data--attempt-${{ github.run_attempt }}
path: frontend-react/e2e-data
4 changes: 2 additions & 2 deletions .github/workflows/release_to_github.yml
@@ -83,13 +83,13 @@ jobs:
fetch-depth: 0
- name: Download router artifact
if: ${{ needs.wf_vars.outputs.enable_builds == 'true' }}
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
with:
name: prime-router-build-${{ needs.wf_vars.outputs.version }}
path: archives/
- name: Download frontend artifact
if: ${{ needs.wf_vars.outputs.enable_builds == 'true' }}
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
with:
name: static-website-react-${{ needs.wf_vars.outputs.version }}
path: archives/
2 changes: 1 addition & 1 deletion .github/workflows/restore_databases.yml
@@ -311,7 +311,7 @@ jobs:
if: |
needs.pre_job.outputs.sink_env_name != 'prod' &&
(${{ matrix.db }} == 'prime_data_hub' || ${{ matrix.db }} == 'prime_data_hub_candidate')
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
name: clean_output_${{ matrix.db }}
path: clean_output_${{ matrix.db }}.html
2 changes: 1 addition & 1 deletion .github/workflows/sonarcloud.yml
@@ -52,7 +52,7 @@ jobs:

- name: "Get changed files with yaml"
id: changed-files-yaml
uses: tj-actions/changed-files@40853de9f8ce2d6cfdc73c1b96f14e22ba44aec4
uses: tj-actions/changed-files@e9772d140489982e0e3704fea5ee93d536f1e275
with:
files_yaml: |
frontend:
2 changes: 1 addition & 1 deletion .github/workflows/start_frontend_smoke.yml
@@ -41,7 +41,7 @@ jobs:
echo "::endgroup::"
- name: "Store E2E Results"
if: ${{ !cancelled() }}
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
name: smoke-data
path: frontend-react/e2e-data/
2 changes: 1 addition & 1 deletion .github/workflows/validate_terraform.yml
@@ -48,7 +48,7 @@ jobs:
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332

- name: Run Checkov action
uses: bridgecrewio/checkov-action@1b813e8f72afe2b6263a6ea10c873707e21ebe44
uses: bridgecrewio/checkov-action@c9421864e014ef6b8acfa35d0bf3c7e52c13ab10
with:
directory: operations/app/terraform
skip_check: CKV_AZURE_139,CKV_AZURE_137,CKV_AZURE_103,CKV_AZURE_104,CKV_AZURE_102,CKV_AZURE_130,CKV_AZURE_121,CKV_AZURE_67,CKV_AZURE_56,CKV_AZURE_17,CKV_AZURE_63,CKV_AZURE_18,CKV_AZURE_88,CKV_AZURE_65,CKV_AZURE_13,CKV_AZURE_66,CKV_AZURE_33,CKV_AZURE_35,CKV_AZURE_36,CKV_AZURE_98,CKV2_AZURE_1,CKV2_AZURE_15,CKV2_AZURE_21,CKV_AZURE_213,CKV_AZURE_59,CKV2_AZURE_33,CKV2_AZURE_32,CKV2_AZURE_28,CKV_AZURE_206,CKV_AZURE_42,CKV_AZURE_110,CKV_AZURE_109,CKV_AZURE_166,CKV2_AZURE_38,CKV2_AZURE_40,CKV2_AZURE_41,CKV_AZURE_235
@@ -373,7 +373,7 @@ test.describe(
await dailyDataPage.page.locator(".usa-table tbody").waitFor({ state: "visible" });
});

test("downloads the file", async ({ dailyDataPage }) => {
test.skip("downloads the file", async ({ dailyDataPage }) => {
await setDate(dailyDataPage.page, "#start-date", 14);
await setDate(dailyDataPage.page, "#end-date", 0);

@@ -7,7 +7,11 @@ import {
import { tableColumnDateTimeInRange, tableDataCellValue, TEST_ORG_IGNORE } from "../../../helpers/utils";
import { endDate, setDate, startDate } from "../../../pages/authenticated/daily-data";
import * as submissionHistory from "../../../pages/authenticated/submission-history";
import { openReportIdDetailPage, SubmissionHistoryPage } from "../../../pages/authenticated/submission-history";
import {
openReportIdDetailPage,
SubmissionHistoryPage,
URL_SUBMISSION_HISTORY,
} from "../../../pages/authenticated/submission-history";
import { test as baseTest } from "../../../test";

export interface SubmissionHistoryPageFixtures {
@@ -53,10 +57,6 @@ test.describe(
test.describe("admin user", () => {
test.use({ storageState: "e2e/.auth/admin.json" });

test.beforeAll(({ browserName }) => {
test.skip(browserName !== "chromium");
});

test.describe(`${TEST_ORG_IGNORE} org`, () => {
test("nav contains the 'Submission History' option", async ({ submissionHistoryPage }) => {
const navItems = submissionHistoryPage.page.locator(".usa-nav li");
@@ -119,22 +119,35 @@
});

test.describe("on 'Filter'", () => {
test("with 'From' date, 'To' date", async ({ submissionHistoryPage }) => {
const fromDate = await setDate(submissionHistoryPage.page, "#start-date", 180);
/**
* TODO: Fix. From/To fields appear to reset (and table data is unchanged)
* after clicking filter
*/
// eslint-disable-next-line playwright/no-skipped-test
test.skip("with 'From' date, 'To' date", async ({ submissionHistoryPage }) => {
const fromDate = await setDate(submissionHistoryPage.page, "#start-date", 7);
const toDate = await setDate(submissionHistoryPage.page, "#end-date", 0);

// Apply button is enabled
await submissionHistoryPage.filterButton.click();
await submissionHistoryPage.page.locator(".usa-table tbody").waitFor({ state: "visible" });

// Check that table data contains the dates/times that were selected
const areDatesInRange = await tableColumnDateTimeInRange(
submissionHistoryPage.page,
1,
fromDate,
toDate,
const responsePromise = await submissionHistoryPage.page.waitForResponse(
(res) => res.status() === 200 && res.url().includes(URL_SUBMISSION_HISTORY),
);
expect(areDatesInRange).toBe(true);

if (responsePromise) {
// Check that table data contains the dates/times that were selected
const areDatesInRange = await tableColumnDateTimeInRange(
submissionHistoryPage.page,
1,
fromDate,
toDate,
);

// eslint-disable-next-line playwright/no-conditional-expect
expect(areDatesInRange).toBe(true);
} else {
console.error("Request not received within the timeout period");
}
});

test("on 'clear' resets the dates", async ({ submissionHistoryPage }) => {
(Remaining changed files not loaded in this view.)
