diff --git a/.github/actions/check-file-format/action.yaml b/.github/actions/check-file-format/action.yaml new file mode 100644 index 0000000..4590427 --- /dev/null +++ b/.github/actions/check-file-format/action.yaml @@ -0,0 +1,9 @@ +name: Check File Format +runs: + using: "composite" + steps: + - name: Check File Format + shell: bash + run: | + export BRANCH_NAME=origin/${{ github.event.repository.default_branch }} + ./scripts/githooks/check-file-format.sh diff --git a/.github/actions/check-format/action.yml b/.github/actions/check-format/action.yml deleted file mode 100644 index 81d3c71..0000000 --- a/.github/actions/check-format/action.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: "Check Formats" -runs: - using: "composite" - steps: - - name: Check File Format - shell: bash - run: | - export BRANCH_NAME=origin/${{ github.event.repository.default_branch }} - ./scripts/githooks/editorconfig-pre-commit.sh - - - name: Check Markdown Format - shell: bash - run: | - export BRANCH_NAME=origin/${{ github.event.repository.default_branch }} - ./scripts/githooks/markdown-pre-commit.sh diff --git a/.github/actions/check-markdown-format/action.yaml b/.github/actions/check-markdown-format/action.yaml new file mode 100644 index 0000000..42e34eb --- /dev/null +++ b/.github/actions/check-markdown-format/action.yaml @@ -0,0 +1,9 @@ +name: Check Markdown Format +runs: + using: "composite" + steps: + - name: Check Markdown Format + shell: bash + run: | + export BRANCH_NAME=origin/${{ github.event.repository.default_branch }} + ./scripts/githooks/check-markdown-format.sh diff --git a/.github/actions/check-terraform-format/action.yaml b/.github/actions/check-terraform-format/action.yaml new file mode 100644 index 0000000..e664527 --- /dev/null +++ b/.github/actions/check-terraform-format/action.yaml @@ -0,0 +1,9 @@ +name: Check Terraform Format +runs: + using: "composite" + steps: + - name: Check Terraform Format + shell: bash + run: | + export BRANCH_NAME=origin/${{ 
github.event.repository.default_branch }} + ./scripts/githooks/check-terraform-format.sh diff --git a/.github/actions/cloc-repository/action.yaml b/.github/actions/cloc-repository/action.yaml new file mode 100644 index 0000000..b1c8def --- /dev/null +++ b/.github/actions/cloc-repository/action.yaml @@ -0,0 +1,9 @@ +name: Count lines of code in this repository +runs: + using: "composite" + steps: + - name: Count lines of code in this repository + shell: bash + run: | + export FORMAT=default + ./scripts/cloc-repository.sh diff --git a/.github/actions/get-metadata/action.yaml b/.github/actions/get-metadata/action.yaml new file mode 100644 index 0000000..17db5ae --- /dev/null +++ b/.github/actions/get-metadata/action.yaml @@ -0,0 +1,13 @@ +name: Get Metadata +runs: + using: "composite" + steps: + - name: Get Metadata + id: metadata + shell: bash + run: | + datetime=$(date -u +'%Y-%m-%dT%H:%M:%S%z') + echo "build_datetime=$datetime" >> $GITHUB_OUTPUT + echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT + echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT + echo "terraform_version=$(grep terraform .tool-versions | cut -f2 -d' ')" >> $GITHUB_OUTPUT diff --git a/.github/actions/scan-dependencies/action.yaml b/.github/actions/scan-dependencies/action.yaml new file mode 100644 index 0000000..a61aec0 --- /dev/null +++ b/.github/actions/scan-dependencies/action.yaml @@ -0,0 +1,23 @@ +name: Scan Dependencies +runs: + using: "composite" + steps: + - name: Generate SBOM File + shell: bash + run: ./scripts/sbom-generator.sh + + - name: Upload SBOM Artifact + uses: actions/upload-artifact@v3 + with: + name: dependency-scan + path: ./sbom-spdx.json + + - name: Run CVE Scanner + shell: bash + run: ./scripts/cve-scanner.sh + + - name: Upload CVE Artifact + uses: actions/upload-artifact@v3 + with: + name: dependency-scan + path: ./cve-scan.json diff --git a/.github/actions/scan-secrets/action.yaml 
b/.github/actions/scan-secrets/action.yaml new file mode 100644 index 0000000..b52247d --- /dev/null +++ b/.github/actions/scan-secrets/action.yaml @@ -0,0 +1,9 @@ +name: Scan Secrets +runs: + using: "composite" + steps: + - name: Scan Secrets + shell: bash + run: | + export ALL_FILES=true + ./scripts/githooks/scan-secrets.sh diff --git a/.github/actions/scan-secrets/action.yml b/.github/actions/scan-secrets/action.yml deleted file mode 100644 index 5623f5f..0000000 --- a/.github/actions/scan-secrets/action.yml +++ /dev/null @@ -1,9 +0,0 @@ -name: "Check Formats" -runs: - using: "composite" - steps: - - name: Scan Secrets - shell: bash - run: | - export ALL_FILES=true - ./scripts/githooks/secret-scan-pre-commit.sh diff --git a/.github/workflows/cicd-pipeline.yaml b/.github/workflows/cicd-pipeline.yaml index 7a84a34..db5848b 100644 --- a/.github/workflows/cicd-pipeline.yaml +++ b/.github/workflows/cicd-pipeline.yaml @@ -2,6 +2,8 @@ name: CI/CD Pipeline permissions: contents: read security-events: write + id-token: write + actions: write on: push: @@ -18,38 +20,64 @@ jobs: build_datetime: ${{ steps.metadata.outputs.build_datetime }} build_timestamp: ${{ steps.metadata.outputs.build_timestamp }} build_epoch: ${{ steps.metadata.outputs.build_epoch }} + terraform_version: ${{ steps.metadata.outputs.terraform_version }} steps: + - uses: actions/checkout@v3 + + - id: metadata - run: | - datetime=$(date -u +'%Y-%m-%dT%H:%M:%S%z') - echo "build_datetime=$datetime" >> $GITHUB_OUTPUT - echo "build_timestamp=$(date --date=$datetime -u +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT - echo "build_epoch=$(date --date=$datetime -u +'%s')" >> $GITHUB_OUTPUT - - precommit-checks: + name: Get Metadata + uses: ./.github/actions/get-metadata + + - id: cloc + name: Get Lines of Code + uses: ./.github/actions/cloc-repository + + formatting-checks: + needs: [get-metadata] + runs-on: ubuntu-latest + name: Formatting Checks + steps: + - uses: actions/checkout@v3 + with: + 
fetch-depth: 0 + + - name: Check File Format + uses: ./.github/actions/check-file-format + + - name: Check Markdown Format + uses: ./.github/actions/check-markdown-format + + - name: Check Terraform Format + uses: ./.github/actions/check-terraform-format + + security-scan: needs: [get-metadata] runs-on: ubuntu-latest - name: Pre-Commit Checks + name: Security Scanning steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: ./.github/actions/check-format/ - - uses: ./.github/actions/scan-secrets/ + - name: Scan Dependencies + uses: ./.github/actions/scan-dependencies + + - name: Scan Secrets + uses: ./.github/actions/scan-secrets + checkov: name: Checkov runs-on: ubuntu-latest - needs: [precommit-checks] + needs: [formatting-checks, security-scan] steps: - uses: actions/checkout@v3 - + - uses: actions/setup-python@v4 with: python-version: '3.11' - + - name: Install Latest Checkov id: install-checkov run: pip install --user checkov @@ -57,17 +85,17 @@ jobs: - name: Run Checkov id: run-checkov run: checkov --directory . 
-o sarif -s --quiet - + - name: Upload SARIF File uses: github/codeql-action/upload-sarif@v2 - if: always() && github.ref == format('refs/heads/{0}', github.event.repository.default_branch) + if: always() && github.ref == format('refs/heads/{0}', github.event.repository.default_branch) with: sarif_file: results.sarif - + tflint: name: TFLint runs-on: ubuntu-latest - needs: [precommit-checks] + needs: [formatting-checks, security-scan] steps: - uses: actions/checkout@v3 @@ -77,16 +105,101 @@ jobs: with: path: ~/.tflint.d/plugins key: tflint-${{ hashFiles('.tflint.hcl') }} - + - name: Setup TFLint uses: terraform-linters/setup-tflint@v3 with: tflint_version: v0.47.0 - + - name: Init TFLint run: tflint --init env: GITHUB_TOKEN: ${{ github.token }} - + - name: Run TFLint - run: tflint -f compact \ No newline at end of file + run: tflint -f compact + + build-example-app: + name: Build Example App + runs-on: ubuntu-latest + needs: [tflint, checkov] + steps: + - uses: actions/checkout@v3 + + - name: Install asdf & tools + uses: asdf-vm/actions/install@v2 + + - name: Install Example Dependencies + run: make example-install + + - name: Build Example App + run: make example-build + + - name: Zip OpenNext Deployment Assets + run: cd example/.open-next && zip -r ../../open-next.zip . 
-q + + - name: Store Build Artifacts + uses: actions/upload-artifact@v3 + with: + name: example-app-opennext-build + path: open-next.zip + + deploy: + name: Deploy Example App + runs-on: ubuntu-latest + needs: [build-example-app] + if: success() && github.ref_name == 'main' + concurrency: example-deploy + environment: + name: Example Application + url: https://terraform-aws-opennext.tools.engineering.england.nhs.uk/ + + steps: + - uses: actions/checkout@v3 + + - name: Install asdf & tools + uses: asdf-vm/actions/install@v2 + + - id: aws-credentials + name: Setup AWS Credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: ${{ secrets.DEPLOYMENT_IAM_ROLE }} + aws-region: eu-west-2 + + - name: Get Current Identity + run: aws sts get-caller-identity + + - name: Download Build Artifacts + uses: actions/download-artifact@v3 + with: + name: example-app-opennext-build + + - name: Unzip Build Artifacts to .open-next folder + run: unzip -q -d example/.open-next open-next.zip + + - name: Run Terraform Init + run: terraform -chdir=example/terraform init + + - name: Run Terraform Plan + run: terraform -chdir=example/terraform plan -out example-app.tfplan + + - name: Store Terraform Plan Artifact + uses: actions/upload-artifact@v3 + with: + name: example-app-tfplan-output + path: example/terraform/example-app.tfplan + + - name: Run Terraform Apply + run: terraform -chdir=example/terraform apply example-app.tfplan + + - name: Get CloudFront Distribution ID + id: get_distribution_id + run: echo "distribution_id=$(terraform -chdir=example/terraform output -raw cloudfront_distribution_id)" >> "$GITHUB_OUTPUT" + + - name: Trigger CloudFront Cache Invalidation + id: trigger_invalidation + run: echo "invalidation_id=$(aws cloudfront create-invalidation --distribution-id ${{ steps.get_distribution_id.outputs.distribution_id }} --paths '/*' --output text --query Invalidation.Id)" >> "$GITHUB_OUTPUT" + + - name: Wait for Invalidation + run: aws cloudfront 
wait invalidation-completed --distribution-id ${{ steps.get_distribution_id.outputs.distribution_id }} --id ${{ steps.trigger_invalidation.outputs.invalidation_id }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml deleted file mode 100644 index 7bacef2..0000000 --- a/.github/workflows/release.yaml +++ /dev/null @@ -1,36 +0,0 @@ -name: Release -run-name: Publish Release Version ${{ github.ref_name }} - -on: - push: - tags: - - "v*" - -jobs: - get_metadata: - name: Get Metadata - runs-on: ubuntu-latest - outputs: - prerelease: ${{ steps.determine_prerelease.outputs == 'true' }} - - steps: - - name: Determine if prerelease - id: determine_prerelease - run: | - if [[ "${{ github.ref_name }}" =~ ^v[0-9]*\.[0-9]*\.[0-9]*-.*?\.[0-9]*? ]]; then - echo "prerelease=true" >> "$GITHUB_OUTPUT" - else - echo "prerelease=false" >> "$GITHUB_OUTPUT" - fi - - publish_release: - name: Publish Release ${{ github.ref_name }} - needs: [get_metadata] - runs-on: ubuntu-latest - steps: - - name: Publish Release - uses: "marvinpinto/action-automatic-releases@v1.2.1" - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - automatic_release_tag: ${{ github.ref_name }} - prerelease: ${{ needs.get_metadata.outputs.prerelease }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index 026be5b..0000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# See https://pre-commit.com for more information -# See https://pre-commit.com/hooks.html for more hooks -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: 2.7.2 - hooks: - - id: editorconfig-checker - - - repo: https://github.com/markdownlint/markdownlint - rev: v0.12.0 - hooks: - - id: markdownlint_docker - - - repo: https://github.com/gitleaks/gitleaks - rev: v8.17.0 - hooks: - - 
id: gitleaks-docker - - - repo: https://github.com/antonbabenko/pre-commit-terraform - rev: v1.81.0 - hooks: - - id: terraform_tflint diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000..eac4bb0 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,5 @@ +nodejs 18.16.1 +yarn 1.22.19 +terraform 1.5.0 +pre-commit 3.3.3 +poetry 1.5.1 diff --git a/Makefile b/Makefile index 94d9e45..e33adc8 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,15 @@ -include scripts/makefile/Makefile.init +include scripts/init.mk + +config: + make \ + asdf-install \ + githooks-install \ + nodejs-install \ + terraform-install + +.SILENT: \ + config + ############### ## Constants ## @@ -60,8 +71,6 @@ example-install: check # Installs the dependencies for the example project example-build: example-clean # Builds the example Next.js application yarn --cwd example package - cp -r example/.open-next/* ${BUILD_FOLDER} - for f in ${BUILD_FOLDER}/*; do cd $$f; zip -rq $$f.zip . && cd -; rm -rf $$f; done tag-release: check-version build-cloudfront-logs-lambda git add . 
@@ -70,3 +79,12 @@ tag-release: check-version build-cloudfront-logs-lambda git tag ${version} git push --tags + +format-terraform: # Formats all Terraform Files + terraform fmt + terraform -chdir=modules/cloudfront-logs fmt + terraform -chdir=modules/opennext-assets fmt + terraform -chdir=modules/opennext-cloudfront fmt + terraform -chdir=modules/opennext-lambda fmt + terraform -chdir=modules/opennext-revalidation-queue fmt + terraform -chdir=example/terraform fmt diff --git a/README.md b/README.md index cb0cc4f..9e9b52c 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,7 @@ This is a Terraform module for deploying a Next.js application built with [OpenN - [OpenNext Terraform Module for AWS](#opennext-terraform-module-for-aws) - [Table of Contents](#table-of-contents) + - [Example](#example) - [Installation](#installation) - [Prerequisites](#prerequisites) - [Usage](#usage) @@ -16,20 +17,27 @@ This is a Terraform module for deploying a Next.js application built with [OpenN - [Contacts](#contacts) - [Licence](#licence) +## Example + +The example app in `example/` is deployed using the latest version of this Terraform module to [terraform-aws-opennext.tools.engineering.england.nhs.uk](https://terraform-aws-opennext.tools.engineering.england.nhs.uk/). + ## Installation -Copy and paste the following into your Terraform configuration, edit the variables, and then `terraform init`. +Copy and paste the following into your Terraform configuration, edit the variables, and then run `terraform init`. 
```tf module "opennext" { source = "nhs-england-tools/opennext/aws" - version = "0.0.1-alpha.5" + version = "1.0.0" # Use the latest release from https://github.com/nhs-england-tools/terraform-aws-opennext/releases - prefix = "opennext" - domain_name = "your-domain-name.com" - acm_certificate_arn = "arn:aws:acm:region:account:certificate/certificate_ID" - hosted_zone_id = "12345" - opennext_build_path = ".open-next" + prefix = "opennext" # Prefix for all created resources + opennext_build_path = "../.open-next" # Path to your .open-next folder + hosted_zone_id = data.aws_route53_zone.zone.zone_id # The Route53 hosted zone ID for your domain name + + cloudfront = { + aliases = [local.domain_name] # Your domain name + acm_certificate_arn = aws_acm_certificate_validation.ssl_certificate.certificate_arn # The ACM (SSL) certificate for your domain + } } ``` @@ -37,7 +45,7 @@ module "opennext" { The following software packages or their equivalents are expected to be installed -- [Terraform](https://developer.hashicorp.com/terraform/downloads) (>=1.3) +- [Terraform](https://developer.hashicorp.com/terraform/downloads) (>=1.5) ## Usage @@ -69,7 +77,6 @@ Describe or link templates on how to raise an issue, feature request or make a c - Thomas Judd-Cooper - [Email](mailto:thomas.judd-cooper1@nhs.net) - [GitHub](https://github.com/Tomdango) - ## Licence Unless stated otherwise, the codebase is released under the MIT License. This covers both the codebase and any sample code in the documentation. 
diff --git a/docs/adr/ADR-001 - Use git hook and GitHub action to check the .editorconfig compliance.md b/docs/adr/ADR-001 - Use git hook and GitHub action to check the .editorconfig compliance.md deleted file mode 100644 index a84bc71..0000000 --- a/docs/adr/ADR-001 - Use git hook and GitHub action to check the .editorconfig compliance.md +++ /dev/null @@ -1,142 +0,0 @@ -# ADR-001: Use git hook and GitHub Action to check the `.editorconfig` compliance - ->| | | ->| ------------ | --- | ->| Date | `15/05/2023` | ->| Status | `RFC` | ->| Deciders | `Engineering` | ->| Significance | `Construction techniques` | ->| Owners | `Dan Stefaniuk, Amaan Ibn-Nasar` | - ---- - -- [ADR-001: Use git hook and GitHub Action to check the `.editorconfig` compliance](#adr-001-use-git-hook-and-github-action-to-check-the-editorconfig-compliance) - - [Context](#context) - - [Decision](#decision) - - [Assumptions](#assumptions) - - [Drivers](#drivers) - - [Options](#options) - - [Options 1: The pre-commit project](#options-1-the-pre-commit-project) - - [Options 2a: Custom shell script](#options-2a-custom-shell-script) - - [Options 2b: Docker-based custom shell script](#options-2b-docker-based-custom-shell-script) - - [Option 3: A GitHub Action from the Marketplace](#option-3-a-github-action-from-the-marketplace) - - [Outcome](#outcome) - - [Rationale](#rationale) - - [Consequences](#consequences) - - [Compliance](#compliance) - - [Notes](#notes) - - [Tags](#tags) - -## Context - -As part of the Repository Template project a need for a simple text formatting feature using the [EditorConfig](https://editorconfig.org/) rules was identified that is accessible and consistent for all contributors. To ensure that formatting rules are applied, a compliance check has to be implemented on a developer workstation and as a part of the CI/CD pipeline. This will establish a fast feedback loop and a fallback option, if the former has not worked. 
- -## Decision - -### Assumptions - -This decision is based on the following assumptions that are used to form a set of generic requirements for the implementation as a guide. A solution should be - -- Cross-platform and portable, supporting systems like - - macOS - - Windows WSL (Ubuntu) - - Ubuntu and potentially other Linux distributions like Alpine -- Configurable - - can run on a file or a directory - - can be turned on/off entirely -- Run locally (aka developer workstation) and remotely (aka CI/CD pipeline) -- Reusable and avoid code duplication - -### Drivers - -Implementation of this compliance check (like text encoding, line endings, tabs vs. spaces etc.) will help with any potential debate or discussion, removing personal preferences and opinions, enabling teams to focus on delivering value to the product they work on. - -Other linting tools like for example [Prettier](https://prettier.io/) and [ESLint](https://eslint.org/) are not considered here as they are code formatting tools dedicated to specific technologies and languages. The main drivers for this decision are the style consistency across all files in the codebase and to eliminate any disruptive changes introduced based on preferences. EditorConfig rules are recognised and supported by most if not all major editors and IDEs. 
- -Here is the recommended ruleset: - -```console -charset = utf-8 -end_of_line = lf -indent_size = 2 -indent_style = space -insert_final_newline = true -trim_trailing_whitespace = true -``` - -### Options - -#### Options 1: The [pre-commit](https://pre-commit.com/) project - -- Pros - - Python is installed on most if not all platforms - - A pythonist friendly tool - - Well-documented -- Cons - - Dependency on Python even for a non-Python project - - Potential versioning issues with Python runtime and dependencies compatibility - - Lack of process isolation, access to resources with user-level privileges - - Dependency on multiple parties and plugins - -#### Options 2a: Custom shell script - -- Pros - - Execution environment is installed everywhere, no setup required - - Ease of maintainability and testability - - It is a simple solution -- Cons - - May potentially require more coding in Bash - - Requires shell scripting skills - -#### Options 2b: Docker-based custom shell script - -This option is an extension built upon option 2a. - -- Pros - - Cross-platform compatibility - - Isolation of the process dependencies and runtime - - Docker is an expected dependency for most/all projects -- Cons - - Requires Docker as a preinstalled dependency - - Requires basic Docker skills - -#### Option 3: A GitHub Action from the Marketplace - -- Pros - - Usage of a GitHub native functionality -- Cons - - Reliance on the GitHub DSL (coding in yaml) may lead to less portable solution - - Implementation of the functionality has to be duplicated for the git hook - -### Outcome - -The decision is to implement Option 2b. - -### Rationale - -A choice of shell scripting along with Docker offers a good support for simplicity, process isolation, portability across the operating systems and reuse of the same code and its configuration. This approach makes it consistent for a local environment and the CI/CD pipeline, where the process can be gated and compliance enforced. 
- -## Consequences - -As a result of the above decision - -- a single Bash script will be implemented -- it will be placed in the `scripts/githooks` directory -- the name of the file will be `editorconfig-pre-commit.sh` -- there will be a `pre-commit` runner included -- the GitHub Action will call the git hook `editorconfig-pre-commit.sh` script directly -- and a couple of `Makefile` targets like `config`, `githooks-install` will be implemented to bootstrap the project - -The intention of this decision is to guide any other git hook and GitHub Action implementations. - -## Compliance - -Both, the git hook and the GitHub Action should be executed automatically as part of the developer workflow. - -## Notes - -There is an emerging practice to use projects like [act](https://github.com/nektos/act) to make GitHub actions even more portable. The recommendation is for this tool to be assessed at further stages of the [nhs-england-tools/repository-template](https://github.com/nhs-england-tools/repository-template) project implementation, in the context of this decision record. 
- -## Tags - -`#maintainability, #testability, #simplicity, #security` diff --git a/docs/adr/ADR-002 - Scan repository for hardcoded secrets.md b/docs/adr/ADR-002 - Scan repository for hardcoded secrets.md deleted file mode 100644 index dded6ee..0000000 --- a/docs/adr/ADR-002 - Scan repository for hardcoded secrets.md +++ /dev/null @@ -1,98 +0,0 @@ -# ADR-002: Scan repository for hardcoded secrets - ->| | | ->| ------------ | --- | ->| Date | `31/05/2023` | ->| Status | `RFC` | ->| Deciders | `Engineering` | ->| Significance | `Construction techniques` | ->| Owners | `Dan Stefaniuk, Jon Pearce, Tamara Goldschmidt, Tim Rickwood` | - ---- - -- [ADR-002: Scan repository for hardcoded secrets](#adr-002-scan-repository-for-hardcoded-secrets) - - [Context](#context) - - [Decision](#decision) - - [Assumptions](#assumptions) - - [Drivers](#drivers) - - [Options](#options) - - [Outcome](#outcome) - - [Rationale](#rationale) - - [Consequences](#consequences) - - [Compliance](#compliance) - - [Notes](#notes) - - [Actions](#actions) - - [Tags](#tags) - -## Context - -- Review git-secrets decision - - To compare - - tools - - 1. https://github.com/awslabs/git-secrets - - 2. https://github.com/trufflesecurity/trufflehog - - 3. https://github.com/gitleaks/gitleaks - - repo metadata - - contributions (data and number) - - stars & forks - - language - - licence - - features - - scan whole history - - scan single commit - - predefined set of rules - - patterns: custom rules - - patterns: exclusions - - entropy detection - - run as a docker image (locally and in CI/CD pipeline) - -Describe the context and the problem statement. Is there a relationship to other decisions previously made? Are there any dependencies and/or constraints within which the decision will be made? Do these need to be reviewed or validated? 
Please, note that environmental limitations or restrictions such as accepted technology standards, commonly recognised and used patterns, engineering and architecture principles, organisation policies, governance and so on, may as an effect narrow down the choices. This should also be explicitly documented, as this is a point-in-time decision with the intention of being able to articulate it clearly and justify it later. - -## Decision - -### Assumptions - -Summarise the underlying assumptions in the environment in which you make the decision. This could be related to technology changes, forecast of the monetary and non-monetary costs, further delivery commitments, impactful external drivers etc., and any known unknowns that translate to risks. - -### Drivers - -List the decision drivers that motivate this change or course of action. This may include any identified risks and residual risks after applying the decision. - -### Options - -Consider a comprehensive set of alternative options; provide weighting if applicable. - -### Outcome - -State the decision outcome as a result of taking into account all of the above. Is it a reversible or irreversible decision? - -### Rationale - -Provide a rationale for the decision that is based on weighing the options to ensure that the same questions are not going to be asked again and again unless the decision needs to be superseded. - -For non-trivial decisions a comparison table can be useful for the reviewer. Decision criteria down one side, options across the top. You'll likely find decision criteria come from the Drivers section above. Effort can be an important driving factor. You may have an intuitive feel for this, but reviewers will not. T-shirt sizing the effort for each option may help communicate. - -## Consequences - -Describe the resulting context, after applying the decision. All the identified consequences should be listed here, not just the positive ones. Any decision comes with many implications. 
For example, it may introduce a need to make other decisions as an effect of cross-cutting concerns; it may impact structural or operational characteristics of the software, and influence non-functional requirements; as a result, some things may become easier or more difficult to do because of this change. What are the trade-offs? - -What are the conditions under which this decision no longer applies or becomes irrelevant? - -## Compliance - -Establish how the success is going to be measured. Once implemented, the effect might lend itself to be measured, therefore if appropriate a set of criteria for success could be established. Compliance checks of the decision can be manual or automated using a fitness function. If it is the latter this section can then specify how that fitness function would be implemented and whether there are any other changes to the codebase needed to measure this decision for compliance. - -## Notes - -Include any links to existing epics, decisions, dependencies, risks, and policies related to this decision record. This section could also include any further links to configuration items within the project or the codebase, signposting to the areas of change. - -It is important that if the decision is sub-optimal or the choice is tactical or misaligned with the strategic directions the risk related to it is identified and clearly articulated. As a result of that, the expectation is that a [Tech Debt](./tech-debt.md) record is going to be created on the backlog. 
- -## Actions - -- [x] name, date by, action -- [ ] name, date by, action - -## Tags - -`#maintainability, #testability, #simplicity, #security` diff --git a/example/package.json b/example/package.json index 328fd2b..aaf41c5 100644 --- a/example/package.json +++ b/example/package.json @@ -5,7 +5,7 @@ "scripts": { "dev": "next dev", "build": "next build", - "package": "open-next build && yarn package:functions", + "package": "open-next build", "start": "next start", "lint": "next lint" }, diff --git a/example/public/images/patrick.1200x1200.png:Zone.Identifier b/example/public/images/patrick.1200x1200.png:Zone.Identifier deleted file mode 100644 index 98ef56e..0000000 --- a/example/public/images/patrick.1200x1200.png:Zone.Identifier +++ /dev/null @@ -1,4 +0,0 @@ -[ZoneTransfer] -ZoneId=3 -ReferrerUrl=https://github.com/ -HostUrl=https://raw.githubusercontent.com/serverless-stack/open-next/main/example/public/images/patrick.1200x1200.png diff --git a/example/src/pages/404.tsx b/example/src/pages/404.tsx index f15f2f7..117ea15 100644 --- a/example/src/pages/404.tsx +++ b/example/src/pages/404.tsx @@ -1,9 +1,18 @@ import { NextPage } from "next"; +import Head from "next/head"; const PageNotFoundPage: NextPage = () => ( -
+ <> + + 404 Page Not Found - Next.js Feature Test App + + + + +

404

-
+
+ ); -export default PageNotFoundPage; \ No newline at end of file +export default PageNotFoundPage; diff --git a/example/src/pages/_document.tsx b/example/src/pages/_document.tsx index c1a98b0..89c6acb 100644 --- a/example/src/pages/_document.tsx +++ b/example/src/pages/_document.tsx @@ -1,5 +1,6 @@ import { Html, Head, Main, NextScript } from 'next/document' -import { Container, Header } from 'nhsuk-react-components' +import Link from 'next/link' +import { Container, Footer, Header } from 'nhsuk-react-components' export default function Document() { return ( @@ -15,6 +16,14 @@ export default function Document() {
+
+ + Home + + + © Crown Copyright {new Date().getFullYear()} + +
diff --git a/example/src/pages/font-next-font.tsx b/example/src/pages/font-next-font.tsx index 63e3024..78f8802 100644 --- a/example/src/pages/font-next-font.tsx +++ b/example/src/pages/font-next-font.tsx @@ -1,5 +1,6 @@ import { MuseoModerno } from "@next/font/google"; import { NextPage } from "next"; +import Head from "next/head"; const museo = MuseoModerno({ subsets: ["latin"], @@ -7,12 +8,20 @@ const museo = MuseoModerno({ }); const NextFontPage: NextPage = () => ( + <> + + Font - @next/font - Next.js Feature Test App + + + +

Font β€” @next/font

Test 1:

This uses default font.

This uses MuseoModerno font.

-
+ + ); -export default NextFontPage \ No newline at end of file +export default NextFontPage diff --git a/example/src/pages/image-html-tag.tsx b/example/src/pages/image-html-tag.tsx index 52dda22..98265ca 100644 --- a/example/src/pages/image-html-tag.tsx +++ b/example/src/pages/image-html-tag.tsx @@ -1,13 +1,22 @@ import { NextPage } from "next"; +import Head from "next/head"; const ImageHTMLTagPage: NextPage = () => ( -
+ <> + + Image using html image tag - Next.js Feature Test App + + + + +

Image using html image tag

Patrick

Test 1:

Original image dimension: 1200 x 1200. Check the dimension of the displayed image is also 1200 x 1200.

-
+
+ ); -export default ImageHTMLTagPage; \ No newline at end of file +export default ImageHTMLTagPage; diff --git a/example/src/pages/image-optimization-imported.tsx b/example/src/pages/image-optimization-imported.tsx index c909104..b5bd6a4 100644 --- a/example/src/pages/image-optimization-imported.tsx +++ b/example/src/pages/image-optimization-imported.tsx @@ -1,14 +1,23 @@ import Image from 'next/image' import pic from '../../public/images/patrick.1200x1200.png' import { NextPage } from 'next'; +import Head from 'next/head'; const ImageOptimizationImportedPage: NextPage = () => ( -
+ <> + + Image Optimization - Imported Image - Next.js Feature Test App + + + + +

Image Optimization

Patrick

Test 1:

Original image dimension: 1200 x 1200. Check the dimension of the displayed image is smaller than 1200 x 1200.

-
+
+ ); -export default ImageOptimizationImportedPage \ No newline at end of file +export default ImageOptimizationImportedPage diff --git a/example/src/pages/image-optimization-remote.tsx b/example/src/pages/image-optimization-remote.tsx index cb20077..2775106 100644 --- a/example/src/pages/image-optimization-remote.tsx +++ b/example/src/pages/image-optimization-remote.tsx @@ -1,13 +1,22 @@ import { NextPage } from 'next'; import Image from 'next/image' +import Head from 'next/head'; -const ImageOptimizationRemotePage: NextPage = () => ( -
-

Image Optimization

- Misty Forest -

Test 1:

-

Original image dimension: 2268 x 4032. Check the dimension of the displayed image is smaller than 256 x 455.

-
+const ImageOptimizationRemotePage: NextPage = () => ( + <> + + Image Optimization - Remote Image - Next.js Feature Test App + + + + +
+

Image Optimization

+ Misty Forest +

Test 1:

+

Original image dimension: 2268 x 4032. Check the dimension of the displayed image is smaller than 256 x 455.

+
+ ); -export default ImageOptimizationRemotePage \ No newline at end of file +export default ImageOptimizationRemotePage diff --git a/example/src/pages/index.tsx b/example/src/pages/index.tsx index f6b1ba8..417fe4f 100644 --- a/example/src/pages/index.tsx +++ b/example/src/pages/index.tsx @@ -1,12 +1,12 @@ import Head from 'next/head' import Link from 'next/link' -import { Container, ListPanel } from 'nhsuk-react-components' + export default function Home() { return ( <> - Create Next App - + Home - Next.js Feature Test App + @@ -16,7 +16,6 @@ export default function Home() {

This app contains a handful of pages. Each page implements a specific Next.js feature. Deploy this app. Then select a test below to check if the feature works.

Based upon the excellent OpenNext example Next.js app.

-
  • Static Site Generation (SSG)
  • diff --git a/example/src/pages/isr.tsx b/example/src/pages/isr.tsx index ecad241..201760a 100644 --- a/example/src/pages/isr.tsx +++ b/example/src/pages/isr.tsx @@ -1,4 +1,5 @@ import { GetStaticProps, NextPage } from "next"; +import Head from "next/head"; export const getStaticProps: GetStaticProps = () => { return { @@ -10,11 +11,19 @@ export const getStaticProps: GetStaticProps = () => { } const ISRPage: NextPage<{time: string}> = ({time}) => ( -
    + <> + + Incremental Static Rendering (ISR) - Next.js Feature Test App + + + + +

    Incremental Static Rendering (ISR)

    Test 1:

    This timestamp πŸ‘‰ {time} should change every 10 seconds when the page is repeatedly refreshed.

    -
    +
    + ) -export default ISRPage; \ No newline at end of file +export default ISRPage; diff --git a/example/src/pages/middleware-geolocation.tsx b/example/src/pages/middleware-geolocation.tsx index 69760f0..c569dd9 100644 --- a/example/src/pages/middleware-geolocation.tsx +++ b/example/src/pages/middleware-geolocation.tsx @@ -1,4 +1,5 @@ import { GetServerSideProps, NextPage } from "next"; +import Head from "next/head"; export const getServerSideProps: GetServerSideProps = async (context) => { return { @@ -9,11 +10,19 @@ export const getServerSideProps: GetServerSideProps = async (context) => { } const GeolocationMiddlewarePage: NextPage<{qs: string}> = ({ qs }) => ( -
    -

    Middleware - geolocation

    -

    Test 1:

    -

    URL query contains country, city, and region: {qs}

    -
    + <> + + Middleware - Geolocation - Next.js Feature Test App + + + + +
    +

    Middleware - geolocation

    +

    Test 1:

    +

    URL query contains country, city, and region: {qs}

    +
    + ); -export default GeolocationMiddlewarePage; \ No newline at end of file +export default GeolocationMiddlewarePage; diff --git a/example/src/pages/middleware-redirect-destination.tsx b/example/src/pages/middleware-redirect-destination.tsx index 95e3186..963cb37 100644 --- a/example/src/pages/middleware-redirect-destination.tsx +++ b/example/src/pages/middleware-redirect-destination.tsx @@ -1,10 +1,19 @@ import { NextPage } from "next"; +import Head from "next/head"; const MiddlewareRedirectDestinationPage: NextPage = () => ( -
    + <> + + Middleware - Redirect - Next.js Feature Test App + + + + +

    Middleware - redirect

    If you see this page, Middleware with redirect is working.

    -
    +
    + ); -export default MiddlewareRedirectDestinationPage; \ No newline at end of file +export default MiddlewareRedirectDestinationPage; diff --git a/example/src/pages/middleware-redirect.tsx b/example/src/pages/middleware-redirect.tsx index 7567b2d..fb9fc52 100644 --- a/example/src/pages/middleware-redirect.tsx +++ b/example/src/pages/middleware-redirect.tsx @@ -1,11 +1,20 @@ import { NextPage } from "next"; +import Head from "next/head"; const MiddlewareRedirectPage: NextPage = () => ( -
    + <> + + Middleware - Redirect - Next.js Feature Test App + + + + +

    Middleware - redirect

    Test 1:

    If you see this page, Middleware with redirect is NOT working. You should be redirected to /middleware-redirect-destination.

    -
    +
    + ); -export default MiddlewareRedirectPage \ No newline at end of file +export default MiddlewareRedirectPage diff --git a/example/src/pages/middleware-rewrite.tsx b/example/src/pages/middleware-rewrite.tsx index 509c74d..56b94c9 100644 --- a/example/src/pages/middleware-rewrite.tsx +++ b/example/src/pages/middleware-rewrite.tsx @@ -1,4 +1,5 @@ import { GetServerSideProps, NextPage } from "next"; +import Head from "next/head"; export const getServerSideProps: GetServerSideProps = async (context) => { return { @@ -9,13 +10,21 @@ export const getServerSideProps: GetServerSideProps = async (context) => { }, }; } - + const MiddlewareRewritePage: NextPage<{isRewritten: boolean}> = ({ isRewritten }) => ( -
    + <> + + Middleware - Rewrite - Next.js Feature Test App + + + + +

    Middleware - rewrite

    Test 1:

    URL is rewritten {isRewritten}

    -
    +
    + ); export default MiddlewareRewritePage; diff --git a/example/src/pages/middleware-set-header.tsx b/example/src/pages/middleware-set-header.tsx index bd527c3..c46c125 100644 --- a/example/src/pages/middleware-set-header.tsx +++ b/example/src/pages/middleware-set-header.tsx @@ -1,21 +1,30 @@ import { GetServerSideProps, NextPage } from "next"; +import Head from "next/head"; export const getServerSideProps: GetServerSideProps = async (context) => { return { props: { - isMiddlewareHeaderSet: + isMiddlewareHeaderSet: context.req.headers["x-hello-from-middleware-1"] === "hello" ? "yes" : "no", }, }; } - + const MiddlewareSetHeaderPage: NextPage<{isMiddlewareHeaderSet: boolean}> = ({ isMiddlewareHeaderSet }) => ( -
    -

    Middleware - set header

    -

    Test 1:

    -

    Is middleware header set? {isMiddlewareHeaderSet}

    -
    + <> + + Middleware - Set Header - Next.js Feature Test App + + + + +
    +

    Middleware - set header

    +

    Test 1:

    +

    Is middleware header set? {isMiddlewareHeaderSet}

    +
    + ); -export default MiddlewareSetHeaderPage; \ No newline at end of file +export default MiddlewareSetHeaderPage; diff --git a/example/src/pages/ssg-dynamic-fallback/[id].tsx b/example/src/pages/ssg-dynamic-fallback/[id].tsx index 2c6431f..e197798 100644 --- a/example/src/pages/ssg-dynamic-fallback/[id].tsx +++ b/example/src/pages/ssg-dynamic-fallback/[id].tsx @@ -1,4 +1,5 @@ import { GetStaticPaths, GetStaticProps, NextPage } from "next"; +import Head from "next/head"; type Post = {id: string, title: string} const posts: Post[] = [{ @@ -23,15 +24,23 @@ export const getStaticProps: GetStaticProps = async ({ params }) => { } const Post: NextPage<{data: Post, time: number}> = ({ data, time }) => ( + <> + + Static Site Generation with dynamic route fallback - Next.js Feature Test App + + + +
    -

    Static Site Generation with dynamic route fallback

    -

    Test 1

    -

    This timestamp πŸ‘‰ {time} should be when the `npx open-next build` was run, not when the page is refreshed. Hence, this time should not change on refresh.

    -

    Test 2:

    -

    This string πŸ‘‰ "{data && data.title}" should be "First post"

    -

    Test 3:

    -

    Check your browser's developer console. First request might show cache MISS on first load. Subsequent refreshes should shows cache HIT.

    +

    Static Site Generation with dynamic route fallback

    +

    Test 1

    +

    This timestamp πŸ‘‰ {time} should be when the `npx open-next build` was run, not when the page is refreshed. Hence, this time should not change on refresh.

    +

    Test 2:

    +

    This string πŸ‘‰ "{data && data.title}" should be "First post"

    +

    Test 3:

    +

    Check your browser's developer console. First request might show cache MISS on first load. Subsequent refreshes should shows cache HIT.

    + ); -export default Post; \ No newline at end of file +export default Post; diff --git a/example/src/pages/ssg-dynamic/[id].tsx b/example/src/pages/ssg-dynamic/[id].tsx index 2ba5d60..cd4c80a 100644 --- a/example/src/pages/ssg-dynamic/[id].tsx +++ b/example/src/pages/ssg-dynamic/[id].tsx @@ -1,4 +1,5 @@ import { GetStaticPaths, GetStaticProps, NextPage } from "next"; +import Head from "next/head"; type Post = {id: string, title: string} const posts: Post[] = [{ @@ -23,7 +24,14 @@ export const getStaticProps: GetStaticProps = ({params}) => { } const Post: NextPage<{data: Post, time: number}> = ({data, time}) => ( -
    + <> + + Static Site Generation with dynamic routes - Next.js Feature Test App + + + + +

    Static Site Generation with dynamic routes

    Test 1:

    This timestamp πŸ‘‰ {time} should be when the `npx open-next build` was run, not when the page is refreshed. Hence, this time should not change on refresh.

    @@ -31,7 +39,8 @@ const Post: NextPage<{data: Post, time: number}> = ({data, time}) => (

    This string πŸ‘‰ "{data.title}" should be "First post".

    Test 3:

    Check your browser's developer console. First request might show cache MISS on first load. Subsequent refreshes should shows cache HIT.

    -
    +
    + ) -export default Post; \ No newline at end of file +export default Post; diff --git a/example/src/pages/ssg.tsx b/example/src/pages/ssg.tsx index 2a8ab5e..13b28dd 100644 --- a/example/src/pages/ssg.tsx +++ b/example/src/pages/ssg.tsx @@ -1,4 +1,5 @@ import { GetStaticProps, NextPage } from "next"; +import Head from "next/head"; import React from "react"; export const getStaticProps: GetStaticProps = async () => { @@ -10,13 +11,21 @@ export const getStaticProps: GetStaticProps = async () => { } const SSGPage: NextPage<{time: string}> = ({time}) => ( -
    + <> + + Static Site Generation (SSG) - Next.js Feature Test App + + + + +

    Static Site Generation (SSG)

    Test 1:

    This timestamp πŸ‘‰ {time} should be when the `npx open-next build` was run, not when the page is refreshed. Hence, this time should not change on refresh.

    Test 2:

    Check your browser's developer console. the request might show cache MISS on first load. Subsequent refreshes should shows cache HIT.

    -
    +
    + ) -export default SSGPage \ No newline at end of file +export default SSGPage diff --git a/example/src/pages/ssr-not-found.tsx b/example/src/pages/ssr-not-found.tsx index b542b96..a8e3002 100644 --- a/example/src/pages/ssr-not-found.tsx +++ b/example/src/pages/ssr-not-found.tsx @@ -1,4 +1,5 @@ import { GetServerSideProps, NextPage } from "next"; +import Head from "next/head"; export const getServerSideProps: GetServerSideProps = async () => { return { @@ -7,11 +8,19 @@ export const getServerSideProps: GetServerSideProps = async () => { } const SSRNotFoundPage: NextPage = () => ( + <> + + SSR Not Found - Next.js Feature Test App + + + +
    -

    SSR - Server Side Rendering

    -

    Test 1:

    -

    If you see this page, SSR with notFound is NOT working.

    +

    SSR - Server Side Rendering

    +

    Test 1:

    +

    If you see this page, SSR with notFound is NOT working.

    + ); -export default SSRNotFoundPage \ No newline at end of file +export default SSRNotFoundPage diff --git a/example/src/pages/ssr-redirect-destination.tsx b/example/src/pages/ssr-redirect-destination.tsx index 66f4fc0..858f1ea 100644 --- a/example/src/pages/ssr-redirect-destination.tsx +++ b/example/src/pages/ssr-redirect-destination.tsx @@ -1,11 +1,20 @@ import { NextPage } from "next"; +import Head from "next/head"; const SSRDestinationPage: NextPage = () => ( -
    + <> + + SSR Redirect Destination - Next.js Feature Test App + + + + +

    Server Side Rendering - redirect

    Test 1:

    If you see this page, SSR with redirect is working.

    -
    +
    + ); -export default SSRDestinationPage \ No newline at end of file +export default SSRDestinationPage diff --git a/example/src/pages/ssr-redirect.tsx b/example/src/pages/ssr-redirect.tsx index 7ddf9cb..b188d98 100644 --- a/example/src/pages/ssr-redirect.tsx +++ b/example/src/pages/ssr-redirect.tsx @@ -1,4 +1,5 @@ import { GetServerSideProps, NextPage } from "next"; +import Head from "next/head"; export const getServerSideProps: GetServerSideProps = async () => { return { @@ -8,13 +9,21 @@ export const getServerSideProps: GetServerSideProps = async () => { }, }; } - + const SSRRedirectPage: NextPage = () => ( + <> + + SSR Redirect - Next.js Feature Test App + + + +
    -

    Server Side Rendering - redirect

    -

    Test 1:

    -

    If you see this page, SSR with redirect is NOT working. You should be redirected to /ssr-redirect-destination.

    -
    +

    Server Side Rendering - redirect

    +

    Test 1:

    +

    If you see this page, SSR with redirect is NOT working. You should be redirected to /ssr-redirect-destination.

    + + ); -export default SSRRedirectPage \ No newline at end of file +export default SSRRedirectPage diff --git a/example/src/pages/ssr.tsx b/example/src/pages/ssr.tsx index e24775b..131294a 100644 --- a/example/src/pages/ssr.tsx +++ b/example/src/pages/ssr.tsx @@ -1,4 +1,5 @@ import { GetServerSideProps, NextPage } from "next"; +import Head from "next/head"; export const getServerSideProps: GetServerSideProps = async () => { return { @@ -7,14 +8,21 @@ export const getServerSideProps: GetServerSideProps = async () => { }, }; } - + const SSRPage: NextPage<{time: number}> = ({ time }) => ( + <> + + Server Side Rendering (SSR) - Next.js Feature Test App + + + +
    -

    Server Side Rendering (SSR)

    -

    Test 1

    -

    This timestamp πŸ‘‰ {time} should change every time the page is refreshed, because the page is rendered on the server on every request.

    +

    Server Side Rendering (SSR)

    +

    Test 1

    +

    This timestamp πŸ‘‰ {time} should change every time the page is refreshed, because the page is rendered on the server on every request.

    + ); export default SSRPage - \ No newline at end of file diff --git a/example/terraform/data.tf b/example/terraform/data.tf new file mode 100644 index 0000000..64e4e65 --- /dev/null +++ b/example/terraform/data.tf @@ -0,0 +1,4 @@ +data "aws_route53_zone" "zone" { + name = local.domain_name + private_zone = false +} diff --git a/example/terraform/main.tf b/example/terraform/main.tf index 3ad3b31..23a681a 100644 --- a/example/terraform/main.tf +++ b/example/terraform/main.tf @@ -1,31 +1,51 @@ terraform { required_version = "~> 1.5" + required_providers { aws = { source = "hashicorp/aws" version = "~> 4.0" } } + + backend "s3" { + bucket = "nhs-england-tools-terraform-state-store" + key = "terraform-aws-opennext/example.tfstate" + region = "eu-west-2" + dynamodb_table = "nhs-england-tools-terraform-state-lock" + } } -provider "aws" { - alias = "global" - region = "us-east-1" +locals { + domain_name = "terraform-aws-opennext.tools.engineering.england.nhs.uk" + default_tags = { + Project = "terraform-aws-opennext" + Environment = "example" + } } -data "aws_route53_zone" "zone" { - name = local.domain_name - private_zone = false +provider "aws" { + region = "eu-west-2" + + default_tags { + tags = local.default_tags + } } -locals { - domain_name = "opennext-example.tomjc.dev" +provider "aws" { + alias = "global" + region = "us-east-1" + + default_tags { + tags = local.default_tags + } } module "opennext" { source = "../../" - prefix = "opennext-example" + prefix = "terraform-aws-opennext-example" + default_tags = local.default_tags opennext_build_path = "../.open-next" hosted_zone_id = data.aws_route53_zone.zone.zone_id diff --git a/locals.tf b/locals.tf new file mode 100644 index 0000000..0a6731e --- /dev/null +++ b/locals.tf @@ -0,0 +1,264 @@ +locals { + opennext_abs_path = "${abspath(path.root)}/${var.opennext_build_path}" +} + +locals { + /** + * CloudFront Options + **/ + cloudfront = { + aliases = var.cloudfront.aliases + acm_certificate_arn = 
var.cloudfront.acm_certificate_arn + assets_paths = coalesce(var.cloudfront.assets_paths, []) + custom_headers = coalesce(var.cloudfront.custom_headers, []) + geo_restriction = coalesce(try(var.cloudfront.geo_restriction, null), { + restriction_type = "none" + locations = [] + }) + cors = merge({ + allow_credentials = false, + allow_headers = ["*"], + allow_methods = ["ALL"], + allow_origins = ["*"], + origin_override = true + }, var.cloudfront.cors) + hsts = merge({ + access_control_max_age_sec = 31536000 + include_subdomains = true + override = true + preload = true + }, var.cloudfront.hsts) + waf_logging_configuration = var.cloudfront.waf_logging_configuration + cache_policy = { + default_ttl = coalesce(try(var.cloudfront.cache_policy.default_ttl, null), 0) + min_ttl = coalesce(try(var.cloudfront.cache_policy.min_ttl, null), 0) + max_ttl = coalesce(try(var.cloudfront.cache_policy.max_ttl, null), 31536000) + enable_accept_encoding_brotli = try(var.cloudfront.cache_policy.enable_accept_encoding_brotli, true) + enable_accept_encoding_gzip = try(var.cloudfront.cache_policy.enable_accept_encoding_gzip, true) + cookies_config = merge({ + cookie_behavior = "all" + }, try(var.cloudfront.cache_policy.cookies_config, {})) + headers_config = merge({ + header_behavior = "whitelist", + items = [] + }, try(var.cloudfront.cache_policy.headers_config, {})) + query_strings_config = merge({ + query_string_behavior = "all", + items = [] + }, try(var.cloudfront.cache_policy.query_strings_config, {})) + } + origin_request_policy = try(var.cloudfront.origin_request_policy, null) + } + + /** + * Server Function Options + **/ + server_options = { + package = { + source_dir = coalesce(try(var.server_options.package.source_dir, null), "${local.opennext_abs_path}/server-function/") + output_dir = coalesce(try(var.server_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") + } + + function = { + function_name = try(var.server_options.function.function_name, null) + 
description = coalesce(try(var.server_options.function.description, null), "Next.js Server") + handler = coalesce(try(var.server_options.function.handler, null), "index.handler") + runtime = coalesce(try(var.server_options.function.runtime, null), "nodejs18.x") + architectures = coalesce(try(var.server_options.function.architectures, null), ["arm64"]) + memory_size = coalesce(try(var.server_options.function.memory_size, null), 512) + timeout = coalesce(try(var.server_options.function.timeout, null), 30) + publish = coalesce(try(var.server_options.function.publish, null), false) + dead_letter_config = try(var.server_options.function.dead_letter_config, null) + reserved_concurrent_executions = coalesce(try(var.server_options.function.reserved_concurrent_executions, null), 10) + code_signing_config = try(var.server_options.function.code_signing_config, null) + } + + log_group = { + retention_in_days = coalesce(try(var.server_options.log_group.retention_in_days, null), 365) + kms_key_id = try(var.server_options.log_group.kms_key_id, null) + } + + networking = { + vpc_id = try(var.server_options.networking.vpc_id, null) + subnet_ids = coalesce(try(var.server_options.networking.subnet_ids, null), []) + security_group_ingress_rules = coalesce(try(var.server_options.networking.sg_ingress_rules, null), []) + security_group_egress_rules = coalesce(try(var.server_options.networking.sg_egress_rules, null), []) + } + + environment_variables = merge({ + CACHE_BUCKET_NAME = module.assets.assets_bucket.bucket + CACHE_BUCKET_KEY_PREFIX = "cache" + CACHE_BUCKET_REGION = data.aws_region.current.name + REVALIDATION_QUEUE_URL = module.revalidation_queue.queue.url + REVALIDATION_QUEUE_REGION = data.aws_region.current.name + }, coalesce(try(var.server_options.environment_variables, null), {})) + + iam_policy_statements = concat([ + { + effect = "Allow" + actions = ["s3:GetObject", "s3:PutObject", "s3:ListObjects"] + resources = [module.assets.assets_bucket.arn, 
"${module.assets.assets_bucket.arn}/*"] + }, + { + effect = "Allow" + actions = ["sqs:SendMessage"] + resources = [module.revalidation_queue.queue.arn] + }, + { + effect = "Allow" + actions = ["kms:GenerateDataKey", "kms:Decrypt"] + resources = [module.revalidation_queue.queue_kms_key.arn] + } + ], coalesce(try(var.server_options.iam_policy, null), [])) + } + + /** + * Image Optimization Function Options + **/ + image_optimization_options = { + package = { + source_dir = coalesce(try(var.image_optimization_options.package.source_dir, null), "${local.opennext_abs_path}/image-optimization-function/") + output_dir = coalesce(try(var.image_optimization_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") + } + + function = { + function_name = try(var.image_optimization_options.function.function_name, null) + description = coalesce(try(var.image_optimization_options.function.description, null), "Next.js Image Optimization") + handler = coalesce(try(var.image_optimization_options.function.handler, null), "index.handler") + runtime = coalesce(try(var.image_optimization_options.function.runtime, null), "nodejs18.x") + architectures = coalesce(try(var.image_optimization_options.function.architectures, null), ["arm64"]) + memory_size = coalesce(try(var.image_optimization_options.function.memory_size, null), 512) + timeout = coalesce(try(var.image_optimization_options.function.timeout, null), 30) + publish = coalesce(try(var.image_optimization_options.function.publish, null), false) + dead_letter_config = try(var.image_optimization_options.function.dead_letter_config, null) + reserved_concurrent_executions = coalesce(try(var.image_optimization_options.function.reserved_concurrent_executions, null), 3) + code_signing_config = try(var.image_optimization_options.function.code_signing_config, null) + } + + log_group = { + retention_in_days = coalesce(try(var.image_optimization_options.log_group.retention_in_days, null), 365) + kms_key_id = 
try(var.image_optimization_options.log_group.kms_key_id, null) + } + + networking = { + vpc_id = try(var.image_optimization_options.networking.vpc_id, null) + subnet_ids = coalesce(try(var.image_optimization_options.networking.subnet_ids, null), []) + security_group_ingress_rules = coalesce(try(var.image_optimization_options.networking.sg_ingress_rules, null), []) + security_group_egress_rules = coalesce(try(var.image_optimization_options.networking.sg_egress_rules, null), []) + } + + environment_variables = merge({ + BUCKET_NAME = module.assets.assets_bucket.bucket, + BUCKET_KEY_PREFIX = "assets" + }, coalesce(try(var.image_optimization_options.environment_variables, null), {})) + + iam_policy_statements = concat([ + { + effect = "Allow" + actions = ["s3:GetObject"] + resources = [module.assets.assets_bucket.arn, "${module.assets.assets_bucket.arn}/*"] + } + ], coalesce(try(var.image_optimization_options.iam_policy, null), [])) + } + + + /** + * ISR Revalidation Function Options + **/ + revalidation_options = { + package = { + source_dir = coalesce(try(var.revalidation_options.package.source_dir, null), "${local.opennext_abs_path}/revalidation-function/") + output_dir = coalesce(try(var.revalidation_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") + } + + function = { + function_name = try(var.revalidation_options.function.function_name, null) + description = coalesce(try(var.revalidation_options.function.description, null), "Next.js ISR Revalidation Function") + handler = coalesce(try(var.revalidation_options.function.handler, null), "index.handler") + runtime = coalesce(try(var.revalidation_options.function.runtime, null), "nodejs18.x") + architectures = coalesce(try(var.revalidation_options.function.architectures, null), ["arm64"]) + memory_size = coalesce(try(var.revalidation_options.function.memory_size, null), 128) + timeout = coalesce(try(var.revalidation_options.function.timeout, null), 30) + publish = 
coalesce(try(var.revalidation_options.function.publish, null), false) + dead_letter_config = try(var.revalidation_options.function.dead_letter_config, null) + reserved_concurrent_executions = coalesce(try(var.revalidation_options.function.reserved_concurrent_executions, null), 3) + code_signing_config = try(var.revalidation_options.function.code_signing_config, null) + } + + log_group = { + retention_in_days = coalesce(try(var.revalidation_options.log_group.retention_in_days, null), 365) + kms_key_id = try(var.revalidation_options.log_group.kms_key_id, null) + } + + networking = { + vpc_id = try(var.revalidation_options.networking.vpc_id, null) + subnet_ids = coalesce(try(var.revalidation_options.networking.subnet_ids, null), []) + security_group_ingress_rules = coalesce(try(var.revalidation_options.networking.sg_ingress_rules, null), []) + security_group_egress_rules = coalesce(try(var.revalidation_options.networking.sg_egress_rules, null), []) + } + + environment_variables = coalesce(try(var.revalidation_options.environment_variables, null), {}) + + iam_policy_statements = concat([ + { + effect = "Allow" + actions = ["sqs:ReceiveMessage", "sqs:DeleteMessage", "sqs:GetQueueAttributes"] + resources = [module.revalidation_queue.queue.arn] + }, + { + effect = "Allow" + actions = ["kms:Decrypt", "kms:DescribeKey"] + resources = [module.revalidation_queue.queue_kms_key.arn] + } + ], coalesce(try(var.revalidation_options.iam_policy, null), [])) + } + + /** + * Warmer Function Options + **/ + warmer_options = { + package = { + source_dir = coalesce(try(var.warmer_options.package.source_dir, null), "${local.opennext_abs_path}/warmer-function/") + output_dir = coalesce(try(var.warmer_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") + } + + function = { + function_name = try(var.warmer_options.function.function_name, null) + description = coalesce(try(var.warmer_options.function.description, null), "Next.js Warmer Function") + handler = 
coalesce(try(var.warmer_options.function.handler, null), "index.handler") + runtime = coalesce(try(var.warmer_options.function.runtime, null), "nodejs18.x") + architectures = coalesce(try(var.warmer_options.function.architectures, null), ["arm64"]) + memory_size = coalesce(try(var.warmer_options.function.memory_size, null), 128) + timeout = coalesce(try(var.warmer_options.function.timeout, null), 30) + publish = coalesce(try(var.warmer_options.function.publish, null), false) + dead_letter_config = try(var.warmer_options.function.dead_letter_config, null) + reserved_concurrent_executions = coalesce(try(var.warmer_options.function.reserved_concurrent_executions, null), 3) + code_signing_config = try(var.warmer_options.function.code_signing_config, null) + } + + log_group = { + retention_in_days = coalesce(try(var.warmer_options.log_group.retention_in_days, null), 365) + kms_key_id = try(var.warmer_options.log_group.kms_key_id, null) + } + + networking = { + vpc_id = try(var.warmer_options.networking.vpc_id, null) + subnet_ids = coalesce(try(var.warmer_options.networking.subnet_ids, null), []) + security_group_ingress_rules = coalesce(try(var.warmer_options.networking.sg_ingress_rules, null), []) + security_group_egress_rules = coalesce(try(var.warmer_options.networking.sg_egress_rules, null), []) + } + + environment_variables = merge({ + FUNCTION_NAME = module.server_function.lambda_function.function_name, + CONCURRENCY = 1 + }, coalesce(try(var.warmer_options.environment_variables, null), {})) + + iam_policy_statements = concat([ + { + effect = "Allow" + actions = ["lambda:InvokeFunction"] + resources = [module.server_function.lambda_function.arn] + } + ], coalesce(try(var.warmer_options.iam_policy, null), [])) + } +} diff --git a/main.tf b/main.tf index bb27a09..d31053b 100644 --- a/main.tf +++ b/main.tf @@ -9,15 +9,6 @@ terraform { } } -provider "aws" { - alias = "global" - region = "us-east-1" -} - -locals { - opennext_abs_path = 
"${abspath(path.root)}/${var.opennext_build_path}" -} - data "aws_caller_identity" "current" {} data "aws_region" "current" {} @@ -25,7 +16,8 @@ data "aws_region" "current" {} * Assets & Cache S3 Bucket **/ module "assets" { - source = "./modules/opennext-assets" + source = "./modules/opennext-assets" + default_tags = var.default_tags prefix = "${var.prefix}-assets" assets_path = "${local.opennext_abs_path}/assets" @@ -37,59 +29,9 @@ module "assets" { /** * Next.js Server Function **/ -locals { - server_options = { - package = { - source_dir = coalesce(try(var.server_options.package.source_dir, null), "${local.opennext_abs_path}/server-function/") - output_dir = coalesce(try(var.server_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") - } - - function = { - function_name = try(var.server_options.function.function_name, null) - description = coalesce(try(var.server_options.function.description, null), "Next.js Server") - handler = coalesce(try(var.server_options.function.handler, null), "index.handler") - runtime = coalesce(try(var.server_options.function.runtime, null), "nodejs18.x") - architectures = coalesce(try(var.server_options.function.architectures, null), ["arm64"]) - memory_size = coalesce(try(var.server_options.function.memory_size, null), 1024) - timeout = coalesce(try(var.server_options.function.timeout, null), 30) - publish = coalesce(try(var.server_options.function.publish, null), true) - dead_letter_config = try(var.server_options.function.dead_letter_config, null) - reserved_concurrent_executions = coalesce(try(var.server_options.function.reserved_concurrent_executions, null), 10) - code_signing_config = try(var.server_options.function.code_signing_config, null) - } - - networking = { - vpc_id = try(var.server_options.networking.vpc_id, null) - subnet_ids = coalesce(try(var.server_options.networking.subnet_ids, null), []) - security_group_ingress_rules = coalesce(try(var.server_options.networking.sg_ingress_rules, null), []) - 
security_group_egress_rules = coalesce(try(var.server_options.networking.sg_egress_rules, null), []) - } - - environment_variables = merge({ - CACHE_BUCKET_NAME = module.assets.assets_bucket.bucket - CACHE_BUCKET_KEY_PREFIX = "cache" - CACHE_BUCKET_REGION = data.aws_region.current.name - REVALIDATION_QUEUE_URL = module.revalidation_queue.queue.url - REVALIDATION_QUEUE_REGION = data.aws_region.current.name - }, coalesce(try(var.server_options.environment_variables, null), {})) - - iam_policy_statements = concat([ - { - effect = "Allow" - actions = ["s3:GetObject", "s3:PutObject", "s3:ListObjects"] - resources = [module.assets.assets_bucket.arn, "${module.assets.assets_bucket.arn}/*"] - }, - { - effect = "Allow" - actions = ["sqs:SendMessage"] - resources = [module.revalidation_queue.queue.arn] - } - ], coalesce(try(var.server_options.iam_policy, null), [])) - } -} - module "server_function" { - source = "./modules/opennext-lambda" + source = "./modules/opennext-lambda" + default_tags = var.default_tags prefix = "${var.prefix}-nextjs-server" @@ -104,6 +46,8 @@ module "server_function" { dead_letter_config = local.server_options.function.dead_letter_config reserved_concurrent_executions = local.server_options.function.reserved_concurrent_executions code_signing_config = local.server_options.function.code_signing_config + log_group = local.server_options.log_group + source_dir = local.server_options.package.source_dir output_dir = local.server_options.package.output_dir @@ -121,51 +65,9 @@ module "server_function" { /** * Image Optimization Function **/ -locals { - image_optimization_options = { - package = { - source_dir = coalesce(try(var.image_optimization_options.package.source_dir, null), "${local.opennext_abs_path}/image-optimization-function/") - output_dir = coalesce(try(var.image_optimization_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") - } - - function = { - function_name = try(var.image_optimization_options.function.function_name, 
null) - description = coalesce(try(var.image_optimization_options.function.description, null), "Next.js Image Optimization") - handler = coalesce(try(var.image_optimization_options.function.handler, null), "index.handler") - runtime = coalesce(try(var.image_optimization_options.function.runtime, null), "nodejs18.x") - architectures = coalesce(try(var.image_optimization_options.function.architectures, null), ["arm64"]) - memory_size = coalesce(try(var.image_optimization_options.function.memory_size, null), 512) - timeout = coalesce(try(var.image_optimization_options.function.timeout, null), 30) - publish = coalesce(try(var.image_optimization_options.function.publish, null), false) - dead_letter_config = try(var.image_optimization_options.function.dead_letter_config, null) - reserved_concurrent_executions = coalesce(try(var.image_optimization_options.function.reserved_concurrent_executions, null), 3) - code_signing_config = try(var.image_optimization_options.function.code_signing_config, null) - } - - networking = { - vpc_id = try(var.image_optimization_options.networking.vpc_id, null) - subnet_ids = coalesce(try(var.image_optimization_options.networking.subnet_ids, null), []) - security_group_ingress_rules = coalesce(try(var.image_optimization_options.networking.sg_ingress_rules, null), []) - security_group_egress_rules = coalesce(try(var.image_optimization_options.networking.sg_egress_rules, null), []) - } - - environment_variables = merge({ - BUCKET_NAME = module.assets.assets_bucket.bucket, - BUCKET_KEY_PREFIX = "assets" - }, coalesce(try(var.image_optimization_options.environment_variables, null), {})) - - iam_policy_statements = concat([ - { - effect = "Allow" - actions = ["s3:GetObject"] - resources = [module.assets.assets_bucket.arn, "${module.assets.assets_bucket.arn}/*"] - } - ], coalesce(try(var.image_optimization_options.iam_policy, null), [])) - } -} - module "image_optimization_function" { - source = "./modules/opennext-lambda" + source = 
"./modules/opennext-lambda" + default_tags = var.default_tags prefix = "${var.prefix}-nextjs-image-optimization" @@ -180,6 +82,7 @@ module "image_optimization_function" { dead_letter_config = local.image_optimization_options.function.dead_letter_config reserved_concurrent_executions = local.image_optimization_options.function.reserved_concurrent_executions code_signing_config = local.image_optimization_options.function.code_signing_config + log_group = local.image_optimization_options.log_group source_dir = local.image_optimization_options.package.source_dir output_dir = local.image_optimization_options.package.output_dir @@ -196,48 +99,9 @@ module "image_optimization_function" { /** * ISR Revalidation Function **/ -locals { - revalidation_options = { - package = { - source_dir = coalesce(try(var.revalidation_options.package.source_dir, null), "${local.opennext_abs_path}/revalidation-function/") - output_dir = coalesce(try(var.revalidation_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") - } - - function = { - function_name = try(var.revalidation_options.function.function_name, null) - description = coalesce(try(var.revalidation_options.function.description, null), "Next.js ISR Revalidation Function") - handler = coalesce(try(var.revalidation_options.function.handler, null), "index.handler") - runtime = coalesce(try(var.revalidation_options.function.runtime, null), "nodejs18.x") - architectures = coalesce(try(var.revalidation_options.function.architectures, null), ["arm64"]) - memory_size = coalesce(try(var.revalidation_options.function.memory_size, null), 128) - timeout = coalesce(try(var.revalidation_options.function.timeout, null), 30) - publish = coalesce(try(var.revalidation_options.function.publish, null), false) - dead_letter_config = try(var.revalidation_options.function.dead_letter_config, null) - reserved_concurrent_executions = coalesce(try(var.revalidation_options.function.reserved_concurrent_executions, null), 3) - 
code_signing_config = try(var.revalidation_options.function.code_signing_config, null) - } - - networking = { - vpc_id = try(var.revalidation_options.networking.vpc_id, null) - subnet_ids = coalesce(try(var.revalidation_options.networking.subnet_ids, null), []) - security_group_ingress_rules = coalesce(try(var.revalidation_options.networking.sg_ingress_rules, null), []) - security_group_egress_rules = coalesce(try(var.revalidation_options.networking.sg_egress_rules, null), []) - } - - environment_variables = coalesce(try(var.revalidation_options.environment_variables, null), {}) - - iam_policy_statements = concat([ - { - effect = "Allow" - actions = ["sqs:ReceiveMessage", "sqs:DeleteMessage", "sqs:GetQueueAttributes"] - resources = [module.revalidation_queue.queue.arn] - } - ], coalesce(try(var.revalidation_options.iam_policy, null), [])) - } -} - module "revalidation_function" { - source = "./modules/opennext-lambda" + source = "./modules/opennext-lambda" + default_tags = var.default_tags prefix = "${var.prefix}-nextjs-revalidation" @@ -252,6 +116,7 @@ module "revalidation_function" { dead_letter_config = local.revalidation_options.function.dead_letter_config reserved_concurrent_executions = local.revalidation_options.function.reserved_concurrent_executions code_signing_config = local.revalidation_options.function.code_signing_config + log_group = local.revalidation_options.log_group source_dir = local.revalidation_options.package.source_dir output_dir = local.revalidation_options.package.output_dir @@ -269,8 +134,9 @@ module "revalidation_function" { * ISR Revalidation Queue **/ module "revalidation_queue" { - source = "./modules/opennext-revalidation-queue" - prefix = "${var.prefix}-revalidation-queue" + source = "./modules/opennext-revalidation-queue" + prefix = "${var.prefix}-revalidation-queue" + default_tags = var.default_tags aws_account_id = data.aws_caller_identity.current.account_id revalidation_function_arn = 
module.revalidation_function.lambda_function.arn @@ -279,51 +145,10 @@ module "revalidation_queue" { /** * Warmer Function **/ -locals { - warmer_options = { - package = { - source_dir = coalesce(try(var.warmer_options.package.source_dir, null), "${local.opennext_abs_path}/warmer-function/") - output_dir = coalesce(try(var.warmer_options.package.output_dir, null), "${local.opennext_abs_path}/.build/") - } - - function = { - function_name = try(var.warmer_options.function.function_name, null) - description = coalesce(try(var.warmer_options.function.description, null), "Next.js Warmer Function") - handler = coalesce(try(var.warmer_options.function.handler, null), "index.handler") - runtime = coalesce(try(var.warmer_options.function.runtime, null), "nodejs18.x") - architectures = coalesce(try(var.warmer_options.function.architectures, null), ["arm64"]) - memory_size = coalesce(try(var.warmer_options.function.memory_size, null), 128) - timeout = coalesce(try(var.warmer_options.function.timeout, null), 30) - publish = coalesce(try(var.warmer_options.function.publish, null), false) - dead_letter_config = try(var.warmer_options.function.dead_letter_config, null) - reserved_concurrent_executions = coalesce(try(var.warmer_options.function.reserved_concurrent_executions, null), 3) - code_signing_config = try(var.warmer_options.function.code_signing_config, null) - } - - networking = { - vpc_id = try(var.warmer_options.networking.vpc_id, null) - subnet_ids = coalesce(try(var.warmer_options.networking.subnet_ids, null), []) - security_group_ingress_rules = coalesce(try(var.warmer_options.networking.sg_ingress_rules, null), []) - security_group_egress_rules = coalesce(try(var.warmer_options.networking.sg_egress_rules, null), []) - } - - environment_variables = merge({ - FUNCTION_NAME = module.server_function.lambda_function.function_name, - CONCURRENCY = 1 - }, coalesce(try(var.warmer_options.environment_variables, null), {})) - - iam_policy_statements = concat([ - { - effect = 
"Allow" - actions = ["lambda:InvokeFunction"] - resources = [module.server_function.lambda_function.arn] - } - ], coalesce(try(var.warmer_options.iam_policy, null), [])) - } -} module "warmer_function" { - source = "./modules/opennext-lambda" + source = "./modules/opennext-lambda" + default_tags = var.default_tags prefix = "${var.prefix}-nextjs-warmer" create_eventbridge_scheduled_rule = true @@ -340,6 +165,7 @@ module "warmer_function" { dead_letter_config = local.warmer_options.function.dead_letter_config reserved_concurrent_executions = local.warmer_options.function.reserved_concurrent_executions code_signing_config = local.warmer_options.function.code_signing_config + log_group = local.warmer_options.log_group source_dir = local.warmer_options.package.source_dir output_dir = local.warmer_options.package.output_dir @@ -357,7 +183,8 @@ module "warmer_function" { * CloudFront -> CloudWatch Logs **/ module "cloudfront_logs" { - source = "./modules/cloudfront-logs" + source = "./modules/cloudfront-logs" + default_tags = var.default_tags log_group_name = "${var.prefix}-cloudfront-logs" log_bucket_name = "${var.prefix}-cloudfront-logs" @@ -367,50 +194,10 @@ module "cloudfront_logs" { /** * Next.js CloudFront Distribution **/ -locals { - cloudfront = { - aliases = var.cloudfront.aliases - acm_certificate_arn = var.cloudfront.acm_certificate_arn - assets_paths = coalesce(var.cloudfront.assets_paths, []) - custom_headers = coalesce(var.cloudfront.custom_headers, []) - cors = merge({ - allow_credentials = false, - allow_headers = ["*"], - allow_methods = ["ALL"], - allow_origins = ["*"], - origin_override = true - }, var.cloudfront.cors) - hsts = merge({ - access_control_max_age_sec = 31536000 - include_subdomains = true - override = true - preload = true - }, var.cloudfront.hsts) - waf_logging_configuration = var.cloudfront.waf_logging_configuration - cache_policy = { - default_ttl = coalesce(try(var.cloudfront.cache_policy.default_ttl, null), 0) - min_ttl = 
coalesce(try(var.cloudfront.cache_policy.min_ttl, null), 0) - max_ttl = coalesce(try(var.cloudfront.cache_policy.max_ttl, null), 31536000) - cookies_config = merge({ - cookie_behavior = "all" - }, try(var.cloudfront.cache_policy.cookies_config, {})) - headers_config = merge({ - header_behavior = "whitelist", - items = [] - }, try(var.cloudfront.cache_policy.headers_config, {})) - query_strings_config = merge({ - query_string_behavior = "all", - items = [] - }, try(var.cloudfront.cache_policy.query_strings_config, {})) - } - origin_request_policy = try(var.cloudfront.origin_request_policy, null) - } -} - - module "cloudfront" { - source = "./modules/opennext-cloudfront" - prefix = "${var.prefix}-cloudfront" + source = "./modules/opennext-cloudfront" + prefix = "${var.prefix}-cloudfront" + default_tags = var.default_tags logging_bucket_domain_name = module.cloudfront_logs.logs_s3_bucket.bucket_regional_domain_name assets_origin_access_identity = module.assets.cloudfront_origin_access_identity.cloudfront_access_identity_path @@ -425,6 +212,7 @@ module "cloudfront" { acm_certificate_arn = local.cloudfront.acm_certificate_arn assets_paths = local.cloudfront.assets_paths custom_headers = local.cloudfront.custom_headers + geo_restriction = local.cloudfront.geo_restriction cors = local.cloudfront.cors hsts = local.cloudfront.hsts waf_logging_configuration = local.cloudfront.waf_logging_configuration diff --git a/modules/cloudfront-logs/kms.tf b/modules/cloudfront-logs/kms.tf index 5b648bd..25c59b0 100644 --- a/modules/cloudfront-logs/kms.tf +++ b/modules/cloudfront-logs/kms.tf @@ -4,6 +4,7 @@ resource "aws_kms_key" "cloudwatch_logs_key" { description = "KMS Key for ${var.log_group_name} log group" deletion_window_in_days = 10 policy = data.aws_iam_policy_document.cloudwatch_logs_key_policy[0].json + enable_key_rotation = true } data "aws_iam_policy_document" "cloudwatch_logs_key_policy" { @@ -33,7 +34,7 @@ data "aws_iam_policy_document" "cloudwatch_logs_key_policy" { 
principals { type = "Service" - identifiers = ["logs.amazonaws.com"] + identifiers = ["logs.amazonaws.com", "delivery.logs.amazonaws.com"] } } } diff --git a/modules/cloudfront-logs/main.tf b/modules/cloudfront-logs/main.tf index 8cc753f..03e16a2 100644 --- a/modules/cloudfront-logs/main.tf +++ b/modules/cloudfront-logs/main.tf @@ -13,3 +13,18 @@ terraform { } } } + +provider "aws" { + default_tags { + tags = var.default_tags + } +} + +provider "aws" { + alias = "global" + region = "us-east-1" + + default_tags { + tags = var.default_tags + } +} diff --git a/modules/cloudfront-logs/variables.tf b/modules/cloudfront-logs/variables.tf index 6907747..c95a0e9 100644 --- a/modules/cloudfront-logs/variables.tf +++ b/modules/cloudfront-logs/variables.tf @@ -1,3 +1,10 @@ +variable "default_tags" { + type = map(string) + description = "Default tags to apply to all created resources" + default = {} +} + + variable "log_group_name" { type = string } diff --git a/modules/opennext-assets/main.tf b/modules/opennext-assets/main.tf index fe30da3..eb1942e 100644 --- a/modules/opennext-assets/main.tf +++ b/modules/opennext-assets/main.tf @@ -8,3 +8,9 @@ terraform { } } } + +provider "aws" { + default_tags { + tags = var.default_tags + } +} diff --git a/modules/opennext-assets/s3.tf b/modules/opennext-assets/s3.tf index 4055f5a..7c0668d 100644 --- a/modules/opennext-assets/s3.tf +++ b/modules/opennext-assets/s3.tf @@ -49,7 +49,7 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "assets" { rule { apply_server_side_encryption_by_default { - sse_algorithm = "AES256" + sse_algorithm = "AES256" } } } @@ -165,6 +165,22 @@ data "aws_iam_policy_document" "read_assets_bucket" { identifiers = [var.server_function_role_arn] } } + statement { + effect = "Deny" + actions = ["s3:*"] + resources = [aws_s3_bucket.assets.arn, "${aws_s3_bucket.assets.arn}/*"] + + condition { + test = "Bool" + values = ["false"] + variable = "aws:SecureTransport" + } + + principals { + type = "*" + 
identifiers = ["*"] + } + } } # Static Assets diff --git a/modules/opennext-assets/variables.tf b/modules/opennext-assets/variables.tf index 90a6ce3..4049bd5 100644 --- a/modules/opennext-assets/variables.tf +++ b/modules/opennext-assets/variables.tf @@ -3,6 +3,14 @@ variable "prefix" { description = "Prefix for created resource IDs" } +variable "default_tags" { + type = map(string) + description = "Default tags to apply to all created resources" + default = {} +} + + + variable "assets_path" { type = string description = "The path of the open-next static assets" diff --git a/modules/opennext-cloudfront/cloudfront.tf b/modules/opennext-cloudfront/cloudfront.tf index a63d635..36a1a6c 100644 --- a/modules/opennext-cloudfront/cloudfront.tf +++ b/modules/opennext-cloudfront/cloudfront.tf @@ -36,8 +36,12 @@ resource "aws_cloudfront_origin_request_policy" "origin_request_policy" { headers_config { header_behavior = var.origin_request_policy.headers_config.header_behavior + headers { - items = var.origin_request_policy.headers_config.items + items = concat( + ["accept", "rsc", "next-router-prefetch", "next-router-state-tree", "x-prerender-revalidate"], + coalesce(var.origin_request_policy.headers_config.items, []) + ) } } @@ -56,8 +60,10 @@ resource "aws_cloudfront_cache_policy" "cache_policy" { min_ttl = var.cache_policy.min_ttl max_ttl = var.cache_policy.max_ttl - parameters_in_cache_key_and_forwarded_to_origin { + enable_accept_encoding_brotli = var.cache_policy.enable_accept_encoding_brotli + enable_accept_encoding_gzip = var.cache_policy.enable_accept_encoding_gzip + cookies_config { cookie_behavior = var.cache_policy.cookies_config.cookie_behavior @@ -75,7 +81,7 @@ resource "aws_cloudfront_cache_policy" "cache_policy" { headers { items = concat( - ["accept", "rsc", "next-router-prefetch", "next-router-state-tree"], + ["accept", "rsc", "next-router-prefetch", "next-router-state-tree", "x-prerender-revalidate"], coalesce(var.cache_policy.headers_config.items, []) ) } 
@@ -166,9 +172,8 @@ resource "aws_cloudfront_distribution" "distribution" { restrictions { geo_restriction { - restriction_type = "whitelist" - # TODO: Remove US location after implementing GitHub Self-Hosted runners - locations = ["GB", "US"] + restriction_type = var.geo_restriction.restriction_type + locations = var.geo_restriction.locations } } diff --git a/modules/opennext-cloudfront/main.tf b/modules/opennext-cloudfront/main.tf index 11d541a..f3b4c8d 100644 --- a/modules/opennext-cloudfront/main.tf +++ b/modules/opennext-cloudfront/main.tf @@ -9,7 +9,17 @@ terraform { } } +provider "aws" { + default_tags { + tags = var.default_tags + } +} + provider "aws" { alias = "global" region = "us-east-1" + + default_tags { + tags = var.default_tags + } } diff --git a/modules/opennext-cloudfront/variables.tf b/modules/opennext-cloudfront/variables.tf index 1bcac5d..d3cea54 100644 --- a/modules/opennext-cloudfront/variables.tf +++ b/modules/opennext-cloudfront/variables.tf @@ -3,6 +3,13 @@ variable "prefix" { description = "Prefix for created resource IDs" } +variable "default_tags" { + type = map(string) + description = "Default tags to apply to all created resources" + default = {} +} + + variable "acm_certificate_arn" { type = string } @@ -128,9 +135,11 @@ variable "origin_request_policy" { variable "cache_policy" { type = object({ - default_ttl = number - min_ttl = number - max_ttl = number + default_ttl = number + min_ttl = number + max_ttl = number + enable_accept_encoding_gzip = bool + enable_accept_encoding_brotli = bool cookies_config = object({ cookie_behavior = string items = optional(list(string)) @@ -145,3 +154,11 @@ variable "cache_policy" { }) }) } + +variable "geo_restriction" { + description = "The georestriction configuration for the CloudFront distribution" + type = object({ + restriction_type = string + locations = list(string) + }) +} diff --git a/modules/opennext-cloudfront/waf.tf b/modules/opennext-cloudfront/waf.tf index 7d452ba..0df510a 100644 --- 
a/modules/opennext-cloudfront/waf.tf +++ b/modules/opennext-cloudfront/waf.tf @@ -21,13 +21,21 @@ resource "aws_wafv2_web_acl" "cloudfront_waf" { managed_rule_group_statement { name = "AWSManagedRulesCommonRuleSet" vendor_name = "AWS" + + rule_action_override { + action_to_use { + count {} + } + + name = "NoUserAgent_HEADER" + } } } visibility_config { cloudwatch_metrics_enabled = true metric_name = "${var.prefix}-WAF-AWSManagedRulesCommonRuleSet" - sampled_requests_enabled = false + sampled_requests_enabled = true } } diff --git a/modules/opennext-lambda/eventbridge.tf b/modules/opennext-lambda/eventbridge.tf index 93ad3e0..cdd64cb 100644 --- a/modules/opennext-lambda/eventbridge.tf +++ b/modules/opennext-lambda/eventbridge.tf @@ -1,7 +1,7 @@ resource "aws_cloudwatch_event_rule" "scheduled_lambda_event_rule" { count = var.create_eventbridge_scheduled_rule ? 1 : 0 - name = "${var.prefix}-scheduled-lambda-event-rule" + name = "${var.prefix}-scheduled-rule" schedule_expression = var.schedule_expression } diff --git a/modules/opennext-lambda/lambda.tf b/modules/opennext-lambda/lambda.tf index 29643a1..d4afb07 100644 --- a/modules/opennext-lambda/lambda.tf +++ b/modules/opennext-lambda/lambda.tf @@ -135,3 +135,10 @@ resource "aws_lambda_permission" "allow_execution_from_eventbridge" { function_name = aws_lambda_function.function.function_name principal = "events.amazonaws.com" } + +resource "aws_cloudwatch_log_group" "function_log_group" { + name = "/aws/lambda/${aws_lambda_function.function.function_name}" + skip_destroy = true + retention_in_days = var.log_group.retention_in_days + kms_key_id = var.log_group.kms_key_id +} diff --git a/modules/opennext-lambda/main.tf b/modules/opennext-lambda/main.tf index 86ea302..4289bb0 100644 --- a/modules/opennext-lambda/main.tf +++ b/modules/opennext-lambda/main.tf @@ -12,3 +12,9 @@ terraform { } } } + +provider "aws" { + default_tags { + tags = var.default_tags + } +} diff --git a/modules/opennext-lambda/outputs.tf 
b/modules/opennext-lambda/outputs.tf index c4f9d3d..a4fc086 100644 --- a/modules/opennext-lambda/outputs.tf +++ b/modules/opennext-lambda/outputs.tf @@ -17,3 +17,7 @@ output "cloudwatch_event_target" { output "lambda_role" { value = aws_iam_role.lambda_role } + +output "log_group" { + value = aws_cloudwatch_log_group.function_log_group +} diff --git a/modules/opennext-lambda/variables.tf b/modules/opennext-lambda/variables.tf index e95da0c..40c3422 100644 --- a/modules/opennext-lambda/variables.tf +++ b/modules/opennext-lambda/variables.tf @@ -6,6 +6,14 @@ variable "prefix" { description = "Prefix for created resource IDs" } +variable "default_tags" { + type = map(string) + description = "Default tags to apply to all created resources" + default = {} +} + + + /** * Create Toggles **/ @@ -92,6 +100,14 @@ variable "kms_key_arn" { default = null } +variable "log_group" { + description = "Options passed to the CloudWatch log group for the Lambda function" + type = object({ + retention_in_days = number + kms_key_id = string + }) +} + variable "code_signing_config" { description = "Code Signing Config for the Lambda Function" type = object({ diff --git a/modules/opennext-revalidation-queue/kms.tf b/modules/opennext-revalidation-queue/kms.tf index 1660aae..8d6e247 100644 --- a/modules/opennext-revalidation-queue/kms.tf +++ b/modules/opennext-revalidation-queue/kms.tf @@ -10,6 +10,7 @@ resource "aws_kms_key" "revalidation_queue_key" { deletion_window_in_days = 10 policy = data.aws_iam_policy_document.revalidation_queue_key_policy[0].json + enable_key_rotation = true } data "aws_iam_policy_document" "revalidation_queue_key_policy" { @@ -29,17 +30,17 @@ data "aws_iam_policy_document" "revalidation_queue_key_policy" { statement { effect = "Allow" actions = [ - "kms:Encrypt*", - "kms:Decrypt*", + "kms:Encrypt", + "kms:Decrypt", "kms:ReEncrypt*", "kms:GenerateDataKey*", - "kms:Describe*" + "kms:DescribeKey" ] resources = ["*"] principals { type = "Service" - identifiers = 
["lambda.amazonaws.com", "sqs.amazonaws.com"] + identifiers = ["lambda.amazonaws.com", "edgelambda.amazonaws.com", "sqs.amazonaws.com"] } } } diff --git a/modules/opennext-revalidation-queue/main.tf b/modules/opennext-revalidation-queue/main.tf index fe30da3..eb1942e 100644 --- a/modules/opennext-revalidation-queue/main.tf +++ b/modules/opennext-revalidation-queue/main.tf @@ -8,3 +8,9 @@ terraform { } } } + +provider "aws" { + default_tags { + tags = var.default_tags + } +} diff --git a/modules/opennext-revalidation-queue/variables.tf b/modules/opennext-revalidation-queue/variables.tf index 95e2805..3ace808 100644 --- a/modules/opennext-revalidation-queue/variables.tf +++ b/modules/opennext-revalidation-queue/variables.tf @@ -3,6 +3,12 @@ variable "prefix" { description = "Prefix for created resource IDs" } +variable "default_tags" { + type = map(string) + description = "Default tags to apply to all created resources" + default = {} +} + variable "aws_account_id" { type = string description = "The account ID of the current AWS account" diff --git a/scripts/cloc-repository.sh b/scripts/cloc-repository.sh new file mode 100755 index 0000000..4eb71db --- /dev/null +++ b/scripts/cloc-repository.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +set -e + +# Count lines of code of this repository. 
+# +# Usage: +# $ ./cloc-repository.sh +# +# Options: +# VERBOSE=true # Show all the executed commands, default is `false` +# FORMAT=[format] # Set output format [default,cloc-xml,sloccount,json], default is `default` + +# ============================================================================== + +# SEE: https://github.com/make-ops-tools/gocloc/pkgs/container/gocloc, use the `linux/amd64` os/arch +image_version=latest@sha256:6888e62e9ae693c4ebcfed9f1d86c70fd083868acb8815fe44b561b9a73b5032 + +# ============================================================================== + +function main() { + + docker run --rm --platform linux/amd64 \ + --volume=$PWD:/workdir \ + ghcr.io/make-ops-tools/gocloc:$image_version \ + --output-type=${FORMAT:-default} . +} + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "$VERBOSE" && set -x + +main $* + +exit 0 diff --git a/.gitleaks.toml b/scripts/config/.gitleaks.toml similarity index 56% rename from .gitleaks.toml rename to scripts/config/.gitleaks.toml index e8512d3..fba2604 100644 --- a/.gitleaks.toml +++ b/scripts/config/.gitleaks.toml @@ -1,4 +1,7 @@ # SEE: https://github.com/gitleaks/gitleaks/#configuration [extend] -useDefault = true # SEE: https://github.com/gitleaks/gitleaks/blob/master/config/gitleaks.toml \ No newline at end of file +useDefault = true # SEE: https://github.com/gitleaks/gitleaks/blob/master/config/gitleaks.toml + +[allowlist] +files = ['.terraform.lock.hcl', 'poetry.lock', 'yarn.lock'] diff --git a/scripts/config/.grype.yaml b/scripts/config/.grype.yaml new file mode 100644 index 0000000..80c752e --- /dev/null +++ b/scripts/config/.grype.yaml @@ -0,0 +1,19 @@ +# If using SBOM input, automatically generate CPEs when packages have none +add-cpes-if-none: true + +# ignore: +# # This is the full set of supported rule fields: +# - 
vulnerability: CVE-2008-4318 +# fix-state: unknown +# package: +# name: libcurl +# version: 1.5.1 +# type: npm +# location: "/usr/local/lib/node_modules/**" + +# # We can make rules to match just by vulnerability ID: +# - vulnerability: CVE-2014-54321 + +# # ...or just by a single package field: +# - package: +# type: gem diff --git a/scripts/config/.pre-commit.yaml b/scripts/config/.pre-commit.yaml new file mode 100644 index 0000000..8e24270 --- /dev/null +++ b/scripts/config/.pre-commit.yaml @@ -0,0 +1,39 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: local + hooks: + - id: scan-secrets + name: Scan Secrets + entry: ./scripts/githooks/scan-secrets.sh + language: script + pass_filenames: false +- repo: local + hooks: + - id: check-file-format + name: Check File Format + entry: ./scripts/githooks/check-file-format.sh + language: script + pass_filenames: false +- repo: local + hooks: + - id: check-markdown-format + name: Check Markdown Format + entry: ./scripts/githooks/check-markdown-format.sh + language: script + pass_filenames: false +- repo: local + hooks: + - id: check-terraform-format + name: Check Terraform Format + entry: ./scripts/githooks/check-terraform-format.sh + language: script + pass_filenames: false +- repo: https://github.com/antonbabenko/pre-commit-terraform + rev: v1.81.0 + hooks: + - id: terraform_tflint diff --git a/scripts/config/.syft.yaml b/scripts/config/.syft.yaml new file mode 100644 index 0000000..e9f5f58 --- /dev/null +++ b/scripts/config/.syft.yaml @@ -0,0 +1,83 @@ +# a list of globs to exclude from scanning. 
same as --exclude ; for example: +# exclude: +# - "/etc/**" +# - "./out/**/*.json" +exclude: + - ./.git/** + +# maximum number of workers used to process the list of package catalogers in parallel +parallelism: 3 + +# cataloging packages is exposed through the packages and power-user subcommands +package: + # search within archives that do contain a file index to search against (zip) + # note: for now this only applies to the java package cataloger + # SYFT_PACKAGE_SEARCH_INDEXED_ARCHIVES env var + search-indexed-archives: true + # search within archives that do not contain a file index to search against (tar, tar.gz, tar.bz2, etc) + # note: enabling this may result in a performance impact since all discovered compressed tars will be decompressed + # note: for now this only applies to the java package cataloger + # SYFT_PACKAGE_SEARCH_UNINDEXED_ARCHIVES env var + search-unindexed-archives: true + cataloger: + # enable/disable cataloging of packages + # SYFT_PACKAGE_CATALOGER_ENABLED env var + enabled: true + # the search space to look for packages (options: all-layers, squashed) + # same as -s ; SYFT_PACKAGE_CATALOGER_SCOPE env var + scope: "squashed" + +# cataloging file contents is exposed through the power-user subcommand +file-contents: + cataloger: + # enable/disable cataloging of secrets + # SYFT_FILE_CONTENTS_CATALOGER_ENABLED env var + enabled: true + # the search space to look for secrets (options: all-layers, squashed) + # SYFT_FILE_CONTENTS_CATALOGER_SCOPE env var + scope: "squashed" + # skip searching a file entirely if it is above the given size (default = 1MB; unit = bytes) + # SYFT_FILE_CONTENTS_SKIP_FILES_ABOVE_SIZE env var + skip-files-above-size: 1048576 + # file globs for the cataloger to match on + # SYFT_FILE_CONTENTS_GLOBS env var + globs: [] + +# cataloging file metadata is exposed through the power-user subcommand +file-metadata: + cataloger: + # enable/disable cataloging of file metadata + # SYFT_FILE_METADATA_CATALOGER_ENABLED env var + 
enabled: true + # the search space to look for file metadata (options: all-layers, squashed) + # SYFT_FILE_METADATA_CATALOGER_SCOPE env var + scope: "squashed" + # the file digest algorithms to use when cataloging files (options: "sha256", "md5", "sha1") + # SYFT_FILE_METADATA_DIGESTS env var + digests: ["sha256"] + +# cataloging secrets is exposed through the power-user subcommand +secrets: + cataloger: + # enable/disable cataloging of secrets + # SYFT_SECRETS_CATALOGER_ENABLED env var + enabled: true + # the search space to look for secrets (options: all-layers, squashed) + # SYFT_SECRETS_CATALOGER_SCOPE env var + scope: "all-layers" + # show extracted secret values in the final JSON report + # SYFT_SECRETS_REVEAL_VALUES env var + reveal-values: false + # skip searching a file entirely if it is above the given size (default = 1MB; unit = bytes) + # SYFT_SECRETS_SKIP_FILES_ABOVE_SIZE env var + skip-files-above-size: 1048576 + # name-regex pairs to consider when searching files for secrets. Note: the regex must match single line patterns + # but may also have OPTIONAL multiline capture groups. Regexes with a named capture group of "value" will + # use the entire regex to match, but the secret value will be assumed to be entirely contained within the + # "value" named capture group. + additional-patterns: {} + # names to exclude from the secrets search, valid values are: "aws-access-key", "aws-secret-key", "pem-private-key", + # "docker-config-auth", and "generic-api-key". Note: this does not consider any names introduced in the + # "secrets.additional-patterns" config option. + # SYFT_SECRETS_EXCLUDE_PATTERN_NAMES env var + exclude-pattern-names: [] diff --git a/scripts/cve-scanner.sh b/scripts/cve-scanner.sh new file mode 100755 index 0000000..4f3d3b0 --- /dev/null +++ b/scripts/cve-scanner.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +set -e + +# Script to scan an SBOM file for CVEs (Common Vulnerabilities and Exposures). 
+# +# Usage: +# $ ./cve-scanner.sh +# +# Options: +# VERBOSE=true # Show all the executed commands, default is `false` + +# ============================================================================== + +# SEE: https://github.com/anchore/grype/pkgs/container/grype, use the `linux/amd64` os/arch +image_version=v0.63.1@sha256:124447c7abae54d6fdad2d3a18c9c71d88af46404c55437c3acbf6dde524c417 + +# ============================================================================== + +function main() { + + docker run --rm --platform linux/amd64 \ + --volume $PWD:/scan \ + ghcr.io/anchore/grype:$image_version \ + sbom:/scan/sbom-spdx.json \ + --config /scan/scripts/config/.grype.yaml \ + --output json \ + --file=/scan/cve-scan.json +} + +function is_arg_true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is_arg_true "$VERBOSE" && set -x + +main $* + +exit 0 diff --git a/scripts/githooks/editorconfig-pre-commit.sh b/scripts/githooks/check-file-format.sh similarity index 86% rename from scripts/githooks/editorconfig-pre-commit.sh rename to scripts/githooks/check-file-format.sh index 6da45a4..ba6b4e8 100755 --- a/scripts/githooks/editorconfig-pre-commit.sh +++ b/scripts/githooks/check-file-format.sh @@ -7,7 +7,7 @@ set +e # according to the style defined in the `.editorconfig` file. 
# # Usage: -# $ ./editorconfig-pre-commit.sh +# $ ./check-file-format.sh # # Options: # BRANCH_NAME=other-branch-than-main # Branch to compare with, default is `origin/main` @@ -26,8 +26,9 @@ set +e # ============================================================================== -exit_code=0 +# SEE: https://hub.docker.com/r/mstruebing/editorconfig-checker/tags, use the `linux/amd64` os/arch image_version=2.7.0@sha256:0f8f8dd4f393d29755bef2aef4391d37c34e358d676e9d66ce195359a9c72ef3 +exit_code=0 # ============================================================================== @@ -45,8 +46,8 @@ function main() { else # Check changed files only - changed_files=$(git diff --diff-filter=ACMRT --name-only ${BRANCH_NAME:-origin/main}) - if [ -n "$changed_files" ]; then + files=$( (git diff --diff-filter=ACMRT --name-only ${BRANCH_NAME:-origin/main}; git diff --name-only) | sort | uniq ) + if [ -n "$files" ]; then while read file; do docker run --rm --platform linux/amd64 \ --volume=$PWD:/check \ @@ -55,7 +56,7 @@ function main() { --exclude '.git/' \ "$file" [ $? != 0 ] && exit_code=1 ||: - done < <(echo "$changed_files") + done < <(echo "$files") fi fi diff --git a/scripts/githooks/markdown-pre-commit.sh b/scripts/githooks/check-markdown-format.sh similarity index 79% rename from scripts/githooks/markdown-pre-commit.sh rename to scripts/githooks/check-markdown-format.sh index 2b2609d..457076f 100755 --- a/scripts/githooks/markdown-pre-commit.sh +++ b/scripts/githooks/check-markdown-format.sh @@ -6,7 +6,7 @@ set -e #Β over changed files. 
# # Usage: -# $ ./markdown-pre-commit.sh +# $ ./check-markdown-format.sh # # Options: # BRANCH_NAME=other-branch-than-main # Branch to compare with, default is `origin/main` @@ -26,7 +26,8 @@ set -e # ============================================================================== -image_version=v0.34.0@sha256:230b1e0e0fa1c7dd6261e025cacf6761ac5ba3557a6a919eec910d731817ff28 +# SEE: https://github.com/igorshubovych/markdownlint-cli/pkgs/container/markdownlint-cli, use the `linux/amd64` os/arch +image_version=v0.35.0@sha256:4ec089301e2e3e1298424f4d2b5d9e18af3aa005402590770c339b6637100dc6 # ============================================================================== @@ -34,10 +35,10 @@ function main() { if is-arg-true "$ALL_FILES"; then # Check all files - files="*.md" + files="$(find ./ -type f -name "*.md")" else # Check changed files only - files="$(git diff --diff-filter=ACMRT --name-only ${BRANCH_NAME:-origin/main} "*.md")" + files="$( (git diff --diff-filter=ACMRT --name-only ${BRANCH_NAME:-origin/main} "*.md"; git diff --name-only "*.md") | sort | uniq )" fi if [ -n "$files" ]; then diff --git a/scripts/githooks/check-terraform-format.sh b/scripts/githooks/check-terraform-format.sh new file mode 100755 index 0000000..68af555 --- /dev/null +++ b/scripts/githooks/check-terraform-format.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +set -e + +# Pre-commit git hook to check format Terraform code. 
+# +# Usage: +# $ ./check-terraform-format.sh +# +# Options: +# CHECK_ONLY=true # Do not format, run check only, default is `false` +# VERBOSE=true # Show all the executed commands, default is `false` + +# ============================================================================== + +versions=$(git rev-parse --show-toplevel)/.tool-versions +terraform_version=$(grep terraform $versions | cut -f2 -d' ') +image_version=${terraform_version:-latest} + +# ============================================================================== + +function main() { + + opts= + if is-arg-true "$CHECK_ONLY"; then + opts="-check" + fi + + docker run --rm --platform linux/amd64 \ + --volume=$PWD:/workdir \ + hashicorp/terraform:$image_version \ + fmt -recursive $opts +} + +function is-arg-true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is-arg-true "$VERBOSE" && set -x + +main $* + +exit 0 diff --git a/scripts/githooks/pre-commit b/scripts/githooks/pre-commit deleted file mode 100755 index ed98324..0000000 --- a/scripts/githooks/pre-commit +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -e - -# Pre-commit git hook runner - -# ============================================================================== - -project_dir=$(git rev-parse --show-toplevel) -cd $project_dir - -pre_commit_scripts=$(ls -1 $project_dir/scripts/githooks/*-pre-commit.sh 2> /dev/null) -for script in $pre_commit_scripts; do - printf "Running githook: $(echo $script | sed "s;$project_dir/;;g")\n" - $script "$@" -done -printf "Successfully run all githooks\n" diff --git a/scripts/githooks/secret-scan-pre-commit.sh b/scripts/githooks/scan-secrets.sh similarity index 73% rename from scripts/githooks/secret-scan-pre-commit.sh rename to scripts/githooks/scan-secrets.sh index b61e36e..b9eb760 100755 --- a/scripts/githooks/secret-scan-pre-commit.sh +++ 
b/scripts/githooks/scan-secrets.sh @@ -5,7 +5,7 @@ set -e # Pre-commit git hook to scan for secrets hardcoded in the codebase. # # Usage: -# $ ./secret-scan-pre-commit.sh +# $ ./scan-secrets.sh # # Options: # ALL_FILES=true # Scan whole git history or 'last-commit', default is `false` @@ -18,7 +18,8 @@ set -e # ============================================================================== -image_version=v8.16.3@sha256:05b48ff3f4fd7daa9487b42cbf9d576f2dc0dbe2551e3d0a8738e18ba2278091 +# SEE: https://github.com/gitleaks/gitleaks/pkgs/container/gitleaks, use the `linux/amd64` os/arch +image_version=v8.17.0@sha256:99e40155529614d09d264cc886c1326c9a4593ad851ccbeaaed8dcf03ff3d3d7 # ============================================================================== @@ -34,12 +35,17 @@ function main() { # Scan staged files only cmd="protect --source=/scan --verbose --staged" fi + # Include base line file if it exists + if [ -f $PWD/scripts/config/.gitleaks-baseline.json ]; then + cmd="$cmd --baseline-path /scan/scripts/config/.gitleaks-baseline.json" + fi docker run --rm --platform linux/amd64 \ --volume=$PWD:/scan \ --workdir=/scan \ ghcr.io/gitleaks/gitleaks:$image_version \ - $cmd + $cmd \ + --config /scan/scripts/config/.gitleaks.toml } function is_arg_true() { diff --git a/scripts/init.mk b/scripts/init.mk new file mode 100644 index 0000000..a213764 --- /dev/null +++ b/scripts/init.mk @@ -0,0 +1,80 @@ +# This file is part of the repository template project. Please, DO NOT edit this file. 
+ +nodejs-install: # Install Node.js + make _install-dependency name="nodejs" + make _install-dependency name="yarn" + +python-install: # Install Python + make _install-dependency name="python" + make _install-dependency name="poetry" + +terraform-install: # Install Terraform + make _install-dependency name="terraform" + +githooks-install: # Install git hooks configured in this repository + make _install-dependency name="pre-commit" + pre-commit install \ + --config ./scripts/config/.pre-commit.yaml \ + --install-hooks + +githooks-run: # Run git hooks configured in this repository + pre-commit run \ + --config ./scripts/config/.pre-commit.yaml \ + --all-files + +asdf-install: # Install asdf from https://asdf-vm.com/ + if [ -d "${HOME}/.asdf" ]; then + ( cd "${HOME}/.asdf"; git pull ) + else + git clone --depth=1 https://github.com/asdf-vm/asdf.git "${HOME}/.asdf" ||: + fi + asdf plugin update --all + +_install-dependency: # Install asdf dependency - mandatory: name=[listed in the `./.tool-versions` file]; optional: version=[if not listed] + asdf plugin add ${name} ||: + asdf install ${name} $(or ${version},) + +clean:: # Remove all generated and temporary files + rm -rf \ + docs/diagrams/.*.bkp \ + docs/diagrams/.*.dtmp \ + cve-scan*.json \ + sbom-spdx*.json + +help: # List Makefile targets + awk 'BEGIN {FS = ":.*?# "} /^[ a-zA-Z0-9_-]+:.*? 
# / {printf "\033[36m%-41s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort + +list-variables: # List all the variables available to make + $(foreach v, $(sort $(.VARIABLES)), + $(if $(filter-out default automatic, $(origin $v)), + $(if $(and $(patsubst %_PASSWORD,,$v), $(patsubst %_PASS,,$v), $(patsubst %_KEY,,$v), $(patsubst %_SECRET,,$v)), + $(info $v=$($v) ($(value $v)) [$(flavor $v),$(origin $v)]), + $(info $v=****** (******) [$(flavor $v),$(origin $v)]) + ) + ) + ) + +.DEFAULT_GOAL := help +.EXPORT_ALL_VARIABLES: +.NOTPARALLEL: +.ONESHELL: +.PHONY: * +MAKEFLAGS := --no-print-director +SHELL := /bin/bash +ifeq (true, $(shell [[ "$(VERBOSE)" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$$ ]] && echo true)) + .SHELLFLAGS := -cex +else + .SHELLFLAGS := -ce +endif + +.SILENT: \ + _install-dependency \ + asdf-install \ + clean \ + githooks-install \ + githooks-run \ + help \ + list-variables \ + nodejs-install \ + python-install \ + terraform-install diff --git a/scripts/makefile/Makefile.init b/scripts/makefile/Makefile.init deleted file mode 100644 index a84a919..0000000 --- a/scripts/makefile/Makefile.init +++ /dev/null @@ -1,37 +0,0 @@ -config: githooks-install # Configure development environment - -githooks-install: # Install git hooks configured in this repository - echo "./scripts/githooks/pre-commit" > .git/hooks/pre-commit - chmod +x .git/hooks/pre-commit - -# ============================================================================== - -help: # List Makefile targets - @awk 'BEGIN {FS = ":.*?# "} /^[ a-zA-Z0-9_-]+:.*? 
# / {printf "\033[36m%-41s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort - -list-variables: # List all the variables available to make - @$(foreach v, $(sort $(.VARIABLES)), - $(if $(filter-out default automatic, $(origin $v)), - $(if $(and $(patsubst %_PASSWORD,,$v), $(patsubst %_PASS,,$v), $(patsubst %_KEY,,$v), $(patsubst %_SECRET,,$v)), - $(info $v=$($v) ($(value $v)) [$(flavor $v),$(origin $v)]), - $(info $v=****** (******) [$(flavor $v),$(origin $v)]) - ) - ) - ) - -.DEFAULT_GOAL := help -.EXPORT_ALL_VARIABLES: -.NOTPARALLEL: -.ONESHELL: -.PHONY: * -MAKEFLAGS := --no-print-director -SHELL := /bin/bash -ifeq (true, $(shell [[ "$(VERBOSE)" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$$ ]] && echo true)) - .SHELLFLAGS := -cex -else - .SHELLFLAGS := -ce -endif - -.SILENT: \ - config \ - githooks-install \ No newline at end of file diff --git a/scripts/sbom-generator.sh b/scripts/sbom-generator.sh new file mode 100755 index 0000000..ed9ecc0 --- /dev/null +++ b/scripts/sbom-generator.sh @@ -0,0 +1,72 @@ +#!/bin/bash + +set -e + +# Script to generate SBOM (Software Bill of Materials) for the repository +# content and any artefact created by the CI/CD pipeline. 
+# +# Usage: +# $ ./generate-sbom.sh +# +# Options: +# VERBOSE=true # Show all the executed commands, default is `false` + +# ============================================================================== + +# SEE: https://github.com/anchore/syft/pkgs/container/syft, use the `linux/amd64` os/arch +image_version=v0.84.1@sha256:9a8f80eee3984d4a3f9a86e4d66e739e30dfc34564d76d3574f98798db5d5b35 + +# ============================================================================== + +function main() { + + create-sbom + enrich-sbom +} + +function create-sbom() { + + docker run --rm --platform linux/amd64 \ + --volume $PWD:/scan \ + ghcr.io/anchore/syft:$image_version \ + packages dir:/scan \ + --config /scan/scripts/config/.syft.yaml \ + --output spdx-json=/scan/sbom-spdx.tmp.json +} + +function enrich-sbom() { + + git_url=$(git config --get remote.origin.url) + git_branch=$(git rev-parse --abbrev-ref HEAD) + git_commit_hash=$(git rev-parse HEAD) + git_tags=$(echo \"$(git tag | tr '\n' ',' | sed 's/,$//' | sed 's/,/","/g')\" | sed 's/""//g') + pipeline_run_id=${GITHUB_RUN_ID:-0} + pipeline_run_number=${GITHUB_RUN_NUMBER:-0} + pipeline_run_attempt=${GITHUB_RUN_ATTEMPT:-0} + + docker run --rm --platform linux/amd64 \ + --volume $PWD:/repo \ + --workdir /repo \ + ghcr.io/make-ops-tools/jq:latest \ + '.creationInfo |= . 
+ {"repository":{"url":"'${git_url}'","branch":"'${git_branch}'","tags":['${git_tags}'],"commitHash":"'${git_commit_hash}'"},"pipeline":{"id":'${pipeline_run_id}',"number":'${pipeline_run_number}',"attempt":'${pipeline_run_attempt}'}}' \ + sbom-spdx.tmp.json \ + > sbom-spdx.json + rm -f sbom-spdx.tmp.json +} + +function is_arg_true() { + + if [[ "$1" =~ ^(true|yes|y|on|1|TRUE|YES|Y|ON)$ ]]; then + return 0 + else + return 1 + fi +} + +# ============================================================================== + +is_arg_true "$VERBOSE" && set -x + +main $* + +exit 0 diff --git a/variables.tf b/variables.tf index 5f7c6a6..527a965 100644 --- a/variables.tf +++ b/variables.tf @@ -8,6 +8,12 @@ variable "prefix" { default = "opennext" } +variable "default_tags" { + type = map(string) + description = "Default tags to apply to all created resources" + default = {} +} + /** * Route53 (DNS) Variables **/ @@ -43,7 +49,6 @@ variable "server_options" { source_dir = optional(string) output_dir = optional(string) })) - function = optional(object({ function_name = optional(string) description = optional(string) @@ -95,6 +100,10 @@ variable "server_options" { self = optional(bool) }))) })) + log_group = optional(object({ + retention_in_days = optional(number) + kms_key_id = optional(string) + })) }) default = {} } @@ -157,6 +166,10 @@ variable "image_optimization_options" { self = optional(bool) }))) })) + log_group = optional(object({ + retention_in_days = optional(number) + kms_key_id = optional(string) + })) }) default = {} } @@ -219,6 +232,10 @@ variable "revalidation_options" { self = optional(bool) }))) })) + log_group = optional(object({ + retention_in_days = optional(number) + kms_key_id = optional(string) + })) }) default = {} } @@ -281,6 +298,10 @@ variable "warmer_options" { self = optional(bool) }))) })) + log_group = optional(object({ + retention_in_days = optional(number) + kms_key_id = optional(string) + })) }) default = {} } @@ -295,6 +316,10 @@ variable 
"cloudfront" { override = bool value = string }))) + geo_restriction = optional(object({ + restriction_type = string + locations = list(string) + })) cors = optional(object({ allow_credentials = bool, allow_headers = list(string) @@ -333,9 +358,11 @@ variable "cloudfront" { }))) })) cache_policy = optional(object({ - default_ttl = optional(number) - min_ttl = optional(number) - max_ttl = optional(number) + default_ttl = optional(number) + min_ttl = optional(number) + max_ttl = optional(number) + enable_accept_encoding_gzip = optional(bool) + enable_accept_encoding_brotli = optional(bool) cookies_config = optional(object({ cookie_behavior = string }))