Commit
Merge branch 'develop' into nci_links
Ariana-B authored Mar 5, 2024
2 parents 358fe01 + b611e52 commit d6ffbd7
Showing 69 changed files with 2,930 additions and 2,922 deletions.
12 changes: 12 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,12 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: "/"
schedule:
interval: "daily"
target-branch: "develop"
- package-ecosystem: docker
directory: "/"
schedule:
interval: "daily"
target-branch: "develop"
2 changes: 1 addition & 1 deletion .github/workflows/deployment_test.yaml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
fetch-depth: 0

2 changes: 1 addition & 1 deletion .github/workflows/doc.yaml
@@ -2,7 +2,7 @@
name: Docs QA

on:
pull_request:
pull_request_target:
paths:
- "**"

8 changes: 4 additions & 4 deletions .github/workflows/docker.yml
@@ -23,7 +23,7 @@ jobs:

steps:
- name: Checkout git
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -36,7 +36,7 @@ jobs:

- name: Build and Push unstable + latest Docker image tag
if: github.event_name != 'release'
uses: whoan/docker-build-with-cache-action@v4
uses: whoan/docker-build-with-cache-action@v8
with:
image_name: ${{ env.IMAGE_NAME }}
username: gadockersvc
@@ -54,7 +54,7 @@ jobs:
run: echo $RELEASE

- name: Build and Push release if we have a tag
uses: whoan/docker-build-with-cache-action@v4
uses: whoan/docker-build-with-cache-action@v8
if: github.event_name == 'release'
with:
image_name: ${{ env.IMAGE_NAME }}
@@ -64,7 +64,7 @@ jobs:
build_extra_args: "--build-arg=ENVIRONMENT=deployment"

- name: Update Docker Hub Description
uses: peter-evans/dockerhub-description@v2
uses: peter-evans/dockerhub-description@v4
with:
username: gadockersvc
password: ${{ secrets.DockerPassword }}
4 changes: 2 additions & 2 deletions .github/workflows/publish-pypi.yml
@@ -12,10 +12,10 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4

- name: Setup Python
uses: actions/setup-python@v1
uses: actions/setup-python@v5
with:
python-version: 3.8

6 changes: 3 additions & 3 deletions .github/workflows/scan.yml
@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout git
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
branch: develop
@@ -26,7 +26,7 @@ jobs:

- name: Build unstable + latest Docker image tag
if: github.event_name != 'release'
uses: whoan/docker-build-with-cache-action@v4
uses: whoan/docker-build-with-cache-action@v8
with:
image_name: ${{ env.IMAGE_NAME }}
image_tag: ${{ env.UNSTABLE_TAG }},latest
@@ -45,6 +45,6 @@ jobs:
severity: "CRITICAL,HIGH"

- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v2
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
4 changes: 2 additions & 2 deletions .github/workflows/test.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -37,7 +37,7 @@ jobs:
make docker-clean
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
with:
files: ./coverage.xml
fail_ci_if_error: false
16 changes: 9 additions & 7 deletions .pre-commit-config.yaml
@@ -2,12 +2,12 @@
repos:
# Normalise all Python code. (Black + isort + pyupgrade + autoflake)
- repo: https://github.com/Zac-HD/shed
rev: 2023.4.1
rev: 2024.1.1
hooks:
- id: shed
# Python Linting
- repo: https://github.com/pycqa/flake8
rev: 6.0.0
rev: 7.0.0
hooks:
- id: flake8
exclude: ^docs/
@@ -25,7 +25,7 @@ repos:
- pep8-naming # Follow pep8 naming rules (eg. function names lowercase)
# Lint Python snippets embedded in Markdown (using flake8)
- repo: https://github.com/johnfraney/flake8-markdown
rev: v0.4.0
rev: v0.5.0
hooks:
- id: flake8-markdown
# # Lint Jinja2 templates
@@ -35,32 +35,34 @@ repos:
# - id: curlylint
# Lint Yaml files
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.31.0
rev: v1.35.1
hooks:
- id: yamllint
args: ['-c', '.yamllint']
# Common pre-commit checks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: check-added-large-files # We don't want huge files. (Cut down test data!)
args: ['--maxkb=2000']
- id: check-case-conflict # Don't allow files that differ by case sensitivity.
- id: check-docstring-first # Avoid common error of code before docstring.
- id: check-json # Check json file syntax
# (Exclude Typescript config: it uses json5 extensions)
exclude: 'tsconfig.json'
exclude: 'tsconfig.json|^integration_tests/schemas/'
- id: check-merge-conflict # Don't commit merge-conflicts
- id: check-symlinks # Symlinks that don't point to anything?
- id: check-yaml # Check Yaml file syntax
args: [--allow-multiple-documents]
- id: debug-statements # Avoid commiting debug/breakpoints
- id: end-of-file-fixer # Normalise on exactly one newline
exclude: '^integration_tests/schemas/'
- id: fix-byte-order-marker # No UTF-8 byte order marks
- id: mixed-line-ending # Don't allow mixed line endings
- id: pretty-format-json
args: ['--no-sort-keys', '--indent=4', '--autofix']
# (Exclude Typescript config: it uses json5 extensions)
exclude: 'tsconfig.json'
exclude: 'tsconfig.json|^integration_tests/schemas/'
- id: requirements-txt-fixer # Keep requirements files sorted.
- id: trailing-whitespace # Auto remove trailing whitespace
exclude: '^integration_tests/schemas/'
6 changes: 3 additions & 3 deletions .readthedocs.yaml
@@ -1,13 +1,13 @@
---
version: 2
build:
image: latest
os: ubuntu-20.04
tools:
python: "3.8"
apt_packages:
- libgdal-dev
- gdal-bin
python:
version: 3.8
install:
- requirements: docs/rtd-requirements.txt
- requirements: docs/requirements.txt
system_packages: true
53 changes: 36 additions & 17 deletions Dockerfile
@@ -1,4 +1,4 @@
FROM osgeo/gdal:ubuntu-small-3.3.2
FROM ghcr.io/osgeo/gdal:ubuntu-small-3.8.4 as builder

ENV DEBIAN_FRONTEND=noninteractive \
LC_ALL=C.UTF-8 \
@@ -7,48 +7,67 @@ ENV DEBIAN_FRONTEND=noninteractive \

# Apt installation
RUN apt-get update && \
apt-get install -y \
apt-get upgrade -y && \
apt-get install -y --no-install-recommends \
build-essential \
git \
vim \
nano \
# For Psycopg2
libpq-dev \
python3-dev \
python3-pip

WORKDIR /build

RUN python3.10 -m pip --disable-pip-version-check -q wheel --no-binary psycopg2 psycopg2

FROM ghcr.io/osgeo/gdal:ubuntu-small-3.8.4

ENV DEBIAN_FRONTEND=noninteractive \
LC_ALL=C.UTF-8 \
LANG=C.UTF-8 \
PYTHONFAULTHANDLER=1

# Apt installation
RUN apt-get update && \
apt-get upgrade -y && \
apt-get install -y --no-install-recommends \
git \
# For Psycopg2
libpq5 \
tini \
wget \
postgresql-client \
python3-pip \
# For Psycopg2
libpq-dev python-dev \
&& apt-get autoclean && \
apt-get autoremove && \
rm -rf /var/lib/{apt,dpkg,cache,log}


# Environment can be whatever is supported by setup.py
# so, either deployment, test
ARG ENVIRONMENT=deployment
# ARG ENVIRONMENT=test

RUN echo "Environment is: $ENVIRONMENT"

RUN pip install pip-tools pytest-cov
RUN echo "Environment is: $ENVIRONMENT" && \
[ "$ENVIRONMENT" = "deployment" ] || pip install pip-tools pytest-cov

# Set up a nice workdir and add the live code
ENV APPDIR=/code
RUN mkdir -p $APPDIR
COPY . $APPDIR
WORKDIR $APPDIR
COPY . $APPDIR

COPY --from=builder --link /build/*.whl ./
RUN python3.10 -m pip --disable-pip-version-check -q install *.whl && \
rm *.whl

# These ENVIRONMENT flags make this a bit complex, but basically, if we are in dev
# then we want to link the source (with the -e flag) and if we're in prod, we
# want to delete the stuff in the /code folder to keep it simple.
RUN if [ "$ENVIRONMENT" = "deployment" ] ; then\
pip install .[$ENVIRONMENT]; \
rm -rf /code/* ; \
rm -rf /code/* /code/.git* ; \
else \
pip install --editable .[$ENVIRONMENT]; \
fi

RUN pip freeze
fi && \
pip freeze

ENTRYPOINT ["/bin/tini", "--"]

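The Dockerfile is now a two-stage build: a builder stage (with build-essential, libpq-dev and python3-dev) compiles a psycopg2 wheel via pip wheel --no-binary, and a slimmer runtime stage installs only the libpq5 client library plus the copied wheel. A quick sanity check one could run inside the final image (illustrative only, not part of the diff):

# Illustrative check: a psycopg2 wheel built against libpq-dev in the builder
# stage only needs the libpq5 shared library at runtime.
import psycopg2
from psycopg2 import extensions

print(psycopg2.__version__)            # wheel installed from the builder stage
print(extensions.libpq_version())      # libpq provided by the runtime libpq5 package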
1 change: 0 additions & 1 deletion cubedash/_filters.py
@@ -2,7 +2,6 @@
Common global filters for templates.
"""


import calendar
import logging
from datetime import datetime
31 changes: 10 additions & 21 deletions cubedash/_model.py
@@ -38,9 +38,11 @@
if os.getenv("SENTRY_DSN"):
sentry_sdk.init(
dsn=os.getenv("SENTRY_DSN"),
environment=os.getenv("SENTRY_ENV_TAG")
if os.getenv("SENTRY_ENV_TAG")
else "dev-explorer",
environment=(
os.getenv("SENTRY_ENV_TAG")
if os.getenv("SENTRY_ENV_TAG")
else "dev-explorer"
),
integrations=[
FlaskIntegration(),
],
@@ -295,22 +297,9 @@ def internal_server_error(error):
return flask.render_template("500.html")


@app.before_first_request
def enable_prometheus():
# Enable deployment specific code for Prometheus metrics
if os.environ.get("PROMETHEUS_MULTIPROC_DIR", False):
from prometheus_flask_exporter.multiprocess import (
GunicornInternalPrometheusMetrics,
)

metrics = GunicornInternalPrometheusMetrics(app, group_by="endpoint")
_LOG.info("Prometheus metrics enabled : {metrics}", extra=dict(metrics=metrics))

# Enable deployment specific code for Prometheus metrics
if os.environ.get("PROMETHEUS_MULTIPROC_DIR", False):
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

@app.before_first_request
def check_schema_compatibility():
if not STORE.is_schema_compatible():
raise RuntimeError(
"Cubedash schema is out of date. "
"Please rerun `cubedash-gen -v --init` to apply updates."
)
metrics = GunicornInternalPrometheusMetrics(app, group_by="endpoint")
_LOG.info("Prometheus metrics enabled : {metrics}", extra=dict(metrics=metrics))
12 changes: 6 additions & 6 deletions cubedash/_pages.py
@@ -116,9 +116,11 @@ def product_page(
datasets_geojson=None, # _model.get_datasets_geojson(product_name, year, month, day),
footprint_geojson=_model.get_footprint_geojson(product_name, year, month, day),
product=product,
product_region_info=_model.STORE.get_product_region_info(product_name)
if region_geojson
else None,
product_region_info=(
_model.STORE.get_product_region_info(product_name)
if region_geojson
else None
),
# Summary for the whole product
product_summary=product_summary,
# Summary for the users' currently selected filters.
@@ -425,9 +427,7 @@ def timeline_page(product_name: str):
return redirect(url_for("product_page", product_name=product_name))


def _load_product(
product_name, year, month, day
) -> Tuple[
def _load_product(product_name, year, month, day) -> Tuple[
DatasetType,
ProductSummary,
TimePeriodOverview,
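Both _pages.py hunks are formatting-only, presumably mechanical output from the updated formatter hooks bumped in .pre-commit-config.yaml: conditional expressions passed as keyword arguments gain wrapping parentheses, and the _load_product signature is collapsed onto one line. A small self-contained illustration of the equivalence (names invented for the example):

# The parenthesised conditional is purely a layout change; behaviour is identical.
def get_region_info(name):
    return f"regions for {name}"


def lookup(name, region_geojson=None):
    return dict(
        product=name,
        # old layout:
        #   region_info=get_region_info(name)
        #   if region_geojson
        #   else None,
        region_info=(
            get_region_info(name) if region_geojson else None
        ),
    )


assert lookup("ls8", region_geojson={"type": "FeatureCollection"})["region_info"] == "regions for ls8"
assert lookup("ls8")["region_info"] is None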
(Diff listing truncated here: the remaining changed files are not shown.)
