From 18c55672c377b34cb984ccfa735a190a755d0c18 Mon Sep 17 00:00:00 2001 From: Sam Doran Date: Tue, 18 Jun 2024 15:51:40 -0400 Subject: [PATCH 01/26] [COST-4741] Add data_transfer_direction for AWS network costs to Trino tables (#5129) --- .../trino_sql/reporting_ocpawscostlineitem_daily_summary.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/koku/masu/database/trino_sql/reporting_ocpawscostlineitem_daily_summary.sql b/koku/masu/database/trino_sql/reporting_ocpawscostlineitem_daily_summary.sql index 819e20330b..7092ea16cf 100644 --- a/koku/masu/database/trino_sql/reporting_ocpawscostlineitem_daily_summary.sql +++ b/koku/masu/database/trino_sql/reporting_ocpawscostlineitem_daily_summary.sql @@ -12,6 +12,7 @@ CREATE TABLE IF NOT EXISTS {{schema | sqlsafe}}.aws_openshift_daily_resource_mat region varchar, unit varchar, usage_amount double, + data_transfer_direction varchar, currency_code varchar, unblended_cost double, blended_cost double, @@ -135,6 +136,7 @@ CREATE TABLE IF NOT EXISTS hive.{{schema | sqlsafe}}.reporting_ocpawscostlineite region varchar, unit varchar, usage_amount double, + data_transfer_direction varchar, currency_code varchar, unblended_cost double, markup_cost double, From 7d557504f38de83ba7c3c90f91883f984cf69ea5 Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Wed, 19 Jun 2024 17:58:11 +0100 Subject: [PATCH 02/26] [COST-5168] - Adding new penalty pipeline (#5176) * [COST-5168] - Adding new penalty pipeline --- deploy/clowdapp.yaml | 1430 ++++++++++++++++- deploy/kustomize/kustomization.yaml | 24 + deploy/kustomize/patches/scheduler.yaml | 2 +- .../patches/worker-cost-model-penalty.yaml | 227 +++ .../patches/worker-download-penalty.yaml | 231 +++ .../kustomize/patches/worker-ocp-penalty.yaml | 229 +++ .../patches/worker-priority-penalty.yaml | 235 +++ .../patches/worker-refresh-penalty.yaml | 227 +++ .../patches/worker-summary-penalty.yaml | 231 +++ koku/api/settings/cost_groups/view.py | 9 +- .../test/cost_groups/test_query_handler.py | 27 
+- koku/common/queues.py | 76 + koku/cost_models/cost_model_manager.py | 9 +- .../test/test_cost_model_manager.py | 6 +- koku/masu/api/ingress_reports.py | 6 +- koku/masu/api/process_openshift_on_cloud.py | 11 +- koku/masu/api/report_data.py | 11 +- koku/masu/api/update_cost_model_costs.py | 11 +- koku/masu/api/update_openshift_on_cloud.py | 12 +- koku/masu/api/upgrade_trino/test/test_view.py | 4 +- .../api/upgrade_trino/util/task_handler.py | 9 +- koku/masu/celery/tasks.py | 18 +- koku/masu/external/kafka_msg_handler.py | 9 +- .../commands/aws_null_bill_cleanup.py | 7 +- koku/masu/processor/__init__.py | 7 + koku/masu/processor/orchestrator.py | 15 +- koku/masu/processor/tasks.py | 113 +- .../api/test_process_openshift_on_cloud.py | 2 +- koku/masu/test/api/test_report_data.py | 35 +- .../test/api/test_update_cost_model_costs.py | 2 +- .../api/test_update_openshift_on_cloud.py | 2 +- .../test/external/test_kafka_msg_handler.py | 9 +- koku/masu/test/processor/test_tasks.py | 22 +- .../reporting_common/test_reporting_common.py | 19 +- koku/sources/tasks.py | 10 +- 35 files changed, 2975 insertions(+), 322 deletions(-) create mode 100644 deploy/kustomize/patches/worker-cost-model-penalty.yaml create mode 100644 deploy/kustomize/patches/worker-download-penalty.yaml create mode 100644 deploy/kustomize/patches/worker-ocp-penalty.yaml create mode 100644 deploy/kustomize/patches/worker-priority-penalty.yaml create mode 100644 deploy/kustomize/patches/worker-refresh-penalty.yaml create mode 100644 deploy/kustomize/patches/worker-summary-penalty.yaml create mode 100644 koku/common/queues.py diff --git a/deploy/clowdapp.yaml b/deploy/clowdapp.yaml index 68ad8be1ff..56c7730cf5 100644 --- a/deploy/clowdapp.yaml +++ b/deploy/clowdapp.yaml @@ -1610,6 +1610,188 @@ objects: enabled: false public: enabled: false + - metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + name: 
clowder-worker-cost-model-penalty + podSpec: + command: + - /bin/bash + - -c + - | + PYTHONPATH=${APP_HOME} celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_COST_MODEL_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + 
value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + image: ${IMAGE}:${IMAGE_TAG} + livenessProbe: + failureThreshold: 5 + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + readinessProbe: + failureThreshold: 5 + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + resources: + limits: + cpu: ${WORKER_COST_MODEL_PENALTY_CPU_LIMIT} + memory: ${WORKER_COST_MODEL_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_COST_MODEL_PENALTY_CPU_REQUEST} + memory: ${WORKER_COST_MODEL_PENALTY_MEMORY_REQUEST} + terminationGracePeriodSeconds: 3600 + volumeMounts: + - mountPath: /etc/aws + name: aws-credentials + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - mountPath: /etc/gcp + name: gcp-credentials + readOnly: true + - mountPath: /etc/oci + name: oci-credentials + 
readOnly: true + - mountPath: ${TMP_DIR} + name: tmp-data + volumes: + - emptyDir: {} + name: tmp-data + - emptyDir: {} + name: koku-worker-data + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + items: + - key: gcp-credentials + path: gcp-credentials.json + secretName: koku-gcp + - name: oci-credentials + secret: + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + secretName: koku-oci + replicas: ${{WORKER_COST_MODEL_PENALTY_REPLICAS}} + webServices: + private: + enabled: false + public: + enabled: false - metadata: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses @@ -1660,8 +1842,932 @@ objects: value: ${ENABLE_S3_ARCHIVING} - name: PARQUET_PROCESSING_BATCH_SIZE value: ${PARQUET_PROCESSING_BATCH_SIZE} - - name: PANDAS_COLUMN_BATCH_SIZE - value: ${PANDAS_COLUMN_BATCH_SIZE} + - name: PANDAS_COLUMN_BATCH_SIZE + value: ${PANDAS_COLUMN_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_DOWNLOAD_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: 
${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + image: ${IMAGE}:${IMAGE_TAG} + livenessProbe: + failureThreshold: 5 + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + readinessProbe: + failureThreshold: 5 + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + resources: + limits: + cpu: ${WORKER_DOWNLOAD_CPU_LIMIT} + memory: ${WORKER_DOWNLOAD_MEMORY_LIMIT} + requests: + cpu: ${WORKER_DOWNLOAD_CPU_REQUEST} + memory: ${WORKER_DOWNLOAD_MEMORY_REQUEST} + terminationGracePeriodSeconds: 3600 + volumeMounts: + - mountPath: /etc/aws + name: aws-credentials + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - mountPath: /etc/gcp + name: gcp-credentials + readOnly: true + - mountPath: /etc/oci + name: oci-credentials + readOnly: true + - mountPath: ${TMP_DIR} + name: tmp-data + volumes: + - emptyDir: {} + name: tmp-data + - emptyDir: {} + name: koku-worker-data + - name: aws-credentials + secret: + items: + - key: aws-credentials + 
path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + items: + - key: gcp-credentials + path: gcp-credentials.json + secretName: koku-gcp + - name: oci-credentials + secret: + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + secretName: koku-oci + replicas: ${{WORKER_DOWNLOAD_REPLICAS}} + webServices: + private: + enabled: false + public: + enabled: false + - metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + name: clowder-worker-download-xl + podSpec: + command: + - /bin/bash + - -c + - | + PYTHONPATH=${APP_HOME} celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: PANDAS_COLUMN_BATCH_SIZE + value: ${PANDAS_COLUMN_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: 
KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_DOWNLOAD_XL_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + image: ${IMAGE}:${IMAGE_TAG} + livenessProbe: + failureThreshold: 5 + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes 
+ readinessProbe: + failureThreshold: 5 + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + resources: + limits: + cpu: ${WORKER_DOWNLOAD_XL_CPU_LIMIT} + memory: ${WORKER_DOWNLOAD_XL_MEMORY_LIMIT} + requests: + cpu: ${WORKER_DOWNLOAD_XL_CPU_REQUEST} + memory: ${WORKER_DOWNLOAD_XL_MEMORY_REQUEST} + terminationGracePeriodSeconds: 3600 + volumeMounts: + - mountPath: /etc/aws + name: aws-credentials + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - mountPath: /etc/gcp + name: gcp-credentials + readOnly: true + - mountPath: /etc/oci + name: oci-credentials + readOnly: true + - mountPath: ${TMP_DIR} + name: tmp-data + volumes: + - emptyDir: {} + name: tmp-data + - emptyDir: {} + name: koku-worker-data + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + items: + - key: gcp-credentials + path: gcp-credentials.json + secretName: koku-gcp + - name: oci-credentials + secret: + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + secretName: koku-oci + replicas: ${{WORKER_DOWNLOAD_XL_REPLICAS}} + webServices: + private: + enabled: false + public: + enabled: false + - metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + name: clowder-worker-download-penalty + podSpec: + command: + - /bin/bash + - -c + - | + PYTHONPATH=${APP_HOME} celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: 
OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: PANDAS_COLUMN_BATCH_SIZE + value: ${PANDAS_COLUMN_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_DOWNLOAD_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: 
PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + image: ${IMAGE}:${IMAGE_TAG} + livenessProbe: + failureThreshold: 5 + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + readinessProbe: + failureThreshold: 5 + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + resources: + limits: + cpu: ${WORKER_DOWNLOAD_PENALTY_CPU_LIMIT} + memory: ${WORKER_DOWNLOAD_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_DOWNLOAD_PENALTY_CPU_REQUEST} + memory: ${WORKER_DOWNLOAD_PENALTY_MEMORY_REQUEST} + terminationGracePeriodSeconds: 3600 + volumeMounts: + - mountPath: /etc/aws + name: aws-credentials + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - mountPath: /etc/gcp + name: gcp-credentials + readOnly: true + - mountPath: /etc/oci + name: oci-credentials + readOnly: true + - mountPath: ${TMP_DIR} + name: tmp-data + volumes: + - emptyDir: {} + name: tmp-data + - emptyDir: {} + name: koku-worker-data + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + items: + - key: gcp-credentials + path: gcp-credentials.json + secretName: koku-gcp + - 
name: oci-credentials + secret: + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + secretName: koku-oci + replicas: ${{WORKER_DOWNLOAD_PENALTY_REPLICAS}} + webServices: + private: + enabled: false + public: + enabled: false + - metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + name: clowder-worker-ocp + podSpec: + command: + - /bin/bash + - -c + - | + PYTHONPATH=${APP_HOME} celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - 
name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_OCP_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + image: ${IMAGE}:${IMAGE_TAG} + livenessProbe: + failureThreshold: 5 + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + readinessProbe: + failureThreshold: 5 + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + resources: + limits: + cpu: ${WORKER_OCP_CPU_LIMIT} + memory: 
${WORKER_OCP_MEMORY_LIMIT} + requests: + cpu: ${WORKER_OCP_CPU_REQUEST} + memory: ${WORKER_OCP_MEMORY_REQUEST} + terminationGracePeriodSeconds: 3600 + volumeMounts: + - mountPath: /etc/aws + name: aws-credentials + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - mountPath: /etc/gcp + name: gcp-credentials + readOnly: true + - mountPath: /etc/oci + name: oci-credentials + readOnly: true + - mountPath: ${TMP_DIR} + name: tmp-data + volumes: + - emptyDir: {} + name: tmp-data + - emptyDir: {} + name: koku-worker-data + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + items: + - key: gcp-credentials + path: gcp-credentials.json + secretName: koku-gcp + - name: oci-credentials + secret: + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + secretName: koku-oci + replicas: ${{WORKER_OCP_REPLICAS}} + webServices: + private: + enabled: false + public: + enabled: false + - metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + name: clowder-worker-ocp-xl + podSpec: + command: + - /bin/bash + - -c + - | + PYTHONPATH=${APP_HOME} celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: 
DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_OCP_XL_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: 
WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + image: ${IMAGE}:${IMAGE_TAG} + livenessProbe: + failureThreshold: 5 + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at times for cost saving purposes + readinessProbe: + failureThreshold: 5 + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + timeoutSeconds: 10 + resources: + limits: + cpu: ${WORKER_OCP_XL_CPU_LIMIT} + memory: ${WORKER_OCP_XL_MEMORY_LIMIT} + requests: + cpu: ${WORKER_OCP_XL_CPU_REQUEST} + memory: ${WORKER_OCP_XL_MEMORY_REQUEST} + terminationGracePeriodSeconds: 3600 + volumeMounts: + - mountPath: /etc/aws + name: aws-credentials + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - mountPath: /etc/gcp + name: gcp-credentials + readOnly: true + - mountPath: /etc/oci + name: oci-credentials + readOnly: true + - mountPath: ${TMP_DIR} + name: tmp-data + volumes: + - emptyDir: {} + name: tmp-data + - emptyDir: {} + name: koku-worker-data + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + items: + - key: gcp-credentials + path: gcp-credentials.json + secretName: koku-gcp + - name: oci-credentials + secret: + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + secretName: koku-oci + replicas: ${{WORKER_OCP_XL_REPLICAS}} + webServices: + private: + enabled: false + public: + enabled: false + - metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses + 1 pod at 
times for cost saving purposes + name: clowder-worker-ocp-penalty + podSpec: + command: + - /bin/bash + - -c + - | + PYTHONPATH=${APP_HOME} celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} - name: TRINO_DATE_STEP value: ${TRINO_DATE_STEP} - name: KOKU_ENABLE_SENTRY @@ -1677,7 +2783,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_DOWNLOAD_WORKER_QUEUE} + value: ${WORKER_OCP_PENALTY_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -1745,11 +2851,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_DOWNLOAD_CPU_LIMIT} - memory: ${WORKER_DOWNLOAD_MEMORY_LIMIT} + cpu: ${WORKER_OCP_PENALTY_CPU_LIMIT} + memory: ${WORKER_OCP_PENALTY_MEMORY_LIMIT} requests: - cpu: ${WORKER_DOWNLOAD_CPU_REQUEST} - memory: ${WORKER_DOWNLOAD_MEMORY_REQUEST} + cpu: 
${WORKER_OCP_PENALTY_CPU_REQUEST} + memory: ${WORKER_OCP_PENALTY_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -1790,7 +2896,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_DOWNLOAD_REPLICAS}} + replicas: ${{WORKER_OCP_PENALTY_REPLICAS}} webServices: private: enabled: false @@ -1800,7 +2906,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-download-xl + name: clowder-worker-priority podSpec: command: - /bin/bash @@ -1863,7 +2969,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_DOWNLOAD_XL_WORKER_QUEUE} + value: ${WORKER_PRIORITY_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -1878,6 +2984,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: DELAYED_TASK_TIME + value: ${DELAYED_TASK_TIME} + - name: DELAYED_TASK_POLLING_MINUTES + value: ${DELAYED_TASK_POLLING_MINUTES} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -1931,11 +3041,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_DOWNLOAD_XL_CPU_LIMIT} - memory: ${WORKER_DOWNLOAD_XL_MEMORY_LIMIT} + cpu: ${WORKER_PRIORITY_CPU_LIMIT} + memory: ${WORKER_PRIORITY_MEMORY_LIMIT} requests: - cpu: ${WORKER_DOWNLOAD_XL_CPU_REQUEST} - memory: ${WORKER_DOWNLOAD_XL_MEMORY_REQUEST} + cpu: ${WORKER_PRIORITY_CPU_REQUEST} + memory: ${WORKER_PRIORITY_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -1976,7 +3086,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_DOWNLOAD_XL_REPLICAS}} + replicas: ${{WORKER_PRIORITY_REPLICAS}} webServices: private: enabled: false @@ -1986,7 +3096,7 @@ objects: annotations: 
ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-ocp + name: clowder-worker-priority-xl podSpec: command: - /bin/bash @@ -2032,6 +3142,8 @@ objects: value: ${ENABLE_S3_ARCHIVING} - name: PARQUET_PROCESSING_BATCH_SIZE value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: PANDAS_COLUMN_BATCH_SIZE + value: ${PANDAS_COLUMN_BATCH_SIZE} - name: TRINO_DATE_STEP value: ${TRINO_DATE_STEP} - name: KOKU_ENABLE_SENTRY @@ -2047,7 +3159,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_OCP_WORKER_QUEUE} + value: ${WORKER_PRIORITY_XL_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -2062,6 +3174,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: DELAYED_TASK_TIME + value: ${DELAYED_TASK_TIME} + - name: DELAYED_TASK_POLLING_MINUTES + value: ${DELAYED_TASK_POLLING_MINUTES} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2115,11 +3231,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_OCP_CPU_LIMIT} - memory: ${WORKER_OCP_MEMORY_LIMIT} + cpu: ${WORKER_PRIORITY_XL_CPU_LIMIT} + memory: ${WORKER_PRIORITY_XL_MEMORY_LIMIT} requests: - cpu: ${WORKER_OCP_CPU_REQUEST} - memory: ${WORKER_OCP_MEMORY_REQUEST} + cpu: ${WORKER_PRIORITY_XL_CPU_REQUEST} + memory: ${WORKER_PRIORITY_XL_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -2160,7 +3276,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_OCP_REPLICAS}} + replicas: ${{WORKER_PRIORITY_XL_REPLICAS}} webServices: private: enabled: false @@ -2170,7 +3286,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-ocp-xl + name: clowder-worker-priority-penalty podSpec: 
command: - /bin/bash @@ -2216,6 +3332,8 @@ objects: value: ${ENABLE_S3_ARCHIVING} - name: PARQUET_PROCESSING_BATCH_SIZE value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: PANDAS_COLUMN_BATCH_SIZE + value: ${PANDAS_COLUMN_BATCH_SIZE} - name: TRINO_DATE_STEP value: ${TRINO_DATE_STEP} - name: KOKU_ENABLE_SENTRY @@ -2231,7 +3349,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_OCP_XL_WORKER_QUEUE} + value: ${WORKER_PRIORITY_PENALTY_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -2246,6 +3364,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: DELAYED_TASK_TIME + value: ${DELAYED_TASK_TIME} + - name: DELAYED_TASK_POLLING_MINUTES + value: ${DELAYED_TASK_POLLING_MINUTES} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2299,11 +3421,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_OCP_XL_CPU_LIMIT} - memory: ${WORKER_OCP_XL_MEMORY_LIMIT} + cpu: ${WORKER_PRIORITY_PENALTY_CPU_LIMIT} + memory: ${WORKER_PRIORITY_PENALTY_MEMORY_LIMIT} requests: - cpu: ${WORKER_OCP_XL_CPU_REQUEST} - memory: ${WORKER_OCP_XL_MEMORY_REQUEST} + cpu: ${WORKER_PRIORITY_PENALTY_CPU_REQUEST} + memory: ${WORKER_PRIORITY_PENALTY_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -2344,7 +3466,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_OCP_XL_REPLICAS}} + replicas: ${{WORKER_PRIORITY_PENALTY_REPLICAS}} webServices: private: enabled: false @@ -2354,7 +3476,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-priority + name: clowder-worker-refresh podSpec: command: - /bin/bash @@ -2400,8 +3522,6 @@ objects: value: ${ENABLE_S3_ARCHIVING} - name: PARQUET_PROCESSING_BATCH_SIZE value: 
${PARQUET_PROCESSING_BATCH_SIZE} - - name: PANDAS_COLUMN_BATCH_SIZE - value: ${PANDAS_COLUMN_BATCH_SIZE} - name: TRINO_DATE_STEP value: ${TRINO_DATE_STEP} - name: KOKU_ENABLE_SENTRY @@ -2417,7 +3537,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_PRIORITY_WORKER_QUEUE} + value: ${WORKER_REFRESH_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -2432,12 +3552,6 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} - - name: DELAYED_TASK_TIME - value: ${DELAYED_TASK_TIME} - - name: DELAYED_TASK_POLLING_MINUTES - value: ${DELAYED_TASK_POLLING_MINUTES} - - name: TAG_ENABLED_LIMIT - value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -2489,11 +3603,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_PRIORITY_CPU_LIMIT} - memory: ${WORKER_PRIORITY_MEMORY_LIMIT} + cpu: ${WORKER_REFRESH_CPU_LIMIT} + memory: ${WORKER_REFRESH_MEMORY_LIMIT} requests: - cpu: ${WORKER_PRIORITY_CPU_REQUEST} - memory: ${WORKER_PRIORITY_MEMORY_REQUEST} + cpu: ${WORKER_REFRESH_CPU_REQUEST} + memory: ${WORKER_REFRESH_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -2534,7 +3648,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_PRIORITY_REPLICAS}} + replicas: ${{WORKER_REFRESH_REPLICAS}} webServices: private: enabled: false @@ -2544,7 +3658,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-priority-xl + name: clowder-worker-refresh-xl podSpec: command: - /bin/bash @@ -2590,8 +3704,6 @@ objects: value: ${ENABLE_S3_ARCHIVING} - name: PARQUET_PROCESSING_BATCH_SIZE value: ${PARQUET_PROCESSING_BATCH_SIZE} - - name: PANDAS_COLUMN_BATCH_SIZE - value: ${PANDAS_COLUMN_BATCH_SIZE} - name: TRINO_DATE_STEP value: 
${TRINO_DATE_STEP} - name: KOKU_ENABLE_SENTRY @@ -2607,7 +3719,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_PRIORITY_XL_WORKER_QUEUE} + value: ${WORKER_REFRESH_XL_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -2622,12 +3734,6 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} - - name: DELAYED_TASK_TIME - value: ${DELAYED_TASK_TIME} - - name: DELAYED_TASK_POLLING_MINUTES - value: ${DELAYED_TASK_POLLING_MINUTES} - - name: TAG_ENABLED_LIMIT - value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -2679,11 +3785,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_PRIORITY_XL_CPU_LIMIT} - memory: ${WORKER_PRIORITY_XL_MEMORY_LIMIT} + cpu: ${WORKER_REFRESH_XL_CPU_LIMIT} + memory: ${WORKER_REFRESH_XL_MEMORY_LIMIT} requests: - cpu: ${WORKER_PRIORITY_XL_CPU_REQUEST} - memory: ${WORKER_PRIORITY_XL_MEMORY_REQUEST} + cpu: ${WORKER_REFRESH_XL_CPU_REQUEST} + memory: ${WORKER_REFRESH_XL_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -2724,7 +3830,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_PRIORITY_XL_REPLICAS}} + replicas: ${{WORKER_REFRESH_XL_REPLICAS}} webServices: private: enabled: false @@ -2734,7 +3840,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-refresh + name: clowder-worker-refresh-penalty podSpec: command: - /bin/bash @@ -2795,7 +3901,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_REFRESH_WORKER_QUEUE} + value: ${WORKER_REFRESH_PENALTY_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -2861,11 +3967,11 @@ objects: timeoutSeconds: 10 resources: 
limits: - cpu: ${WORKER_REFRESH_CPU_LIMIT} - memory: ${WORKER_REFRESH_MEMORY_LIMIT} + cpu: ${WORKER_REFRESH_PENALTY_CPU_LIMIT} + memory: ${WORKER_REFRESH_PENALTY_MEMORY_LIMIT} requests: - cpu: ${WORKER_REFRESH_CPU_REQUEST} - memory: ${WORKER_REFRESH_MEMORY_REQUEST} + cpu: ${WORKER_REFRESH_PENALTY_CPU_REQUEST} + memory: ${WORKER_REFRESH_PENALTY_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -2906,7 +4012,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_REFRESH_REPLICAS}} + replicas: ${{WORKER_REFRESH_PENALTY_REPLICAS}} webServices: private: enabled: false @@ -2916,7 +4022,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-refresh-xl + name: clowder-worker-summary podSpec: command: - /bin/bash @@ -2977,7 +4083,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_REFRESH_XL_WORKER_QUEUE} + value: ${WORKER_SUMMARY_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -2992,6 +4098,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: DELAYED_TASK_TIME + value: ${DELAYED_TASK_TIME} + - name: DELAYED_TASK_POLLING_MINUTES + value: ${DELAYED_TASK_POLLING_MINUTES} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -3043,11 +4153,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_REFRESH_XL_CPU_LIMIT} - memory: ${WORKER_REFRESH_XL_MEMORY_LIMIT} + cpu: ${WORKER_SUMMARY_CPU_LIMIT} + memory: ${WORKER_SUMMARY_MEMORY_LIMIT} requests: - cpu: ${WORKER_REFRESH_XL_CPU_REQUEST} - memory: ${WORKER_REFRESH_XL_MEMORY_REQUEST} + cpu: ${WORKER_SUMMARY_CPU_REQUEST} + memory: ${WORKER_SUMMARY_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -3088,7 +4198,7 @@ objects: - key: 
oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_REFRESH_XL_REPLICAS}} + replicas: ${{WORKER_SUMMARY_REPLICAS}} webServices: private: enabled: false @@ -3098,7 +4208,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-summary + name: clowder-worker-summary-xl podSpec: command: - /bin/bash @@ -3159,7 +4269,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_SUMMARY_WORKER_QUEUE} + value: ${WORKER_SUMMARY_XL_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -3229,11 +4339,11 @@ objects: timeoutSeconds: 10 resources: limits: - cpu: ${WORKER_SUMMARY_CPU_LIMIT} - memory: ${WORKER_SUMMARY_MEMORY_LIMIT} + cpu: ${WORKER_SUMMARY_XL_CPU_LIMIT} + memory: ${WORKER_SUMMARY_XL_MEMORY_LIMIT} requests: - cpu: ${WORKER_SUMMARY_CPU_REQUEST} - memory: ${WORKER_SUMMARY_MEMORY_REQUEST} + cpu: ${WORKER_SUMMARY_XL_CPU_REQUEST} + memory: ${WORKER_SUMMARY_XL_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -3274,7 +4384,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_SUMMARY_REPLICAS}} + replicas: ${{WORKER_SUMMARY_XL_REPLICAS}} webServices: private: enabled: false @@ -3284,7 +4394,7 @@ objects: annotations: ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes - name: clowder-worker-summary-xl + name: clowder-worker-summary-penalty podSpec: command: - /bin/bash @@ -3345,7 +4455,7 @@ objects: - name: DEMO_ACCOUNTS value: ${DEMO_ACCOUNTS} - name: WORKER_QUEUES - value: ${WORKER_SUMMARY_XL_WORKER_QUEUE} + value: ${WORKER_SUMMARY_PENALTY_WORKER_QUEUE} - name: WORKER_PROC_ALIVE_TIMEOUT value: ${WORKER_PROC_ALIVE_TIMEOUT} - name: DATE_OVERRIDE @@ -3415,11 +4525,11 @@ objects: timeoutSeconds: 10 resources: limits: - 
cpu: ${WORKER_SUMMARY_XL_CPU_LIMIT} - memory: ${WORKER_SUMMARY_XL_MEMORY_LIMIT} + cpu: ${WORKER_SUMMARY_PENALTY_CPU_LIMIT} + memory: ${WORKER_SUMMARY_PENALTY_MEMORY_LIMIT} requests: - cpu: ${WORKER_SUMMARY_XL_CPU_REQUEST} - memory: ${WORKER_SUMMARY_XL_MEMORY_REQUEST} + cpu: ${WORKER_SUMMARY_PENALTY_CPU_REQUEST} + memory: ${WORKER_SUMMARY_PENALTY_MEMORY_REQUEST} terminationGracePeriodSeconds: 3600 volumeMounts: - mountPath: /etc/aws @@ -3460,7 +4570,7 @@ objects: - key: oci-config path: oci-config secretName: koku-oci - replicas: ${{WORKER_SUMMARY_XL_REPLICAS}} + replicas: ${{WORKER_SUMMARY_PENALTY_REPLICAS}} webServices: private: enabled: false @@ -4654,7 +5764,7 @@ parameters: - displayName: Worker Queue name: SCHEDULER_WORKER_QUEUE required: true - value: cost_model,cost_model_xl,download,download_xl,hcs,ocp,ocp_xl,priority,priority_xl,refresh,refresh_xl,summary,summary_xl + value: cost_model,cost_model_xl,cost_model_penalty,download,download_xl,download_penalty,hcs,ocp,ocp_xl,ocp_penalty,priority,priority_xl,priority_penalty,refresh,refresh_xl,refresh_penalty,summary,summary_xl,summary_penalty - displayName: Scheduler report checks name: SCHEDULE_REPORT_CHECKS value: "True" @@ -4781,6 +5891,30 @@ parameters: name: WORKER_COST_MODEL_XL_WORKER_QUEUE required: true value: cost_model_xl +- displayName: Minimum replicas + name: WORKER_COST_MODEL_PENALTY_REPLICAS + required: true + value: "2" +- displayName: Memory Request + name: WORKER_COST_MODEL_PENALTY_MEMORY_REQUEST + required: true + value: 256Mi +- displayName: Memory Limit + name: WORKER_COST_MODEL_PENALTY_MEMORY_LIMIT + required: true + value: 512Mi +- displayName: CPU Request + name: WORKER_COST_MODEL_PENALTY_CPU_REQUEST + required: true + value: 100m +- displayName: CPU Limit + name: WORKER_COST_MODEL_PENALTY_CPU_LIMIT + required: true + value: 200m +- displayName: Worker Queue + name: WORKER_COST_MODEL_PENALTY_WORKER_QUEUE + required: true + value: cost_model_penalty - displayName: Minimum replicas name: 
WORKER_DOWNLOAD_REPLICAS required: true @@ -4829,6 +5963,30 @@ parameters: name: WORKER_DOWNLOAD_XL_WORKER_QUEUE required: true value: download_xl +- displayName: Minimum replicas + name: WORKER_DOWNLOAD_PENALTY_REPLICAS + required: true + value: "2" +- displayName: Memory Request + name: WORKER_DOWNLOAD_PENALTY_MEMORY_REQUEST + required: true + value: 512Mi +- displayName: Memory Limit + name: WORKER_DOWNLOAD_PENALTY_MEMORY_LIMIT + required: true + value: 1Gi +- displayName: CPU Request + name: WORKER_DOWNLOAD_PENALTY_CPU_REQUEST + required: true + value: 200m +- displayName: CPU Limit + name: WORKER_DOWNLOAD_PENALTY_CPU_LIMIT + required: true + value: 400m +- displayName: Worker Queue + name: WORKER_DOWNLOAD_PENALTY_WORKER_QUEUE + required: true + value: download_penalty - displayName: Minimum replicas name: WORKER_OCP_REPLICAS required: true @@ -4877,6 +6035,30 @@ parameters: name: WORKER_OCP_XL_WORKER_QUEUE required: true value: ocp_xl +- displayName: Minimum replicas + name: WORKER_OCP_PENALTY_REPLICAS + required: true + value: "2" +- displayName: Memory Request + name: WORKER_OCP_PENALTY_MEMORY_REQUEST + required: true + value: 256Mi +- displayName: Memory Limit + name: WORKER_OCP_PENALTY_MEMORY_LIMIT + required: true + value: 512Mi +- displayName: CPU Request + name: WORKER_OCP_PENALTY_CPU_REQUEST + required: true + value: 100m +- displayName: CPU Limit + name: WORKER_OCP_PENALTY_CPU_LIMIT + required: true + value: 200m +- displayName: Worker Queue + name: WORKER_OCP_PENALTY_WORKER_QUEUE + required: true + value: ocp_penalty - displayName: Minimum replicas name: WORKER_PRIORITY_REPLICAS required: true @@ -4925,6 +6107,30 @@ parameters: name: WORKER_PRIORITY_XL_WORKER_QUEUE required: true value: priority_xl +- displayName: Minimum replicas + name: WORKER_PRIORITY_PENALTY_REPLICAS + required: true + value: "2" +- displayName: Memory Request + name: WORKER_PRIORITY_PENALTY_MEMORY_REQUEST + required: true + value: 400Mi +- displayName: Memory Limit + name: 
WORKER_PRIORITY_PENALTY_MEMORY_LIMIT + required: true + value: 750Mi +- displayName: CPU Request + name: WORKER_PRIORITY_PENALTY_CPU_REQUEST + required: true + value: 150m +- displayName: CPU Limit + name: WORKER_PRIORITY_PENALTY_CPU_LIMIT + required: true + value: 300m +- displayName: Worker Queue + name: WORKER_PRIORITY_PENALTY_WORKER_QUEUE + required: true + value: priority_penalty - displayName: Minimum replicas name: WORKER_REFRESH_REPLICAS required: true @@ -4973,6 +6179,30 @@ parameters: name: WORKER_REFRESH_XL_WORKER_QUEUE required: true value: refresh_xl +- displayName: Minimum replicas + name: WORKER_REFRESH_PENALTY_REPLICAS + required: true + value: "2" +- displayName: Memory Request + name: WORKER_REFRESH_PENALTY_MEMORY_REQUEST + required: true + value: 256Mi +- displayName: Memory Limit + name: WORKER_REFRESH_PENALTY_MEMORY_LIMIT + required: true + value: 512Mi +- displayName: CPU Request + name: WORKER_REFRESH_PENALTY_CPU_REQUEST + required: true + value: 100m +- displayName: CPU Limit + name: WORKER_REFRESH_PENALTY_CPU_LIMIT + required: true + value: 200m +- displayName: Worker Queue + name: WORKER_REFRESH_PENALTY_WORKER_QUEUE + required: true + value: refresh_penalty - displayName: Minimum replicas name: WORKER_SUMMARY_REPLICAS required: true @@ -5021,6 +6251,30 @@ parameters: name: WORKER_SUMMARY_XL_WORKER_QUEUE required: true value: summary_xl +- displayName: Minimum replicas + name: WORKER_SUMMARY_PENALTY_REPLICAS + required: true + value: "2" +- displayName: Memory Request + name: WORKER_SUMMARY_PENALTY_MEMORY_REQUEST + required: true + value: 500Mi +- displayName: Memory Limit + name: WORKER_SUMMARY_PENALTY_MEMORY_LIMIT + required: true + value: 750Mi +- displayName: CPU Request + name: WORKER_SUMMARY_PENALTY_CPU_REQUEST + required: true + value: 100m +- displayName: CPU Limit + name: WORKER_SUMMARY_PENALTY_CPU_LIMIT + required: true + value: 200m +- displayName: Worker Queue + name: WORKER_SUMMARY_PENALTY_WORKER_QUEUE + required: true + value: 
summary_penalty - displayName: Minimum replicas name: WORKER_HCS_REPLICAS required: true diff --git a/deploy/kustomize/kustomization.yaml b/deploy/kustomize/kustomization.yaml index 196ee68675..356ed122c1 100644 --- a/deploy/kustomize/kustomization.yaml +++ b/deploy/kustomize/kustomization.yaml @@ -42,6 +42,10 @@ patches: target: version: v1 kind: Template +- path: patches/worker-cost-model-penalty.yaml + target: + version: v1 + kind: Template - path: patches/worker-download.yaml target: version: v1 @@ -50,6 +54,10 @@ patches: target: version: v1 kind: Template +- path: patches/worker-download-penalty.yaml + target: + version: v1 + kind: Template - path: patches/worker-ocp.yaml target: version: v1 @@ -58,6 +66,10 @@ patches: target: version: v1 kind: Template +- path: patches/worker-ocp-penalty.yaml + target: + version: v1 + kind: Template - path: patches/worker-priority.yaml target: version: v1 @@ -66,6 +78,10 @@ patches: target: version: v1 kind: Template +- path: patches/worker-priority-penalty.yaml + target: + version: v1 + kind: Template - path: patches/worker-refresh.yaml target: version: v1 @@ -74,6 +90,10 @@ patches: target: version: v1 kind: Template +- path: patches/worker-refresh-penalty.yaml + target: + version: v1 + kind: Template - path: patches/worker-summary.yaml target: version: v1 @@ -82,6 +102,10 @@ patches: target: version: v1 kind: Template +- path: patches/worker-summary-penalty.yaml + target: + version: v1 + kind: Template - path: patches/worker-hcs.yaml target: version: v1 diff --git a/deploy/kustomize/patches/scheduler.yaml b/deploy/kustomize/patches/scheduler.yaml index f6ba571ee8..cc5be798e3 100644 --- a/deploy/kustomize/patches/scheduler.yaml +++ b/deploy/kustomize/patches/scheduler.yaml @@ -201,7 +201,7 @@ displayName: Worker Queue name: SCHEDULER_WORKER_QUEUE required: true - value: 'cost_model,cost_model_xl,download,download_xl,hcs,ocp,ocp_xl,priority,priority_xl,refresh,refresh_xl,summary,summary_xl' + value: 
'cost_model,cost_model_xl,cost_model_penalty,download,download_xl,download_penalty,hcs,ocp,ocp_xl,ocp_penalty,priority,priority_xl,priority_penalty,refresh,refresh_xl,refresh_penalty,summary,summary_xl,summary_penalty' - op: add path: /parameters/- value: diff --git a/deploy/kustomize/patches/worker-cost-model-penalty.yaml b/deploy/kustomize/patches/worker-cost-model-penalty.yaml new file mode 100644 index 0000000000..092a09e2fc --- /dev/null +++ b/deploy/kustomize/patches/worker-cost-model-penalty.yaml @@ -0,0 +1,227 @@ +- op: add + path: /objects/0/spec/deployments/- + value: + name: clowder-worker-cost-model-penalty + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + replicas: ${{WORKER_COST_MODEL_PENALTY_REPLICAS}} + webServices: + public: + enabled: false + private: + enabled: false + podSpec: + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + image: ${IMAGE}:${IMAGE_TAG} + command: + - /bin/bash + - -c + - > # ${APP_HOME} is `/opt/koku/koku` which is defined in the Dockerfile + PYTHONPATH=${APP_HOME} + celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + 
- name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_COST_MODEL_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: 
${ENHANCED_ORG_ADMIN} + livenessProbe: + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + terminationGracePeriodSeconds: 3600 + resources: + limits: + cpu: ${WORKER_COST_MODEL_PENALTY_CPU_LIMIT} + memory: ${WORKER_COST_MODEL_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_COST_MODEL_PENALTY_CPU_REQUEST} + memory: ${WORKER_COST_MODEL_PENALTY_MEMORY_REQUEST} + volumeMounts: + - name: aws-credentials + mountPath: /etc/aws + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - name: gcp-credentials + mountPath: /etc/gcp + readOnly: true + - name: oci-credentials + mountPath: /etc/oci + readOnly: true + - name: tmp-data + mountPath: ${TMP_DIR} + volumes: + - name: tmp-data + emptyDir: {} + - name: koku-worker-data + emptyDir: {} + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + secretName: koku-gcp + items: + - key: gcp-credentials + path: gcp-credentials.json + - name: oci-credentials + secret: + secretName: koku-oci + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + +- op: add + path: /parameters/- + value: + displayName: Minimum replicas + name: WORKER_COST_MODEL_PENALTY_REPLICAS + required: true + value: '2' +- op: add + path: /parameters/- + value: + displayName: Memory Request + name: WORKER_COST_MODEL_PENALTY_MEMORY_REQUEST + required: true + value: 256Mi +- op: add + path: /parameters/- + value: + displayName: Memory Limit + name: WORKER_COST_MODEL_PENALTY_MEMORY_LIMIT + required: true + value: 512Mi +- op: add + path: /parameters/- + value: + displayName: CPU Request + name: 
WORKER_COST_MODEL_PENALTY_CPU_REQUEST + required: true + value: 100m +- op: add + path: /parameters/- + value: + displayName: CPU Limit + name: WORKER_COST_MODEL_PENALTY_CPU_LIMIT + required: true + value: 200m +- op: add + path: /parameters/- + value: + displayName: Worker Queue + name: WORKER_COST_MODEL_PENALTY_WORKER_QUEUE + required: true + value: 'cost_model_penalty' diff --git a/deploy/kustomize/patches/worker-download-penalty.yaml b/deploy/kustomize/patches/worker-download-penalty.yaml new file mode 100644 index 0000000000..198d7d3678 --- /dev/null +++ b/deploy/kustomize/patches/worker-download-penalty.yaml @@ -0,0 +1,231 @@ +- op: add + path: /objects/0/spec/deployments/- + value: + name: clowder-worker-download-penalty + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + replicas: ${{WORKER_DOWNLOAD_PENALTY_REPLICAS}} + webServices: + public: + enabled: false + private: + enabled: false + podSpec: + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + image: ${IMAGE}:${IMAGE_TAG} + command: + - /bin/bash + - -c + - > # ${APP_HOME} is `/opt/koku/koku` which is defined in the Dockerfile + PYTHONPATH=${APP_HOME} + celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: 
metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: PANDAS_COLUMN_BATCH_SIZE + value: ${PANDAS_COLUMN_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_DOWNLOAD_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: 
WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + livenessProbe: + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + terminationGracePeriodSeconds: 3600 + resources: + limits: + cpu: ${WORKER_DOWNLOAD_PENALTY_CPU_LIMIT} + memory: ${WORKER_DOWNLOAD_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_DOWNLOAD_PENALTY_CPU_REQUEST} + memory: ${WORKER_DOWNLOAD_PENALTY_MEMORY_REQUEST} + volumeMounts: + - name: aws-credentials + mountPath: /etc/aws + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - name: gcp-credentials + mountPath: /etc/gcp + readOnly: true + - name: oci-credentials + mountPath: /etc/oci + readOnly: true + - name: tmp-data + mountPath: ${TMP_DIR} + volumes: + - name: tmp-data + emptyDir: {} + - name: koku-worker-data + emptyDir: {} + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + secretName: koku-gcp + items: + - key: gcp-credentials + path: gcp-credentials.json + - name: oci-credentials + secret: + secretName: koku-oci + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + +- op: add + path: /parameters/- + value: + displayName: Minimum replicas + name: WORKER_DOWNLOAD_PENALTY_REPLICAS + required: true + value: '2' +- op: add + path: /parameters/- + value: + displayName: Memory Request + name: WORKER_DOWNLOAD_PENALTY_MEMORY_REQUEST + 
required: true + value: 512Mi +- op: add + path: /parameters/- + value: + displayName: Memory Limit + name: WORKER_DOWNLOAD_PENALTY_MEMORY_LIMIT + required: true + value: 1Gi +- op: add + path: /parameters/- + value: + displayName: CPU Request + name: WORKER_DOWNLOAD_PENALTY_CPU_REQUEST + required: true + value: 200m +- op: add + path: /parameters/- + value: + displayName: CPU Limit + name: WORKER_DOWNLOAD_PENALTY_CPU_LIMIT + required: true + value: 400m +- op: add + path: /parameters/- + value: + displayName: Worker Queue + name: WORKER_DOWNLOAD_PENALTY_WORKER_QUEUE + required: true + value: 'download_penalty' diff --git a/deploy/kustomize/patches/worker-ocp-penalty.yaml b/deploy/kustomize/patches/worker-ocp-penalty.yaml new file mode 100644 index 0000000000..bfdd2b4c12 --- /dev/null +++ b/deploy/kustomize/patches/worker-ocp-penalty.yaml @@ -0,0 +1,229 @@ +- op: add + path: /objects/0/spec/deployments/- + value: + name: clowder-worker-ocp-penalty + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + replicas: ${{WORKER_OCP_PENALTY_REPLICAS}} + webServices: + public: + enabled: false + private: + enabled: false + podSpec: + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + image: ${IMAGE}:${IMAGE_TAG} + command: + - /bin/bash + - -c + - > # ${APP_HOME} is `/opt/koku/koku` which is defined in the Dockerfile + PYTHONPATH=${APP_HOME} + celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: 
OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_OCP_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: 
UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + livenessProbe: + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + terminationGracePeriodSeconds: 3600 + resources: + limits: + cpu: ${WORKER_OCP_PENALTY_CPU_LIMIT} + memory: ${WORKER_OCP_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_OCP_PENALTY_CPU_REQUEST} + memory: ${WORKER_OCP_PENALTY_MEMORY_REQUEST} + volumeMounts: + - name: aws-credentials + mountPath: /etc/aws + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - name: gcp-credentials + mountPath: /etc/gcp + readOnly: true + - name: oci-credentials + mountPath: /etc/oci + readOnly: true + - name: tmp-data + mountPath: ${TMP_DIR} + volumes: + - name: tmp-data + emptyDir: {} + - name: koku-worker-data + emptyDir: {} + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + secretName: koku-gcp + items: + - key: gcp-credentials + path: gcp-credentials.json + - name: oci-credentials + secret: + secretName: koku-oci + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + +- op: add + path: /parameters/- + value: + displayName: Minimum replicas + name: WORKER_OCP_PENALTY_REPLICAS + required: true + value: '2' +- op: add + path: 
/parameters/- + value: + displayName: Memory Request + name: WORKER_OCP_PENALTY_MEMORY_REQUEST + required: true + value: 256Mi +- op: add + path: /parameters/- + value: + displayName: Memory Limit + name: WORKER_OCP_PENALTY_MEMORY_LIMIT + required: true + value: 512Mi +- op: add + path: /parameters/- + value: + displayName: CPU Request + name: WORKER_OCP_PENALTY_CPU_REQUEST + required: true + value: 100m +- op: add + path: /parameters/- + value: + displayName: CPU Limit + name: WORKER_OCP_PENALTY_CPU_LIMIT + required: true + value: 200m +- op: add + path: /parameters/- + value: + displayName: Worker Queue + name: WORKER_OCP_PENALTY_WORKER_QUEUE + required: true + value: 'ocp_penalty' diff --git a/deploy/kustomize/patches/worker-priority-penalty.yaml b/deploy/kustomize/patches/worker-priority-penalty.yaml new file mode 100644 index 0000000000..1e91e79e4a --- /dev/null +++ b/deploy/kustomize/patches/worker-priority-penalty.yaml @@ -0,0 +1,235 @@ +- op: add + path: /objects/0/spec/deployments/- + value: + name: clowder-worker-priority-penalty + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + replicas: ${{WORKER_PRIORITY_PENALTY_REPLICAS}} + webServices: + public: + enabled: false + private: + enabled: false + podSpec: + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + image: ${IMAGE}:${IMAGE_TAG} + command: + - /bin/bash + - -c + - > # ${APP_HOME} is `/opt/koku/koku` which is defined in the Dockerfile + PYTHONPATH=${APP_HOME} + celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: 
${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: PANDAS_COLUMN_BATCH_SIZE + value: ${PANDAS_COLUMN_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_PRIORITY_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: DELAYED_TASK_TIME + value: ${DELAYED_TASK_TIME} + - name: DELAYED_TASK_POLLING_MINUTES + value: ${DELAYED_TASK_POLLING_MINUTES} + - name: TAG_ENABLED_LIMIT + value: ${TAG_ENABLED_LIMIT} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: 
TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + livenessProbe: + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + terminationGracePeriodSeconds: 3600 + resources: + limits: + cpu: ${WORKER_PRIORITY_PENALTY_CPU_LIMIT} + memory: ${WORKER_PRIORITY_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_PRIORITY_PENALTY_CPU_REQUEST} + memory: ${WORKER_PRIORITY_PENALTY_MEMORY_REQUEST} + volumeMounts: + - name: aws-credentials + mountPath: /etc/aws + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - name: gcp-credentials + mountPath: /etc/gcp + readOnly: true + - name: oci-credentials + mountPath: /etc/oci + readOnly: true + - name: tmp-data + mountPath: ${TMP_DIR} + volumes: + - name: tmp-data + emptyDir: {} + - name: koku-worker-data + emptyDir: {} + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + secretName: koku-gcp + items: + - key: gcp-credentials + path: gcp-credentials.json 
+ - name: oci-credentials + secret: + secretName: koku-oci + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + +- op: add + path: /parameters/- + value: + displayName: Minimum replicas + name: WORKER_PRIORITY_PENALTY_REPLICAS + required: true + value: '2' +- op: add + path: /parameters/- + value: + displayName: Memory Request + name: WORKER_PRIORITY_PENALTY_MEMORY_REQUEST + required: true + value: 400Mi +- op: add + path: /parameters/- + value: + displayName: Memory Limit + name: WORKER_PRIORITY_PENALTY_MEMORY_LIMIT + required: true + value: 750Mi +- op: add + path: /parameters/- + value: + displayName: CPU Request + name: WORKER_PRIORITY_PENALTY_CPU_REQUEST + required: true + value: 150m +- op: add + path: /parameters/- + value: + displayName: CPU Limit + name: WORKER_PRIORITY_PENALTY_CPU_LIMIT + required: true + value: 300m +- op: add + path: /parameters/- + value: + displayName: Worker Queue + name: WORKER_PRIORITY_PENALTY_WORKER_QUEUE + required: true + value: 'priority_penalty' diff --git a/deploy/kustomize/patches/worker-refresh-penalty.yaml b/deploy/kustomize/patches/worker-refresh-penalty.yaml new file mode 100644 index 0000000000..94284a336b --- /dev/null +++ b/deploy/kustomize/patches/worker-refresh-penalty.yaml @@ -0,0 +1,227 @@ +- op: add + path: /objects/0/spec/deployments/- + value: + name: clowder-worker-refresh-penalty + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + replicas: ${{WORKER_REFRESH_PENALTY_REPLICAS}} + webServices: + public: + enabled: false + private: + enabled: false + podSpec: + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + image: ${IMAGE}:${IMAGE_TAG} + command: + - /bin/bash + - -c + - > # ${APP_HOME} is `/opt/koku/koku` which is defined in the Dockerfile + PYTHONPATH=${APP_HOME} + celery -A 
koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_REFRESH_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: 
${POLLING_BATCH_SIZE} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + livenessProbe: + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + terminationGracePeriodSeconds: 3600 + resources: + limits: + cpu: ${WORKER_REFRESH_PENALTY_CPU_LIMIT} + memory: ${WORKER_REFRESH_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_REFRESH_PENALTY_CPU_REQUEST} + memory: ${WORKER_REFRESH_PENALTY_MEMORY_REQUEST} + volumeMounts: + - name: aws-credentials + mountPath: /etc/aws + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - name: gcp-credentials + mountPath: /etc/gcp + readOnly: true + - name: oci-credentials + mountPath: /etc/oci + readOnly: true + - name: tmp-data + mountPath: ${TMP_DIR} + volumes: + - name: tmp-data + emptyDir: {} + - name: koku-worker-data + emptyDir: {} + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + secretName: 
koku-gcp + items: + - key: gcp-credentials + path: gcp-credentials.json + - name: oci-credentials + secret: + secretName: koku-oci + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + +- op: add + path: /parameters/- + value: + displayName: Minimum replicas + name: WORKER_REFRESH_PENALTY_REPLICAS + required: true + value: '2' +- op: add + path: /parameters/- + value: + displayName: Memory Request + name: WORKER_REFRESH_PENALTY_MEMORY_REQUEST + required: true + value: 256Mi +- op: add + path: /parameters/- + value: + displayName: Memory Limit + name: WORKER_REFRESH_PENALTY_MEMORY_LIMIT + required: true + value: 512Mi +- op: add + path: /parameters/- + value: + displayName: CPU Request + name: WORKER_REFRESH_PENALTY_CPU_REQUEST + required: true + value: 100m +- op: add + path: /parameters/- + value: + displayName: CPU Limit + name: WORKER_REFRESH_PENALTY_CPU_LIMIT + required: true + value: 200m +- op: add + path: /parameters/- + value: + displayName: Worker Queue + name: WORKER_REFRESH_PENALTY_WORKER_QUEUE + required: true + value: 'refresh_penalty' diff --git a/deploy/kustomize/patches/worker-summary-penalty.yaml b/deploy/kustomize/patches/worker-summary-penalty.yaml new file mode 100644 index 0000000000..f301847c2c --- /dev/null +++ b/deploy/kustomize/patches/worker-summary-penalty.yaml @@ -0,0 +1,231 @@ +- op: add + path: /objects/0/spec/deployments/- + value: + name: clowder-worker-summary-penalty + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + replicas: ${{WORKER_SUMMARY_PENALTY_REPLICAS}} + webServices: + public: + enabled: false + private: + enabled: false + podSpec: + metadata: + annotations: + ignore-check.kube-linter.io/minimum-three-replicas: This deployment uses 1 pod at times for cost saving purposes + image: ${IMAGE}:${IMAGE_TAG} + command: + - /bin/bash + - -c + - > # ${APP_HOME} is `/opt/koku/koku` which is 
defined in the Dockerfile + PYTHONPATH=${APP_HOME} + celery -A koku worker --without-gossip -E -l $CELERY_LOG_LEVEL -Q $WORKER_QUEUES + env: + - name: CLOWDER_ENABLED + value: ${CLOWDER_ENABLED} + - name: AWS_SHARED_CREDENTIALS_FILE + value: ${AWS_SHARED_CREDENTIALS_FILE} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: ${GOOGLE_APPLICATION_CREDENTIALS} + - name: OCI_SHARED_CREDENTIALS_FILE + value: ${OCI_SHARED_CREDENTIALS_FILE} + - name: OCI_CLI_KEY_FILE + value: ${OCI_CLI_KEY_FILE} + - name: OCI_PYTHON_SDK_NO_SERVICE_IMPORTS + value: "true" + - name: SOURCES_PSK + valueFrom: + secretKeyRef: + key: psk + name: ${SOURCES_PSK_SECRET_NAME} + optional: true + - name: APP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: DEVELOPMENT + value: ${DEVELOPMENT} + - name: CELERY_LOG_LEVEL + value: ${CELERY_LOG_LEVEL} + - name: KOKU_LOG_LEVEL + value: ${KOKU_LOG_LEVEL} + - name: UNLEASH_LOG_LEVEL + value: ${UNLEASH_LOG_LEVEL} + - name: PROMETHEUS_MULTIPROC_DIR + value: ${PROMETHEUS_DIR} + - name: REQUESTED_BUCKET + value: ${S3_BUCKET_NAME} + - name: ENABLE_S3_ARCHIVING + value: ${ENABLE_S3_ARCHIVING} + - name: PARQUET_PROCESSING_BATCH_SIZE + value: ${PARQUET_PROCESSING_BATCH_SIZE} + - name: TRINO_DATE_STEP + value: ${TRINO_DATE_STEP} + - name: KOKU_ENABLE_SENTRY + value: ${KOKU_ENABLE_SENTRY} + - name: KOKU_SENTRY_ENVIRONMENT + value: ${KOKU_SENTRY_ENV} + - name: KOKU_SENTRY_DSN + valueFrom: + secretKeyRef: + key: ${GLITCHTIP_KEY_NAME} + name: ${GLITCHTIP_SECRET_NAME} + optional: true + - name: DEMO_ACCOUNTS + value: ${DEMO_ACCOUNTS} + - name: WORKER_QUEUES + value: ${WORKER_SUMMARY_PENALTY_WORKER_QUEUE} + - name: WORKER_PROC_ALIVE_TIMEOUT + value: ${WORKER_PROC_ALIVE_TIMEOUT} + - name: DATE_OVERRIDE + value: ${DATE_OVERRIDE} + - name: RETAIN_NUM_MONTHS + value: ${RETAIN_NUM_MONTHS} + - name: INITIAL_INGEST_NUM_MONTHS + value: ${INITIAL_INGEST_NUM_MONTHS} + - name: INITIAL_INGEST_OVERRIDE + value: ${INITIAL_INGEST_OVERRIDE} + - name: POLLING_TIMER + 
value: ${POLLING_TIMER} + - name: POLLING_BATCH_SIZE + value: ${POLLING_BATCH_SIZE} + - name: DELAYED_TASK_TIME + value: ${DELAYED_TASK_TIME} + - name: DELAYED_TASK_POLLING_MINUTES + value: ${DELAYED_TASK_POLLING_MINUTES} + - name: TRINO_HOST + value: ${TRINO_HOST} + - name: TRINO_PORT + value: ${TRINO_PORT} + - name: AUTO_DATA_INGEST + value: ${AUTO_DATA_INGEST} + - name: REPORT_PROCESSING_BATCH_SIZE + value: ${REPORT_PROCESSING_BATCH_SIZE} + - name: PROMETHEUS_PUSHGATEWAY + value: ${PROMETHEUS_PUSHGATEWAY} + - name: SOURCES_API_PREFIX + value: ${SOURCES_API_PREFIX} + - name: UNLEASH_CACHE_DIR + value: ${UNLEASH_CACHE_DIR} + - name: WORKER_CACHE_TIMEOUT + value: ${WORKER_CACHE_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT + value: ${WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT} + - name: WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS + value: ${WORKER_CACHE_LARGE_CUSTOMER_CONCURRENT_TASKS} + - name: QE_SCHEMA + value: ${QE_SCHEMA} + - name: ENHANCED_ORG_ADMIN + value: ${ENHANCED_ORG_ADMIN} + livenessProbe: + httpGet: + path: /livez + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /readyz + port: metrics + scheme: HTTP + initialDelaySeconds: 30 + periodSeconds: 20 + successThreshold: 1 + failureThreshold: 5 + timeoutSeconds: 10 + terminationGracePeriodSeconds: 3600 + resources: + limits: + cpu: ${WORKER_SUMMARY_PENALTY_CPU_LIMIT} + memory: ${WORKER_SUMMARY_PENALTY_MEMORY_LIMIT} + requests: + cpu: ${WORKER_SUMMARY_PENALTY_CPU_REQUEST} + memory: ${WORKER_SUMMARY_PENALTY_MEMORY_REQUEST} + volumeMounts: + - name: aws-credentials + mountPath: /etc/aws + readOnly: true + - mountPath: /var/tmp/masu/ + name: koku-worker-data + - name: gcp-credentials + mountPath: /etc/gcp + readOnly: true + - name: oci-credentials + mountPath: /etc/oci + readOnly: true + - name: tmp-data + mountPath: ${TMP_DIR} + volumes: + - name: tmp-data + emptyDir: {} + - 
name: koku-worker-data + emptyDir: {} + - name: aws-credentials + secret: + items: + - key: aws-credentials + path: aws-credentials + secretName: koku-aws + - name: gcp-credentials + secret: + secretName: koku-gcp + items: + - key: gcp-credentials + path: gcp-credentials.json + - name: oci-credentials + secret: + secretName: koku-oci + items: + - key: oci-credentials + path: oci-credentials.pem + - key: oci-config + path: oci-config + +- op: add + path: /parameters/- + value: + displayName: Minimum replicas + name: WORKER_SUMMARY_PENALTY_REPLICAS + required: true + value: '2' +- op: add + path: /parameters/- + value: + displayName: Memory Request + name: WORKER_SUMMARY_PENALTY_MEMORY_REQUEST + required: true + value: 500Mi +- op: add + path: /parameters/- + value: + displayName: Memory Limit + name: WORKER_SUMMARY_PENALTY_MEMORY_LIMIT + required: true + value: 750Mi +- op: add + path: /parameters/- + value: + displayName: CPU Request + name: WORKER_SUMMARY_PENALTY_CPU_REQUEST + required: true + value: 100m +- op: add + path: /parameters/- + value: + displayName: CPU Limit + name: WORKER_SUMMARY_PENALTY_CPU_LIMIT + required: true + value: 200m +- op: add + path: /parameters/- + value: + displayName: Worker Queue + name: WORKER_SUMMARY_PENALTY_WORKER_QUEUE + required: true + value: 'summary_penalty' diff --git a/koku/api/settings/cost_groups/view.py b/koku/api/settings/cost_groups/view.py index dc28f67029..74d9139280 100644 --- a/koku/api/settings/cost_groups/view.py +++ b/koku/api/settings/cost_groups/view.py @@ -20,9 +20,8 @@ from api.settings.cost_groups.serializers import CostGroupProjectSerializer from api.settings.cost_groups.serializers import CostGroupQueryParamSerializer from api.utils import DateHelper -from masu.processor import is_customer_large -from masu.processor.tasks import OCP_QUEUE -from masu.processor.tasks import OCP_QUEUE_XL +from common.queues import get_customer_queue +from common.queues import OCPQueue from masu.processor.tasks import 
update_summary_tables from reporting.provider.ocp.models import OCPProject @@ -69,9 +68,7 @@ def get(self, request: Request, **kwargs) -> Response: def _summarize_current_month(self, schema_name: str, projects: list[dict[str, str]]) -> list[str]: """Resummarize OCP data for the current month.""" projects_to_summarize = [proj["project"] for proj in projects] - ocp_queue = OCP_QUEUE - if is_customer_large(schema_name): - ocp_queue = OCP_QUEUE_XL + ocp_queue = get_customer_queue(schema_name, OCPQueue) provider_uuids = ( OCPProject.objects.filter(project__in=projects_to_summarize) diff --git a/koku/api/settings/test/cost_groups/test_query_handler.py b/koku/api/settings/test/cost_groups/test_query_handler.py index d6f34ee244..e58ff1c902 100644 --- a/koku/api/settings/test/cost_groups/test_query_handler.py +++ b/koku/api/settings/test/cost_groups/test_query_handler.py @@ -16,9 +16,8 @@ from api.settings.cost_groups.query_handler import _remove_default_projects from api.settings.cost_groups.query_handler import put_openshift_namespaces from api.utils import DateHelper +from common.queues import OCPQueue from koku.koku_test_runner import OCP_ON_GCP_CLUSTER_ID -from masu.processor.tasks import OCP_QUEUE -from masu.processor.tasks import OCP_QUEUE_XL from reporting.provider.ocp.models import OCPProject from reporting.provider.ocp.models import OpenshiftCostCategory from reporting.provider.ocp.models import OpenshiftCostCategoryNamespace @@ -241,9 +240,9 @@ def test_put_catch_integrity_error(self): self.assertIn("IntegrityError", log_warning.records[0].getMessage()) @patch("api.settings.cost_groups.view.update_summary_tables.s") - @patch("api.settings.cost_groups.view.is_customer_large") - def test_add_new_records(self, mock_is_customer_large, mock_update_schedule): - mock_is_customer_large.return_value = False + @patch("api.settings.cost_groups.view.get_customer_queue") + def test_add_new_records(self, mock_get_customer_queue, mock_update_schedule): + 
mock_get_customer_queue.return_value = OCPQueue.DEFAULT with schema_context(self.schema_name): body = json.dumps(self.body_format) response = self.client.put(self.add_url, body, content_type="application/json", **self.headers) @@ -251,18 +250,18 @@ def test_add_new_records(self, mock_is_customer_large, mock_update_schedule): self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(current_count, 1) - mock_is_customer_large.assert_called_once_with(self.schema_name) + mock_get_customer_queue.assert_called_once_with(self.schema_name, OCPQueue) mock_update_schedule.assert_any_call( self.schema_name, provider_type=Provider.PROVIDER_OCP, provider_uuid=self.provider_uuid, start_date=DateHelper().this_month_start, ) - mock_update_schedule.return_value.apply_async.assert_called_with(queue=OCP_QUEUE) + mock_update_schedule.return_value.apply_async.assert_called_with(queue=OCPQueue.DEFAULT) @patch("api.settings.cost_groups.view.update_summary_tables.s") - @patch("api.settings.cost_groups.view.is_customer_large", return_value=False) - def test_move_project_to_different_cost_group(self, mock_is_customer_large, mock_update_schedule): + @patch("api.settings.cost_groups.view.get_customer_queue", return_value=False) + def test_move_project_to_different_cost_group(self, mock_get_customer_queue, mock_update_schedule): """Test moving an existing project to a different Cost Group""" with schema_context(self.schema_name): @@ -282,9 +281,9 @@ def test_move_project_to_different_cost_group(self, mock_is_customer_large, mock self.assertEqual(current_count, 1) @patch("api.settings.cost_groups.view.update_summary_tables.s") - @patch("api.settings.cost_groups.view.is_customer_large") - def test_add_new_records_large(self, mock_is_customer_large, mock_update_schedule): - mock_is_customer_large.return_value = True + @patch("api.settings.cost_groups.view.get_customer_queue") + def test_add_new_records_large(self, mock_get_customer_queue, mock_update_schedule): + 
mock_get_customer_queue.return_value = OCPQueue.XL with schema_context(self.schema_name): body = json.dumps(self.body_format) response = self.client.put(self.add_url, body, content_type="application/json", **self.headers) @@ -292,11 +291,11 @@ def test_add_new_records_large(self, mock_is_customer_large, mock_update_schedul self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(current_count, 1) - mock_is_customer_large.assert_called_once_with(self.schema_name) + mock_get_customer_queue.assert_called_once_with(self.schema_name, OCPQueue) mock_update_schedule.assert_any_call( self.schema_name, provider_type=Provider.PROVIDER_OCP, provider_uuid=self.provider_uuid, start_date=DateHelper().this_month_start, ) - mock_update_schedule.return_value.apply_async.assert_called_with(queue=OCP_QUEUE_XL) + mock_update_schedule.return_value.apply_async.assert_called_with(queue=OCPQueue.XL) diff --git a/koku/common/queues.py b/koku/common/queues.py new file mode 100644 index 0000000000..169ed85753 --- /dev/null +++ b/koku/common/queues.py @@ -0,0 +1,76 @@ +# +# Copyright 2024 Red Hat Inc. 
+# SPDX-License-Identifier: Apache-2.0 +# +"""Common place for queues""" +from common.enum import StrEnum +from masu.processor import is_customer_large +from masu.processor import is_customer_penalty + +DEFAULT = "celery" + + +class DownloadQueue(StrEnum): + DEFAULT = "download" + XL = "download_xl" + PENALTY_BOX = "download_penalty_box" + + +class OCPQueue(StrEnum): + DEFAULT = "ocp" + XL = "ocp_xl" + PENALTY_BOX = "ocp_penalty_box" + + +class PriorityQueue(StrEnum): + DEFAULT = "priority" + XL = "priority_xl" + PENALTY_BOX = "priority_penalty_box" + + +class SummaryQueue(StrEnum): + DEFAULT = "summary" + XL = "summary_xl" + PENALTY_BOX = "summary_penalty_box" + + +class CostModelQueue(StrEnum): + DEFAULT = "cost_model" + XL = "cost_model_xl" + PENALTY_BOX = "cost_model_penalty_box" + + +class RefreshQueue(StrEnum): + DEFAULT = "refresh" + XL = "refresh_xl" + PENALTY_BOX = "refresh_penalty_box" + + +# any additional queues should be added to this list +QUEUE_LIST = [ + DEFAULT, + DownloadQueue.DEFAULT, + DownloadQueue.XL, + DownloadQueue.PENALTY_BOX, + OCPQueue.DEFAULT, + OCPQueue.XL, + OCPQueue.PENALTY_BOX, + PriorityQueue.DEFAULT, + PriorityQueue.XL, + PriorityQueue.PENALTY_BOX, + CostModelQueue.DEFAULT, + CostModelQueue.XL, + CostModelQueue.PENALTY_BOX, + SummaryQueue.DEFAULT, + SummaryQueue.XL, + SummaryQueue.PENALTY_BOX, +] + + +def get_customer_queue(schema, queue_class=DownloadQueue): + queue = queue_class.DEFAULT + if is_customer_large(schema): + queue = queue_class.XL + if is_customer_penalty(schema): + queue = queue_class.PENALTY_BOX + return queue diff --git a/koku/cost_models/cost_model_manager.py b/koku/cost_models/cost_model_manager.py index bfcf4ff52e..5452f09843 100644 --- a/koku/cost_models/cost_model_manager.py +++ b/koku/cost_models/cost_model_manager.py @@ -11,11 +11,10 @@ from api.provider.models import Provider from api.utils import DateHelper +from common.queues import get_customer_queue +from common.queues import PriorityQueue from 
cost_models.models import CostModel from cost_models.models import CostModelMap -from masu.processor import is_customer_large -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE_XL from masu.processor.tasks import update_cost_model_costs @@ -88,9 +87,7 @@ def update_provider_uuids(self, provider_uuids): else: if provider.active: schema_name = provider.customer.schema_name - fallback_queue = PRIORITY_QUEUE - if is_customer_large(schema_name): - fallback_queue = PRIORITY_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, PriorityQueue) # Because this is triggered from the UI, we use the priority queue LOG.info( f"provider {provider_uuid} update for cost model {self._cost_model_uuid} " diff --git a/koku/cost_models/test/test_cost_model_manager.py b/koku/cost_models/test/test_cost_model_manager.py index 76d3e6569f..e1ea66508c 100644 --- a/koku/cost_models/test/test_cost_model_manager.py +++ b/koku/cost_models/test/test_cost_model_manager.py @@ -12,11 +12,11 @@ from api.iam.test.iam_test_case import IamTestCase from api.metrics import constants as metric_constants from api.provider.models import Provider +from common.queues import PriorityQueue from cost_models.cost_model_manager import CostModelException from cost_models.cost_model_manager import CostModelManager from cost_models.models import CostModel from cost_models.models import CostModelMap -from masu.processor.tasks import PRIORITY_QUEUE_XL class MockResponse: @@ -220,9 +220,9 @@ def test_update_provider_uuids_with_XL_queue(self): with tenant_context(self.tenant): manager = CostModelManager(cost_model_uuid=cost_model_obj.uuid) with patch("cost_models.cost_model_manager.update_cost_model_costs") as mock_update: - with patch("cost_models.cost_model_manager.is_customer_large", return_value=True): + with patch("cost_models.cost_model_manager.get_customer_queue", return_value=PriorityQueue.XL): manager.update_provider_uuids(provider_uuids=[provider_uuid]) - 
mock_update.s.return_value.set.assert_called_with(queue=PRIORITY_QUEUE_XL) + mock_update.s.return_value.set.assert_called_with(queue=PriorityQueue.XL) def test_update_provider_uuids(self): """Test creating a cost model then update with a provider uuid.""" diff --git a/koku/masu/api/ingress_reports.py b/koku/masu/api/ingress_reports.py index 4f8f6899ff..429179a44d 100644 --- a/koku/masu/api/ingress_reports.py +++ b/koku/masu/api/ingress_reports.py @@ -18,9 +18,9 @@ from api.common.pagination import ListPaginator from api.ingress.reports.serializers import IngressReportsSerializer +from common.queues import DownloadQueue +from common.queues import QUEUE_LIST from masu.celery.tasks import check_report_updates -from masu.processor.tasks import GET_REPORT_FILES_QUEUE -from masu.processor.tasks import QUEUE_LIST from reporting.ingress.models import IngressReports LOG = logging.getLogger(__name__) @@ -40,7 +40,7 @@ def ingress_reports(request): provider_uuid = params.get("provider_uuid") schema_name = params.get("schema_name") - queue_name = params.get("queue") or GET_REPORT_FILES_QUEUE + queue_name = params.get("queue") or DownloadQueue.DEFAULT if schema_name is None: errmsg = "schema_name must be supplied as a parameter." 
return Response({"Error": errmsg}, status=status.HTTP_400_BAD_REQUEST) diff --git a/koku/masu/api/process_openshift_on_cloud.py b/koku/masu/api/process_openshift_on_cloud.py index c75175cc2b..e98ece4dc5 100644 --- a/koku/masu/api/process_openshift_on_cloud.py +++ b/koku/masu/api/process_openshift_on_cloud.py @@ -20,12 +20,11 @@ from api.provider.models import Provider from api.utils import DateHelper from api.utils import get_months_in_date_range -from masu.processor import is_customer_large -from masu.processor.tasks import GET_REPORT_FILES_QUEUE -from masu.processor.tasks import GET_REPORT_FILES_QUEUE_XL +from common.queues import DownloadQueue +from common.queues import get_customer_queue +from common.queues import QUEUE_LIST from masu.processor.tasks import process_daily_openshift_on_cloud as process_daily_openshift_on_cloud_task from masu.processor.tasks import process_openshift_on_cloud as process_openshift_on_cloud_task -from masu.processor.tasks import QUEUE_LIST LOG = logging.getLogger(__name__) REPORT_DATA_KEY = "process_openshift_on_cloud Task IDs" @@ -43,9 +42,7 @@ def process_openshift_on_cloud(request): schema_name = params.get("schema") start_date = params.get("start_date") end_date = params.get("end_date") - fallback_queue = GET_REPORT_FILES_QUEUE - if is_customer_large(schema_name): - fallback_queue = GET_REPORT_FILES_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, DownloadQueue) queue_name = params.get("queue") or fallback_queue if cloud_provider_uuid is None: diff --git a/koku/masu/api/report_data.py b/koku/masu/api/report_data.py index 3d1268ca18..4b96d018c0 100644 --- a/koku/masu/api/report_data.py +++ b/koku/masu/api/report_data.py @@ -18,10 +18,9 @@ from api.models import Provider from api.utils import get_months_in_date_range -from masu.processor import is_customer_large -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE_XL -from masu.processor.tasks import QUEUE_LIST +from 
common.queues import get_customer_queue +from common.queues import PriorityQueue +from common.queues import QUEUE_LIST from masu.processor.tasks import remove_expired_data from masu.processor.tasks import update_all_summary_tables from masu.processor.tasks import update_summary_tables @@ -48,9 +47,7 @@ def report_data(request): end_date = params.get("end_date") invoice_month = params.get("invoice_month") provider = None - fallback_queue = PRIORITY_QUEUE - if is_customer_large(schema_name): - fallback_queue = PRIORITY_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, PriorityQueue) ocp_on_cloud = params.get("ocp_on_cloud", "true").lower() ocp_on_cloud = ocp_on_cloud == "true" diff --git a/koku/masu/api/update_cost_model_costs.py b/koku/masu/api/update_cost_model_costs.py index 6e36b50308..f0b762f2f7 100644 --- a/koku/masu/api/update_cost_model_costs.py +++ b/koku/masu/api/update_cost_model_costs.py @@ -17,10 +17,9 @@ from api.provider.models import Provider from api.utils import DateHelper from api.utils import get_months_in_date_range -from masu.processor import is_customer_large -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE_XL -from masu.processor.tasks import QUEUE_LIST +from common.queues import get_customer_queue +from common.queues import PriorityQueue +from common.queues import QUEUE_LIST from masu.processor.tasks import update_cost_model_costs as cost_task LOG = logging.getLogger(__name__) @@ -41,9 +40,7 @@ def update_cost_model_costs(request): default_end_date = DateHelper().today.strftime("%Y-%m-%d") start_date = params.get("start_date", default=default_start_date) end_date = params.get("end_date", default=default_end_date) - fallback_queue = PRIORITY_QUEUE - if is_customer_large(schema_name): - fallback_queue = PRIORITY_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, PriorityQueue) queue_name = params.get("queue") or fallback_queue if provider_uuid is None or schema_name is None: 
diff --git a/koku/masu/api/update_openshift_on_cloud.py b/koku/masu/api/update_openshift_on_cloud.py index 6ecd241caf..298d9d50f2 100644 --- a/koku/masu/api/update_openshift_on_cloud.py +++ b/koku/masu/api/update_openshift_on_cloud.py @@ -20,13 +20,11 @@ from api.provider.models import Provider from api.utils import get_months_in_date_range -from masu.processor import is_customer_large -from masu.processor.ocp.ocp_cloud_parquet_summary_updater import DELETE_TABLE +from common.queues import get_customer_queue +from common.queues import PriorityQueue +from common.queues import QUEUE_LIST from masu.processor.ocp.ocp_cloud_parquet_summary_updater import OCPCloudParquetReportSummaryUpdater from masu.processor.tasks import delete_openshift_on_cloud_data -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE_XL -from masu.processor.tasks import QUEUE_LIST from masu.processor.tasks import update_openshift_on_cloud as update_openshift_on_cloud_task LOG = logging.getLogger(__name__) @@ -47,9 +45,7 @@ def update_openshift_on_cloud(request): schema_name = params.get("schema") start_date = params.get("start_date") end_date = params.get("end_date") - fallback_queue = PRIORITY_QUEUE - if is_customer_large(schema_name): - fallback_queue = PRIORITY_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, PriorityQueue) queue_name = params.get("queue") or fallback_queue if openshift_provider_uuid is None: diff --git a/koku/masu/api/upgrade_trino/test/test_view.py b/koku/masu/api/upgrade_trino/test/test_view.py index 99190bf4c3..88720cb823 100644 --- a/koku/masu/api/upgrade_trino/test/test_view.py +++ b/koku/masu/api/upgrade_trino/test/test_view.py @@ -11,8 +11,8 @@ from django.urls import reverse from api.models import Provider +from common.queues import DownloadQueue from masu.api.upgrade_trino.util.task_handler import FixParquetTaskHandler -from masu.processor.tasks import GET_REPORT_FILES_QUEUE from masu.test import MasuTestCase @@ 
-65,4 +65,4 @@ def test_acceptable_parameters(self, _): "bill_date": self.bill_date, "cleaned_column_mapping": cleaned_column_mapping, } - patch_celery.assert_called_once_with((), async_kwargs, queue=GET_REPORT_FILES_QUEUE) + patch_celery.assert_called_once_with((), async_kwargs, queue=DownloadQueue.DEFAULT) diff --git a/koku/masu/api/upgrade_trino/util/task_handler.py b/koku/masu/api/upgrade_trino/util/task_handler.py index ed2b5158d1..6dc3e82b1d 100644 --- a/koku/masu/api/upgrade_trino/util/task_handler.py +++ b/koku/masu/api/upgrade_trino/util/task_handler.py @@ -10,12 +10,11 @@ from api.common import log_json from api.provider.models import Provider from api.utils import DateHelper +from common.queues import DownloadQueue +from common.queues import get_customer_queue from masu.api.upgrade_trino.util.constants import ConversionContextKeys from masu.api.upgrade_trino.util.state_tracker import StateTracker from masu.celery.tasks import fix_parquet_data_types -from masu.processor import is_customer_large -from masu.processor.tasks import GET_REPORT_FILES_QUEUE -from masu.processor.tasks import GET_REPORT_FILES_QUEUE_XL from masu.util.common import strip_characters_from_column_name from reporting.provider.aws.models import TRINO_REQUIRED_COLUMNS as AWS_TRINO_REQUIRED_COLUMNS from reporting.provider.azure.models import TRINO_REQUIRED_COLUMNS as AZURE_TRINO_REQUIRED_COLUMNS @@ -102,9 +101,7 @@ def build_celery_tasks(self): providers = Provider.objects.filter(active=True, paused=False, type=self.provider_type) for provider in providers: - queue_name = GET_REPORT_FILES_QUEUE - if is_customer_large(provider.account["schema_name"]): - queue_name = GET_REPORT_FILES_QUEUE_XL + queue_name = get_customer_queue(provider.account["schema_name"], DownloadQueue) account = copy.deepcopy(provider.account) conversion_metadata = provider.additional_context.get(ConversionContextKeys.metadata, {}) diff --git a/koku/masu/celery/tasks.py b/koku/masu/celery/tasks.py index 
5d74a9fa64..42824c8ba9 100644 --- a/koku/masu/celery/tasks.py +++ b/koku/masu/celery/tasks.py @@ -29,6 +29,9 @@ from api.models import Provider from api.provider.models import Sources from api.utils import DateHelper +from common.queues import DownloadQueue +from common.queues import PriorityQueue +from common.queues import SummaryQueue from koku import celery_app from koku.notifications import NotificationService from masu.api.upgrade_trino.util.verify_parquet_files import VerifyParquetFiles @@ -41,9 +44,6 @@ from masu.processor.orchestrator import Orchestrator from masu.processor.tasks import autovacuum_tune_schema from masu.processor.tasks import DEFAULT -from masu.processor.tasks import GET_REPORT_FILES_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import REMOVE_EXPIRED_DATA_QUEUE from masu.prometheus_stats import QUEUES from masu.util.aws.common import get_s3_resource from masu.util.oci.common import OCI_REPORT_TYPES @@ -62,7 +62,7 @@ } -@celery_app.task(name="masu.celery.tasks.fix_parquet_data_types", queue=GET_REPORT_FILES_QUEUE) +@celery_app.task(name="masu.celery.tasks.fix_parquet_data_types", queue=DownloadQueue.DEFAULT) def fix_parquet_data_types(*args, **kwargs): verify_parquet = VerifyParquetFiles(*args, **kwargs) verify_parquet.retrieve_verify_reload_s3_parquet() @@ -183,7 +183,7 @@ def deleted_archived_with_prefix(s3_bucket_name, prefix): @celery_app.task( # noqa: C901 name="masu.celery.tasks.delete_archived_data", - queue=REMOVE_EXPIRED_DATA_QUEUE, + queue=SummaryQueue.DEFAULT, autoretry_for=(ClientError,), max_retries=10, retry_backoff=10, @@ -503,7 +503,7 @@ def check_for_stale_ocp_source(provider_uuid=None): ) -@celery_app.task(name="masu.celery.tasks.delete_provider_async", queue=PRIORITY_QUEUE) +@celery_app.task(name="masu.celery.tasks.delete_provider_async", queue=PriorityQueue.DEFAULT) def delete_provider_async(name, provider_uuid, schema_name): with schema_context(schema_name): LOG.info(f"Removing Provider 
without Source: {str(name)} ({str(provider_uuid)}") @@ -515,7 +515,7 @@ def delete_provider_async(name, provider_uuid, schema_name): ) -@celery_app.task(name="masu.celery.tasks.out_of_order_source_delete_async", queue=PRIORITY_QUEUE) +@celery_app.task(name="masu.celery.tasks.out_of_order_source_delete_async", queue=PriorityQueue.DEFAULT) def out_of_order_source_delete_async(source_id): LOG.info(f"Removing out of order delete Source (ID): {str(source_id)}") try: @@ -531,7 +531,7 @@ def out_of_order_source_delete_async(source_id): delete_source_helper(source) -@celery_app.task(name="masu.celery.tasks.missing_source_delete_async", queue=PRIORITY_QUEUE) +@celery_app.task(name="masu.celery.tasks.missing_source_delete_async", queue=PriorityQueue.DEFAULT) def missing_source_delete_async(source_id): LOG.info(f"Removing missing Source: {str(source_id)}") try: @@ -607,7 +607,7 @@ def get_celery_queue_items(self, queue_name=None, task_name=None): return decoded_tasks -@celery_app.task(name="masu.celery.tasks.trigger_delayed_tasks", queue=GET_REPORT_FILES_QUEUE) +@celery_app.task(name="masu.celery.tasks.trigger_delayed_tasks", queue=DownloadQueue.DEFAULT) def trigger_delayed_tasks(*args, **kwargs): """Removes the expired records starting the delayed celery tasks.""" DelayedCeleryTasks.trigger_delayed_tasks() diff --git a/koku/masu/external/kafka_msg_handler.py b/koku/masu/external/kafka_msg_handler.py index 6f9ac4b952..988131be40 100644 --- a/koku/masu/external/kafka_msg_handler.py +++ b/koku/masu/external/kafka_msg_handler.py @@ -32,6 +32,8 @@ from api.common import log_json from api.provider.models import Provider +from common.queues import get_customer_queue +from common.queues import OCPQueue from kafka_utils.utils import extract_from_header from kafka_utils.utils import get_consumer from kafka_utils.utils import get_producer @@ -42,12 +44,9 @@ from masu.database.report_manifest_db_accessor import ReportManifestDBAccessor from masu.external import UNCOMPRESSED from 
masu.external.ros_report_shipper import ROSReportShipper -from masu.processor import is_customer_large from masu.processor._tasks.process import _process_report_file from masu.processor.report_processor import ReportProcessorDBError from masu.processor.report_processor import ReportProcessorError -from masu.processor.tasks import OCP_QUEUE -from masu.processor.tasks import OCP_QUEUE_XL from masu.processor.tasks import record_all_manifest_files from masu.processor.tasks import record_report_status from masu.processor.tasks import summarize_reports @@ -622,9 +621,7 @@ def summarize_manifest(report_meta, manifest_uuid): "end_date": end_date, } - ocp_processing_queue = OCP_QUEUE - if is_customer_large(schema): - ocp_processing_queue = OCP_QUEUE_XL + ocp_processing_queue = get_customer_queue(schema, OCPQueue) if not MANIFEST_ACCESSOR.manifest_ready_for_summary(manifest_id): return diff --git a/koku/masu/management/commands/aws_null_bill_cleanup.py b/koku/masu/management/commands/aws_null_bill_cleanup.py index de6fd425a6..2dc481ea1b 100644 --- a/koku/masu/management/commands/aws_null_bill_cleanup.py +++ b/koku/masu/management/commands/aws_null_bill_cleanup.py @@ -11,11 +11,10 @@ from django_tenants.utils import schema_context from api.provider.models import Provider +from common.queues import get_customer_queue +from common.queues import PriorityQueue from koku.database import cascade_delete from koku.feature_flags import UNLEASH_CLIENT -from masu.processor import is_customer_large -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE_XL from masu.processor.tasks import update_summary_tables from reporting.models import AWSCostEntryBill @@ -74,7 +73,7 @@ def cleanup_aws_bills(delete: bool) -> int: payer_account_id=None, billing_period_start=start_date, ): - queue_name = PRIORITY_QUEUE_XL if is_customer_large(schema) else PRIORITY_QUEUE + queue_name = get_customer_queue(schema, PriorityQueue) total_cleaned_bills += len(bills) if 
delete: formatted_start = start_date.strftime(DATE_FORMAT) diff --git a/koku/masu/processor/__init__.py b/koku/masu/processor/__init__.py index 3f4b9b6c50..4b62af9e03 100644 --- a/koku/masu/processor/__init__.py +++ b/koku/masu/processor/__init__.py @@ -79,6 +79,13 @@ def is_customer_large(account): # pragma: no cover return UNLEASH_CLIENT.is_enabled("cost-management.backend.large-customer", context) +def is_customer_penalty(account): # pragma: no cover + """Flag the customer as penalised.""" + account = convert_account(account) + context = {"schema": account} + return UNLEASH_CLIENT.is_enabled("cost-management.backend.penalty-customer", context) + + def is_rate_limit_customer_large(account): # pragma: no cover """Flag the customer as large and to be rate limited.""" account = convert_account(account) diff --git a/koku/masu/processor/orchestrator.py b/koku/masu/processor/orchestrator.py index 9790c988e5..bb0aa7fce7 100644 --- a/koku/masu/processor/orchestrator.py +++ b/koku/masu/processor/orchestrator.py @@ -17,23 +17,21 @@ from api.provider.models import check_provider_setup_complete from api.provider.models import Provider from api.utils import DateHelper +from common.queues import DownloadQueue +from common.queues import get_customer_queue +from common.queues import SummaryQueue from hcs.tasks import collect_hcs_report_data_from_manifest from hcs.tasks import HCS_QUEUE from masu.config import Config from masu.external.report_downloader import ReportDownloader from masu.external.report_downloader import ReportDownloaderError from masu.processor import is_cloud_source_processing_disabled -from masu.processor import is_customer_large from masu.processor import is_source_disabled from masu.processor.tasks import get_report_files -from masu.processor.tasks import GET_REPORT_FILES_QUEUE -from masu.processor.tasks import GET_REPORT_FILES_QUEUE_XL from masu.processor.tasks import record_all_manifest_files from masu.processor.tasks import record_report_status from 
masu.processor.tasks import remove_expired_data from masu.processor.tasks import summarize_reports -from masu.processor.tasks import SUMMARIZE_REPORTS_QUEUE -from masu.processor.tasks import SUMMARIZE_REPORTS_QUEUE_XL from masu.processor.worker_cache import WorkerCache from masu.util.aws.common import update_account_aliases from subs.tasks import extract_subs_data_from_reports @@ -189,12 +187,9 @@ def start_manifest_processing( # noqa: C901 REPORT_QUEUE = self.queue_name HCS_Q = self.queue_name else: - SUMMARY_QUEUE = SUMMARIZE_REPORTS_QUEUE - REPORT_QUEUE = GET_REPORT_FILES_QUEUE + SUMMARY_QUEUE = get_customer_queue(schema_name, SummaryQueue) + REPORT_QUEUE = get_customer_queue(schema_name, DownloadQueue) HCS_Q = HCS_QUEUE - if is_customer_large(schema_name): - SUMMARY_QUEUE = SUMMARIZE_REPORTS_QUEUE_XL - REPORT_QUEUE = GET_REPORT_FILES_QUEUE_XL reports_tasks_queued = False downloader = ReportDownloader( customer_name=customer_name, diff --git a/koku/masu/processor/tasks.py b/koku/masu/processor/tasks.py index b918af63a6..95cea41bea 100644 --- a/koku/masu/processor/tasks.py +++ b/koku/masu/processor/tasks.py @@ -26,6 +26,14 @@ from api.provider.models import Provider from api.utils import DateHelper from api.utils import get_months_in_date_range +from common.queues import CostModelQueue +from common.queues import DEFAULT +from common.queues import DownloadQueue +from common.queues import get_customer_queue +from common.queues import OCPQueue +from common.queues import PriorityQueue +from common.queues import RefreshQueue +from common.queues import SummaryQueue from koku import celery_app from koku.middleware import KokuTenantMiddleware from masu.config import Config @@ -35,7 +43,6 @@ from masu.exceptions import MasuProviderError from masu.external.downloader.report_downloader_base import ReportDownloaderWarning from masu.external.report_downloader import ReportDownloaderError -from masu.processor import is_customer_large from masu.processor import 
is_ocp_on_cloud_summary_disabled from masu.processor import is_rate_limit_customer_large from masu.processor import is_source_disabled @@ -67,58 +74,15 @@ from reporting_common.states import ManifestState from reporting_common.states import ManifestStep - LOG = logging.getLogger(__name__) -DEFAULT = "celery" -GET_REPORT_FILES_QUEUE = "download" -GET_REPORT_FILES_QUEUE_XL = "download_xl" -OCP_QUEUE = "ocp" -OCP_QUEUE_XL = "ocp_xl" -PRIORITY_QUEUE = "priority" -PRIORITY_QUEUE_XL = "priority_xl" -MARK_MANIFEST_COMPLETE_QUEUE = "priority" -MARK_MANIFEST_COMPLETE_QUEUE_XL = "priority_xl" -REMOVE_EXPIRED_DATA_QUEUE = "summary" -REMOVE_EXPIRED_DATA_QUEUE_XL = "summary_xl" -SUMMARIZE_REPORTS_QUEUE = "summary" -SUMMARIZE_REPORTS_QUEUE_XL = "summary_xl" -UPDATE_COST_MODEL_COSTS_QUEUE = "cost_model" -UPDATE_COST_MODEL_COSTS_QUEUE_XL = "cost_model_xl" -UPDATE_SUMMARY_TABLES_QUEUE = "summary" -UPDATE_SUMMARY_TABLES_QUEUE_XL = "summary_xl" -DELETE_TRUNCATE_QUEUE = "refresh" -DELETE_TRUNCATE_QUEUE_XL = "refresh_xl" - -# any additional queues should be added to this list -QUEUE_LIST = [ - DEFAULT, - GET_REPORT_FILES_QUEUE, - GET_REPORT_FILES_QUEUE_XL, - OCP_QUEUE, - OCP_QUEUE_XL, - PRIORITY_QUEUE, - PRIORITY_QUEUE_XL, - MARK_MANIFEST_COMPLETE_QUEUE, - MARK_MANIFEST_COMPLETE_QUEUE_XL, - REMOVE_EXPIRED_DATA_QUEUE, - REMOVE_EXPIRED_DATA_QUEUE_XL, - SUMMARIZE_REPORTS_QUEUE, - SUMMARIZE_REPORTS_QUEUE_XL, - UPDATE_COST_MODEL_COSTS_QUEUE, - UPDATE_COST_MODEL_COSTS_QUEUE_XL, - UPDATE_SUMMARY_TABLES_QUEUE, - UPDATE_SUMMARY_TABLES_QUEUE_XL, -] UPDATE_SUMMARY_TABLES_TASK = "masu.processor.tasks.update_summary_tables" def delayed_summarize_current_month(schema_name: str, provider_uuids: list, provider_type: str): """Delay Resummarize provider data for the current month.""" - queue = UPDATE_SUMMARY_TABLES_QUEUE - if is_customer_large(schema_name): - queue = UPDATE_SUMMARY_TABLES_QUEUE_XL + queue = get_customer_queue(schema_name, SummaryQueue) for provider_uuid in provider_uuids: id = 
DelayedCeleryTasks.create_or_reset_timeout( @@ -182,7 +146,7 @@ def record_report_status(manifest_id, file_name, tracing_id, context={}): return already_processed -@celery_app.task(name="masu.processor.tasks.get_report_files", queue=GET_REPORT_FILES_QUEUE, bind=True) # noqa: C901 +@celery_app.task(name="masu.processor.tasks.get_report_files", queue=DownloadQueue.DEFAULT, bind=True) # noqa: C901 def get_report_files( # noqa: C901 self, customer_name, @@ -346,7 +310,7 @@ def remove_expired_data(schema_name, provider, simulate, provider_uuid=None, que _remove_expired_data(schema_name, provider, simulate, provider_uuid) -@celery_app.task(name="masu.processor.tasks.summarize_reports", queue=SUMMARIZE_REPORTS_QUEUE) # noqa: C901 +@celery_app.task(name="masu.processor.tasks.summarize_reports", queue=SummaryQueue.DEFAULT) # noqa: C901 def summarize_reports( # noqa: C901 reports_to_summarize, queue_name=None, manifest_list=None, ingress_report_uuid=None ): @@ -425,9 +389,7 @@ def summarize_reports( # noqa: C901 # Updater classes for when full-month summarization is # required. 
with ReportManifestDBAccessor() as manifest_accesor: - fallback_queue = UPDATE_SUMMARY_TABLES_QUEUE - if is_customer_large(report.get("schema_name")): - fallback_queue = UPDATE_SUMMARY_TABLES_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, SummaryQueue) tracing_id = report.get("tracing_id", report.get("manifest_uuid", "no-tracing-id")) if not manifest_accesor.manifest_ready_for_summary(report.get("manifest_id")): @@ -453,7 +415,7 @@ def summarize_reports( # noqa: C901 ).apply_async(queue=queue_name or fallback_queue) -@celery_app.task(name=UPDATE_SUMMARY_TABLES_TASK, queue=UPDATE_SUMMARY_TABLES_QUEUE) # noqa: C901 +@celery_app.task(name=UPDATE_SUMMARY_TABLES_TASK, queue=SummaryQueue.DEFAULT) # noqa: C901 def update_summary_tables( # noqa: C901 schema, provider_type, @@ -502,18 +464,13 @@ def update_summary_tables( # noqa: C901 cache_arg_date = start_date.strftime("%Y-%m") cache_args = [schema, provider_type, provider_uuid, cache_arg_date] ocp_on_cloud_infra_map = {} - is_large_customer = is_customer_large(schema) is_large_customer_rate_limited = is_rate_limit_customer_large(schema) - fallback_update_summary_tables_queue = UPDATE_SUMMARY_TABLES_QUEUE - fallback_delete_truncate_queue = DELETE_TRUNCATE_QUEUE - fallback_update_cost_model_queue = UPDATE_COST_MODEL_COSTS_QUEUE - fallback_mark_manifest_complete_queue = MARK_MANIFEST_COMPLETE_QUEUE + fallback_update_summary_tables_queue = get_customer_queue(schema, SummaryQueue) + fallback_delete_truncate_queue = get_customer_queue(schema, RefreshQueue) + fallback_update_cost_model_queue = get_customer_queue(schema, CostModelQueue) + fallback_mark_manifest_complete_queue = get_customer_queue(schema, PriorityQueue) timeout = settings.WORKER_CACHE_TIMEOUT - if is_large_customer: - fallback_update_summary_tables_queue = UPDATE_SUMMARY_TABLES_QUEUE_XL - fallback_delete_truncate_queue = DELETE_TRUNCATE_QUEUE_XL - fallback_update_cost_model_queue = UPDATE_COST_MODEL_COSTS_QUEUE_XL - 
fallback_mark_manifest_complete_queue = MARK_MANIFEST_COMPLETE_QUEUE_XL + if fallback_update_summary_tables_queue != SummaryQueue.DEFAULT: timeout = settings.WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT if not synchronous: @@ -684,7 +641,7 @@ def update_summary_tables( # noqa: C901 worker_cache.release_single_task(task_name, cache_args) -@celery_app.task(name="masu.processor.tasks.delete_openshift_on_cloud_data", queue=DELETE_TRUNCATE_QUEUE) # noqa: C901 +@celery_app.task(name="masu.processor.tasks.delete_openshift_on_cloud_data", queue=RefreshQueue.DEFAULT) # noqa: C901 def delete_openshift_on_cloud_data( schema_name, infrastructure_provider_uuid, @@ -709,7 +666,7 @@ def delete_openshift_on_cloud_data( bind=True, autoretry_for=(ReportSummaryUpdaterCloudError,), max_retries=settings.MAX_UPDATE_RETRIES, - queue=UPDATE_SUMMARY_TABLES_QUEUE, + queue=SummaryQueue.DEFAULT, ) def update_openshift_on_cloud( # noqa: C901 self, @@ -743,11 +700,10 @@ def update_openshift_on_cloud( # noqa: C901 worker_cache = WorkerCache() timeout = settings.WORKER_CACHE_TIMEOUT rate_limited = False - fallback_queue = UPDATE_SUMMARY_TABLES_QUEUE + fallback_queue = get_customer_queue(schema_name, SummaryQueue) if is_rate_limit_customer_large(schema_name): rate_limited = rate_limit_tasks(task_name, schema_name) - if is_customer_large(schema_name): - fallback_queue = UPDATE_SUMMARY_TABLES_QUEUE_XL + if fallback_queue != SummaryQueue.DEFAULT: timeout = settings.WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT if rate_limited or worker_cache.single_task_is_running(task_name, cache_args): msg = f"Task {task_name} already running for {cache_args}. Requeuing." 
@@ -793,9 +749,7 @@ def update_openshift_on_cloud( # noqa: C901 ) # Regardless of an attached cost model we must run an update for default distribution costs LOG.info(log_json(tracing_id, msg="updating cost model costs", context=ctx)) - fallback_queue = UPDATE_COST_MODEL_COSTS_QUEUE - if is_customer_large(schema_name): - fallback_queue = UPDATE_COST_MODEL_COSTS_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, CostModelQueue) update_cost_model_costs.s( schema_name, openshift_provider_uuid, start_date, end_date, tracing_id=tracing_id ).apply_async(queue=queue_name or fallback_queue) @@ -827,7 +781,7 @@ def update_openshift_on_cloud( # noqa: C901 worker_cache.release_single_task(task_name, cache_args) -@celery_app.task(name="masu.processor.tasks.update_all_summary_tables", queue=UPDATE_SUMMARY_TABLES_QUEUE) +@celery_app.task(name="masu.processor.tasks.update_all_summary_tables", queue=SummaryQueue.DEFAULT) def update_all_summary_tables(start_date, end_date=None): """Populate all the summary tables for reporting. 
@@ -852,18 +806,15 @@ def update_all_summary_tables(start_date, end_date=None): schema_name = account.get("schema_name") provider_type = account.get("provider_type") provider_uuid = account.get("provider_uuid") - fallback_queue = UPDATE_SUMMARY_TABLES_QUEUE - ocp_process_queue = OCP_QUEUE - if is_customer_large(schema_name): - fallback_queue = UPDATE_SUMMARY_TABLES_QUEUE_XL - ocp_process_queue = OCP_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, SummaryQueue) + ocp_process_queue = get_customer_queue(schema_name, OCPQueue) queue_name = ocp_process_queue if provider_type and provider_type.lower() == "ocp" else None update_summary_tables.s( schema_name, provider_type, provider_uuid, str(start_date), end_date, queue_name=queue_name ).apply_async(queue=queue_name or fallback_queue) -@celery_app.task(name="masu.processor.tasks.update_cost_model_costs", queue=UPDATE_COST_MODEL_COSTS_QUEUE) +@celery_app.task(name="masu.processor.tasks.update_cost_model_costs", queue=CostModelQueue.DEFAULT) def update_cost_model_costs( schema_name, provider_uuid, @@ -892,9 +843,7 @@ def update_cost_model_costs( cache_args = [schema_name, provider_uuid, start_date, end_date] if not synchronous: worker_cache = WorkerCache() - fallback_queue = UPDATE_COST_MODEL_COSTS_QUEUE - if is_customer_large(schema_name): - fallback_queue = UPDATE_COST_MODEL_COSTS_QUEUE_XL + fallback_queue = get_customer_queue(schema_name, CostModelQueue) if worker_cache.single_task_is_running(task_name, cache_args): msg = f"Task {task_name} already running for {cache_args}. Requeuing." 
LOG.debug(log_json(tracing_id, msg=msg)) @@ -934,7 +883,7 @@ def update_cost_model_costs( worker_cache.release_single_task(task_name, cache_args) -@celery_app.task(name="masu.processor.tasks.mark_manifest_complete", queue=MARK_MANIFEST_COMPLETE_QUEUE) +@celery_app.task(name="masu.processor.tasks.mark_manifest_complete", queue=PriorityQueue.DEFAULT) def mark_manifest_complete( schema, provider_type, @@ -1126,7 +1075,7 @@ def remove_stale_tenants(): LOG.info(f"Deleted tenant: {name}") -@celery_app.task(name="masu.processor.tasks.process_openshift_on_cloud", queue=GET_REPORT_FILES_QUEUE, bind=True) +@celery_app.task(name="masu.processor.tasks.process_openshift_on_cloud", queue=DownloadQueue.DEFAULT, bind=True) def process_openshift_on_cloud(self, schema_name, provider_uuid, bill_date, tracing_id=None): """Process OpenShift on Cloud parquet files using Trino.""" if is_source_disabled(provider_uuid): @@ -1186,7 +1135,7 @@ def process_openshift_on_cloud(self, schema_name, provider_uuid, bill_date, trac processor.process(file_name, [data_frame]) -@celery_app.task(name="masu.processor.tasks.process_openshift_on_cloud_daily", queue=GET_REPORT_FILES_QUEUE, bind=True) +@celery_app.task(name="masu.processor.tasks.process_openshift_on_cloud_daily", queue=DownloadQueue.DEFAULT, bind=True) def process_daily_openshift_on_cloud( self, schema_name, provider_uuid, bill_date, start_date, end_date, tracing_id=None ): diff --git a/koku/masu/test/api/test_process_openshift_on_cloud.py b/koku/masu/test/api/test_process_openshift_on_cloud.py index 21a749e0f4..b4d2edf35c 100644 --- a/koku/masu/test/api/test_process_openshift_on_cloud.py +++ b/koku/masu/test/api/test_process_openshift_on_cloud.py @@ -10,7 +10,7 @@ from django.urls import reverse from api.utils import DateHelper -from masu.processor.tasks import QUEUE_LIST +from common.queues import QUEUE_LIST from masu.test import MasuTestCase diff --git a/koku/masu/test/api/test_report_data.py b/koku/masu/test/api/test_report_data.py index 
f211803d0f..6d76a80a27 100644 --- a/koku/masu/test/api/test_report_data.py +++ b/koku/masu/test/api/test_report_data.py @@ -14,10 +14,9 @@ from api.models import Provider from api.utils import DateHelper -from masu.processor.tasks import OCP_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import PRIORITY_QUEUE_XL -from masu.processor.tasks import QUEUE_LIST +from common.queues import OCPQueue +from common.queues import PriorityQueue +from common.queues import QUEUE_LIST @override_settings(ROOT_URLCONF="masu.urls") @@ -63,7 +62,7 @@ def test_get_report_data(self, mock_update, _): params["provider_uuid"], params["start_date"], DateHelper().today.date().strftime("%Y-%m-%d"), - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=True, invoice_month=None, ) @@ -91,7 +90,7 @@ def test_get_report_data_sent_to_OCP_queue(self, mock_update, _): params["provider_uuid"], params["start_date"], DateHelper().today.date().strftime("%Y-%m-%d"), - queue_name=OCP_QUEUE, + queue_name=OCPQueue.DEFAULT, ocp_on_cloud=True, invoice_month=None, ) @@ -107,7 +106,7 @@ def test_get_report_data_sent_to_XL_OCP_queue(self, mock_update, _): } expected_key = "Report Data Task IDs" - with patch("masu.api.report_data.is_customer_large", return_value=True): + with patch("masu.api.report_data.get_customer_queue", return_value=PriorityQueue.XL): response = self.client.get(reverse("report_data"), params) body = response.json() @@ -119,7 +118,7 @@ def test_get_report_data_sent_to_XL_OCP_queue(self, mock_update, _): params["provider_uuid"], params["start_date"], DateHelper().today.date().strftime("%Y-%m-%d"), - queue_name=PRIORITY_QUEUE_XL, + queue_name=PriorityQueue.XL, ocp_on_cloud=True, invoice_month=None, ) @@ -271,7 +270,7 @@ def test_get_report_data_with_end_date(self, mock_update, _): params["provider_uuid"], params["start_date"], params["end_date"], - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=True, 
invoice_month=None, ) @@ -285,7 +284,7 @@ def test_get_report_data_with_end_date(self, mock_update, _): params["provider_uuid"], params["start_date"], params["start_date"], - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=True, invoice_month=None, ), @@ -295,7 +294,7 @@ def test_get_report_data_with_end_date(self, mock_update, _): params["provider_uuid"], params["end_date"], params["end_date"], - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=True, invoice_month=None, ), @@ -328,7 +327,7 @@ def test_get_report_data_with_only_provider_type(self, mock_update, _): None, params["start_date"], params["end_date"], - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=True, invoice_month=None, ) @@ -342,7 +341,7 @@ def test_get_report_data_with_only_provider_type(self, mock_update, _): None, params["start_date"], params["start_date"], - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=True, invoice_month=None, ), @@ -352,7 +351,7 @@ def test_get_report_data_with_only_provider_type(self, mock_update, _): None, params["end_date"], params["end_date"], - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=True, invoice_month=None, ), @@ -535,7 +534,7 @@ def test_get_report_data_ocp_on_cloud_false(self, mock_update, _): params["provider_uuid"], params["start_date"], DateHelper().today.date().strftime("%Y-%m-%d"), - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=False, invoice_month=None, ) @@ -563,7 +562,7 @@ def test_get_report_data_gcp(self, mock_update, _): params["provider_uuid"], params["start_date"], DateHelper().today.date().strftime("%Y-%m-%d"), - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=False, invoice_month=self.invoice, ) @@ -592,7 +591,7 @@ def test_get_report_data_gcp_end_date(self, mock_update, _): params["provider_uuid"], params["end_date"], 
DateHelper().today.date().strftime("%Y-%m-%d"), - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=False, invoice_month=self.invoice, ) @@ -623,7 +622,7 @@ def test_get_report_data_gcp_invoice_month(self, mock_update, _): params["provider_uuid"], self.start_date, end_date, - queue_name=PRIORITY_QUEUE, + queue_name=PriorityQueue.DEFAULT, ocp_on_cloud=False, invoice_month="202209", ) diff --git a/koku/masu/test/api/test_update_cost_model_costs.py b/koku/masu/test/api/test_update_cost_model_costs.py index e6aae68c2a..51b02b16eb 100644 --- a/koku/masu/test/api/test_update_cost_model_costs.py +++ b/koku/masu/test/api/test_update_cost_model_costs.py @@ -11,7 +11,7 @@ from django.test.utils import override_settings from django.urls import reverse -from masu.processor.tasks import QUEUE_LIST +from common.queues import QUEUE_LIST from masu.test import MasuTestCase diff --git a/koku/masu/test/api/test_update_openshift_on_cloud.py b/koku/masu/test/api/test_update_openshift_on_cloud.py index 1681971df0..8871ebe7a6 100644 --- a/koku/masu/test/api/test_update_openshift_on_cloud.py +++ b/koku/masu/test/api/test_update_openshift_on_cloud.py @@ -10,7 +10,7 @@ from django.urls import reverse from api.utils import DateHelper -from masu.processor.tasks import QUEUE_LIST +from common.queues import QUEUE_LIST from masu.test import MasuTestCase diff --git a/koku/masu/test/external/test_kafka_msg_handler.py b/koku/masu/test/external/test_kafka_msg_handler.py index b09200bde0..3191b28f0e 100644 --- a/koku/masu/test/external/test_kafka_msg_handler.py +++ b/koku/masu/test/external/test_kafka_msg_handler.py @@ -23,12 +23,11 @@ from requests.exceptions import HTTPError import masu.external.kafka_msg_handler as msg_handler +from common.queues import OCPQueue from kafka_utils.utils import UPLOAD_TOPIC from masu.config import Config from masu.external.kafka_msg_handler import KafkaMsgHandlerError from masu.processor.report_processor import ReportProcessorError -from 
masu.processor.tasks import OCP_QUEUE -from masu.processor.tasks import OCP_QUEUE_XL from masu.prometheus_stats import WORKER_REGISTRY from masu.test import MasuTestCase from masu.util.ocp import common as utils @@ -525,7 +524,7 @@ def test_summarize_manifest_dates(self): with patch("masu.external.kafka_msg_handler.MANIFEST_ACCESSOR.manifest_ready_for_summary", return_value=True): with patch("masu.external.kafka_msg_handler.summarize_reports.s") as mock_summarize_reports: msg_handler.summarize_manifest(report_meta, self.manifest_id) - mock_summarize_reports.assert_called_with([expected_meta], OCP_QUEUE) + mock_summarize_reports.assert_called_with([expected_meta], OCPQueue.DEFAULT) with patch("masu.external.kafka_msg_handler.MANIFEST_ACCESSOR.manifest_ready_for_summary", return_value=False): with patch("masu.external.kafka_msg_handler.summarize_reports.s") as mock_summarize_reports: @@ -869,9 +868,9 @@ def test_summarize_manifest_called_with_XL_queue(self): with patch("masu.external.kafka_msg_handler.ReportManifestDBAccessor") as mock_accessor: mock_accessor.return_value.__enter__.return_value = mock_manifest_accessor with patch("masu.external.kafka_msg_handler.summarize_reports.s") as mock_summarize_reports: - with patch("masu.external.kafka_msg_handler.is_customer_large", return_value=True): + with patch("masu.external.kafka_msg_handler.get_customer_queue", return_value=OCPQueue.XL): msg_handler.summarize_manifest(report_meta, self.manifest_id) - self.assertIn(OCP_QUEUE_XL, mock_summarize_reports.call_args.args) + self.assertIn(OCPQueue.XL, mock_summarize_reports.call_args.args) def test_extract_payload_content_and_process_cr(self): with tempfile.TemporaryDirectory() as tmp_dir: diff --git a/koku/masu/test/processor/test_tasks.py b/koku/masu/test/processor/test_tasks.py index bbf1c5a7d0..275acc9706 100644 --- a/koku/masu/test/processor/test_tasks.py +++ b/koku/masu/test/processor/test_tasks.py @@ -31,6 +31,8 @@ from api.iam.models import Tenant from api.models 
import Provider +from common.queues import PriorityQueue +from common.queues import SummaryQueue from koku.middleware import KokuTenantMiddleware from masu.config import Config from masu.database import AWS_CUR_TABLE_MAP @@ -51,7 +53,6 @@ from masu.processor.tasks import autovacuum_tune_schema from masu.processor.tasks import get_report_files from masu.processor.tasks import mark_manifest_complete -from masu.processor.tasks import MARK_MANIFEST_COMPLETE_QUEUE from masu.processor.tasks import normalize_table_options from masu.processor.tasks import process_daily_openshift_on_cloud from masu.processor.tasks import process_openshift_on_cloud @@ -64,7 +65,6 @@ from masu.processor.tasks import update_cost_model_costs from masu.processor.tasks import update_openshift_on_cloud from masu.processor.tasks import update_summary_tables -from masu.processor.tasks import UPDATE_SUMMARY_TABLES_QUEUE_XL from masu.processor.tasks import vacuum_schema from masu.processor.worker_cache import create_single_task_cache_key from masu.test import MasuTestCase @@ -385,9 +385,9 @@ def test_summarize_reports_processing_with_XL_queue(self, mock_update_summary): report_meta["provider_uuid"] = provider_uuid report_meta["manifest_id"] = 1 reports_to_summarize = [report_meta] - with patch("masu.processor.tasks.is_customer_large", return_value=True): + with patch("masu.processor.tasks.get_customer_queue", return_value=SummaryQueue.XL): summarize_reports(reports_to_summarize) - mock_update_summary.s.return_value.apply_async.assert_called_with(queue=UPDATE_SUMMARY_TABLES_QUEUE_XL) + mock_update_summary.s.return_value.apply_async.assert_called_with(queue=SummaryQueue.XL) @patch("masu.processor.tasks.update_summary_tables") def test_summarize_reports_processing_list_with_none(self, mock_update_summary): @@ -863,7 +863,7 @@ def test_update_summary_tables_remove_expired_data(self, mock_select_for_update, manifest_list=[manifest_id], ingress_report_uuid=None, tracing_id=tracing_id, - 
).set(queue=MARK_MANIFEST_COMPLETE_QUEUE) + ).set(queue=PriorityQueue.DEFAULT) ) mock_chain.return_value.apply_async.assert_called() @@ -900,7 +900,7 @@ def test_update_summary_tables_remove_expired_data_gcp(self, mock_select_for_upd manifest_list=[manifest_id], ingress_report_uuid=None, tracing_id=tracing_id, - ).set(queue=MARK_MANIFEST_COMPLETE_QUEUE) + ).set(queue=PriorityQueue.DEFAULT) ) mock_chain.return_value.apply_async.assert_called() @@ -947,9 +947,9 @@ def test_get_report_data_for_all_providers(self, mock_update): def test_get_report_data_for_provider_with_XL_queue(self, mock_update): """Test GET report_data endpoint with provider and XL queue""" start_date = date.today() - with patch("masu.processor.tasks.is_customer_large", return_value=True): + with patch("masu.processor.tasks.get_customer_queue", return_value=SummaryQueue.XL): update_all_summary_tables(start_date) - mock_update.s.return_value.apply_async.assert_called_with(queue=UPDATE_SUMMARY_TABLES_QUEUE_XL) + mock_update.s.return_value.apply_async.assert_called_with(queue=SummaryQueue.XL) @patch("masu.processor.tasks.connection") def test_vacuum_schema(self, mock_conn): @@ -1381,8 +1381,7 @@ def test_update_summary_tables_worker_throttled( time.sleep(3) self.assertFalse(self.single_task_is_running(task_name, cache_args)) - with patch("masu.processor.tasks.is_customer_large") as mock_customer: - mock_customer.return_value = True + with patch("masu.processor.tasks.get_customer_queue", return_value=SummaryQueue.XL): with patch("masu.processor.tasks.rate_limit_tasks") as mock_rate_limit: mock_rate_limit.return_value = False mock_delay.reset_mock() @@ -1643,8 +1642,7 @@ def test_update_openshift_on_cloud_throttled( time.sleep(3) self.assertFalse(self.single_task_is_running(task_name, cache_args)) - with patch("masu.processor.tasks.is_customer_large") as mock_customer: - mock_customer.return_value = True + with patch("masu.processor.tasks.get_customer_queue", return_value=SummaryQueue.XL): with 
patch("masu.processor.tasks.rate_limit_tasks") as mock_rate_limit: mock_rate_limit.return_value = False mock_delay.reset_mock() diff --git a/koku/reporting_common/test_reporting_common.py b/koku/reporting_common/test_reporting_common.py index 8e9866c810..bc6a7a0b4c 100644 --- a/koku/reporting_common/test_reporting_common.py +++ b/koku/reporting_common/test_reporting_common.py @@ -11,9 +11,8 @@ from api.models import Provider from api.utils import DateHelper +from common.queues import SummaryQueue from masu.processor.tasks import delayed_summarize_current_month -from masu.processor.tasks import UPDATE_SUMMARY_TABLES_QUEUE -from masu.processor.tasks import UPDATE_SUMMARY_TABLES_QUEUE_XL from masu.processor.tasks import UPDATE_SUMMARY_TABLES_TASK from masu.test import MasuTestCase from reporting_common.models import CombinedChoices @@ -120,9 +119,9 @@ def test_set_failed_status(self): self.assertIsNotNone(stats.failed_status) self.assertEqual(stats.status, CombinedChoices.FAILED) - @patch("masu.processor.tasks.is_customer_large") - def test_delayed_summarize_current_month(self, mock_large_customer): - mock_large_customer.return_value = False + @patch("masu.processor.tasks.get_customer_queue") + def test_delayed_summarize_current_month(self, mock_get_customer_queue): + mock_get_customer_queue.return_value = SummaryQueue.DEFAULT test_matrix = { Provider.PROVIDER_AWS: self.aws_provider, Provider.PROVIDER_AZURE: self.azure_provider, @@ -149,15 +148,15 @@ def test_delayed_summarize_current_month(self, mock_large_customer): ) self.assertEqual(db_entry.task_args, [self.schema_name]) - self.assertEqual(db_entry.queue_name, UPDATE_SUMMARY_TABLES_QUEUE) + self.assertEqual(db_entry.queue_name, SummaryQueue.DEFAULT) - @patch("masu.processor.tasks.is_customer_large") - def test_large_customer(self, mock_large_customer): - mock_large_customer.return_value = True + @patch("masu.processor.tasks.get_customer_queue") + def test_large_customer(self, mock_get_customer_queue): + 
mock_get_customer_queue.return_value = SummaryQueue.XL delayed_summarize_current_month(self.schema_name, [self.aws_provider.uuid], Provider.PROVIDER_AWS) with schema_context(self.schema): db_entry = DelayedCeleryTasks.objects.get(provider_uuid=self.aws_provider.uuid) - self.assertEqual(db_entry.queue_name, UPDATE_SUMMARY_TABLES_QUEUE_XL) + self.assertEqual(db_entry.queue_name, SummaryQueue.XL) @patch("reporting_common.models.celery_app") def test_trigger_celery_task(self, mock_celery_app): diff --git a/koku/sources/tasks.py b/koku/sources/tasks.py index c9202b6592..addae5ef76 100644 --- a/koku/sources/tasks.py +++ b/koku/sources/tasks.py @@ -11,9 +11,9 @@ from api.common import log_json from api.provider.models import Sources from api.provider.provider_manager import ProviderProcessingError +from common.queues import PriorityQueue +from common.queues import SummaryQueue from koku import celery_app -from masu.processor.tasks import PRIORITY_QUEUE -from masu.processor.tasks import REMOVE_EXPIRED_DATA_QUEUE from sources.api.source_status import SourceStatus from sources.sources_provider_coordinator import SourcesProviderCoordinator from sources.storage import load_providers_to_delete @@ -29,7 +29,7 @@ autoretry_for=(ProviderProcessingError,), retry_backoff=True, max_retries=settings.MAX_SOURCE_DELETE_RETRIES, - queue=PRIORITY_QUEUE, + queue=PriorityQueue.DEFAULT, ) def delete_source(self, source_id, auth_header, koku_uuid, account_number, org_id): """Delete Provider and Source.""" @@ -41,7 +41,7 @@ def delete_source(self, source_id, auth_header, koku_uuid, account_number, org_i LOG.info(log_json(msg="deleted provider", provider_uuid=koku_uuid, source_id=source_id)) -@celery_app.task(name="sources.tasks.delete_source_beat", queue=REMOVE_EXPIRED_DATA_QUEUE) +@celery_app.task(name="sources.tasks.delete_source_beat", queue=SummaryQueue.DEFAULT) def delete_source_beat(): providers = load_providers_to_delete() for p in providers: @@ -51,7 +51,7 @@ def delete_source_beat(): 
) -@celery_app.task(name="sources.tasks.source_status_beat", queue=PRIORITY_QUEUE) +@celery_app.task(name="sources.tasks.source_status_beat", queue=PriorityQueue.DEFAULT) def source_status_beat(): """Source Status push.""" sources_query = Sources.objects.filter(source_id__isnull=False).all() From c82c1cd461a99a324be8410b9b0734fd68aafc86 Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Wed, 19 Jun 2024 18:39:22 +0100 Subject: [PATCH 03/26] Improve our logging readability (#5178) --- koku/api/common/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/koku/api/common/__init__.py b/koku/api/common/__init__.py index 5d5e301bc7..8de8d82129 100644 --- a/koku/api/common/__init__.py +++ b/koku/api/common/__init__.py @@ -21,6 +21,8 @@ def log_json(tracing_id="", *, msg, context=None, **kwargs): stmt |= context stmt |= kwargs for key, value in stmt.items(): + if key == "split_files": + stmt[key] = len(value) if isinstance(value, UUID): stmt[key] = str(value) return stmt From 38fbe5cf852eb54c248d69b6e8d55abb738cdfd0 Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Thu, 20 Jun 2024 11:50:31 +0100 Subject: [PATCH 04/26] add prometheus metrics for new queues (#5179) --- ...ana-dashboard-insights-hccm.configmap.yaml | 1413 +++++++++++++++-- koku/common/queues.py | 12 +- koku/masu/prometheus_stats.py | 42 + 3 files changed, 1319 insertions(+), 148 deletions(-) diff --git a/dashboards/grafana-dashboard-insights-hccm.configmap.yaml b/dashboards/grafana-dashboard-insights-hccm.configmap.yaml index aa7d3db25b..11ace23bd9 100644 --- a/dashboards/grafana-dashboard-insights-hccm.configmap.yaml +++ b/dashboards/grafana-dashboard-insights-hccm.configmap.yaml @@ -623,7 +623,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery download_queue", + "description": "Cost Management celery download_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -735,7 +735,196 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - 
"description": "Cost Management celery download_queue", + "description": "Cost Management celery download_xl_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 450 + }, + { + "color": "red", + "value": 550 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 22, + "y": 19 + }, + "id": 87, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true + }, + "pluginVersion": "9.3.8", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(download_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, + "refId": "A" + } + ], + "title": "DL XL", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery download_penalty_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "displayName": "", + "mappings": [], + "thresholds": { + 
"mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 450 + }, + { + "color": "red", + "value": 550 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 19 + }, + "id": 86, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(download_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "interval": "", + "legendFormat": "Queue Size", + "range": true, + "refId": "A" + } + ], + "title": "Download Penalty", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery download_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -768,9 +957,941 @@ data: "h": 8, "w": 2, "x": 22, - "y": 19 + "y": 19 + }, + "id": 87, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true + }, + "pluginVersion": "9.3.8", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(download_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, + "refId": "A" + } + ], + "title": "DL Penalty", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery 
summary_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "decimals": 0, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 200 + }, + { + "color": "red", + "value": 300 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 27 + }, + "id": 69, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "expr": "koku:celery:summary_queue", + "refId": "A" + } + ], + "title": "Summary", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery summary_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 0, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 200 + }, + { + "color": "red", + "value": 300 + 
} + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 10, + "y": 27 + }, + "id": 77, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true + }, + "pluginVersion": "9.3.8", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "expr": "koku:celery:summary_queue", + "refId": "A" + } + ], + "title": "Summary", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery summary_xl_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 200 + }, + { + "color": "red", + "value": 300 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 27 + }, + "id": 84, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + 
"pluginVersion": "8.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, + "refId": "A" + } + ], + "title": "Summary XL", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery summary_xl_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 200 + }, + { + "color": "red", + "value": 300 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 22, + "y": 27 + }, + "id": 85, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true + }, + "pluginVersion": "9.3.8", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, + "refId": "A" + } + ], + "title": "Sum XL", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery summary_penalty_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + 
"drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 200 + }, + { + "color": "red", + "value": 300 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 27 + }, + "id": 84, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(summary_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, + "refId": "A" + } + ], + "title": "Summary Penalty", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery summary_penalty_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 200 + }, + { + "color": "red", + "value": 300 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 22, + "y": 27 + }, + "id": 
85, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true + }, + "pluginVersion": "9.3.8", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(summary_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, + "refId": "A" + } + ], + "title": "Sum Penalty", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery priority_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 35 + }, + "id": 90, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.2", + "targets": [ + { + 
"datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "expr": "koku:celery:priority_queue", + "refId": "A" + } + ], + "title": "Priority", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery priority_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 10, + "y": 35 + }, + "id": 91, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true + }, + "pluginVersion": "9.3.8", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "expr": "koku:celery:priority_queue", + "refId": "A" + } + ], + "title": "Priority", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery priority_xl_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + 
"showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 35 + }, + "id": 70, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, + "refId": "A" + } + ], + "title": "Priority XL", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery priority_xl_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 22, + "y": 35 }, - "id": 87, + "id": 80, "options": { "orientation": "auto", "reduceOptions": { @@ -791,12 +1912,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(download_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": 
"max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "DL XL", + "title": "Priority XL", "transformations": [ { "id": "seriesToColumns", @@ -812,7 +1933,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery priority_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -848,7 +1969,6 @@ data: "mode": "line+area" } }, - "decimals": 0, "displayName": "", "mappings": [], "thresholds": { @@ -860,11 +1980,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -875,10 +1995,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 0, - "y": 27 + "x": 12, + "y": 35 }, - "id": 69, + "id": 70, "options": { "legend": { "calcs": [ @@ -901,11 +2021,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:summary_queue", + "editorMode": "code", + "expr": "max(priority_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "Summary", + "title": "Priority Penalty", "transformations": [ { "id": "seriesToColumns", @@ -921,13 +2044,12 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery priority_penalty_queue", "fieldConfig": { "defaults": { "color": { "mode": "thresholds" }, - "decimals": 0, "displayName": "", "mappings": [], "thresholds": { @@ -939,11 +2061,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -954,10 +2076,10 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 10, - "y": 27 + "x": 22, + "y": 35 }, - "id": 77, + "id": 80, "options": { "orientation": "auto", "reduceOptions": { @@ -977,11 +2099,13 @@ data: "type": "prometheus", "uid": "${Datasource}" 
}, - "expr": "koku:celery:summary_queue", + "editorMode": "code", + "expr": "max(priority_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "Summary", + "title": "Priority Penalty", "transformations": [ { "id": "seriesToColumns", @@ -997,7 +2121,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery download_queue", + "description": "Cost Management celery ocp_queue", "fieldConfig": { "defaults": { "color": { @@ -1059,10 +2183,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 27 + "x": 0, + "y": 43 }, - "id": 84, + "id": 74, "options": { "legend": { "calcs": [ @@ -1085,14 +2209,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "legendFormat": "Queue Size", - "range": true, + "expr": "koku:celery:ocp_queue", "refId": "A" } ], - "title": "Summary XL", + "title": "OCP", "transformations": [ { "id": "seriesToColumns", @@ -1108,7 +2229,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery download_queue", + "description": "Cost Management celery ocp_queue", "fieldConfig": { "defaults": { "color": { @@ -1140,10 +2261,10 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 27 + "x": 10, + "y": 43 }, - "id": 85, + "id": 83, "options": { "orientation": "auto", "reduceOptions": { @@ -1163,13 +2284,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "range": true, + "expr": "koku:celery:ocp_queue", "refId": "A" } ], - "title": "Sum XL", + "title": "OCP", "transformations": [ { "id": "seriesToColumns", @@ -1185,7 +2304,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_queue", + "description": "Cost Management celery 
ocp_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -1232,11 +2351,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - "value": 100 + "value": 300 } ] }, @@ -1247,10 +2366,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 0, - "y": 35 + "x": 12, + "y": 43 }, - "id": 90, + "id": 88, "options": { "legend": { "calcs": [ @@ -1273,11 +2392,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:priority_queue", + "editorMode": "code", + "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "Priority", + "title": "OCP XL", "transformations": [ { "id": "seriesToColumns", @@ -1293,7 +2415,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_queue", + "description": "Cost Management celery ocp_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -1310,11 +2432,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - "value": 100 + "value": 300 } ] }, @@ -1325,10 +2447,10 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 10, - "y": 35 + "x": 22, + "y": 43 }, - "id": 91, + "id": 89, "options": { "orientation": "auto", "reduceOptions": { @@ -1348,11 +2470,13 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:priority_queue", + "editorMode": "code", + "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "Priority", + "title": "OCP XL", "transformations": [ { "id": "seriesToColumns", @@ -1368,7 +2492,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_queue", + "description": "Cost Management celery ocp_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -1415,11 +2539,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - 
"value": 100 + "value": 300 } ] }, @@ -1431,9 +2555,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 35 + "y": 43 }, - "id": 70, + "id": 88, "options": { "legend": { "calcs": [ @@ -1457,13 +2581,13 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(ocp_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "Priority XL", + "title": "OCP Penalty", "transformations": [ { "id": "seriesToColumns", @@ -1479,7 +2603,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_queue", + "description": "Cost Management celery ocp_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -1496,11 +2620,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - "value": 100 + "value": 300 } ] }, @@ -1512,9 +2636,9 @@ data: "h": 8, "w": 2, "x": 22, - "y": 35 + "y": 43 }, - "id": 80, + "id": 89, "options": { "orientation": "auto", "reduceOptions": { @@ -1535,12 +2659,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(ocp_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Pri XL", + "title": "OCP Penalty", "transformations": [ { "id": "seriesToColumns", @@ -1556,7 +2680,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_queue", + "description": "Cost Management celery cost_model_queue", "fieldConfig": { "defaults": { "color": { @@ -1603,11 +2727,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -1619,9 +2743,9 @@ data: "h": 8, "w": 10, "x": 0, - "y": 43 + "y": 51 }, - "id": 74, + "id": 92, "options": { "legend": { 
"calcs": [ @@ -1644,11 +2768,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:ocp_queue", + "expr": "koku:celery:cost_model_queue", "refId": "A" } ], - "title": "OCP", + "title": "Cost Model", "transformations": [ { "id": "seriesToColumns", @@ -1664,7 +2788,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_queue", + "description": "Cost Management celery cost_model_queue", "fieldConfig": { "defaults": { "color": { @@ -1681,11 +2805,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -1697,9 +2821,9 @@ data: "h": 8, "w": 2, "x": 10, - "y": 43 + "y": 51 }, - "id": 83, + "id": 93, "options": { "orientation": "auto", "reduceOptions": { @@ -1719,11 +2843,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:ocp_queue", + "expr": "koku:celery:cost_model_queue", "refId": "A" } ], - "title": "OCP", + "title": "Cost Model", "transformations": [ { "id": "seriesToColumns", @@ -1739,7 +2863,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_queue", + "description": "Cost Management celery cost_model_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -1786,11 +2910,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -1802,9 +2926,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 43 + "y": 51 }, - "id": 88, + "id": 72, "options": { "legend": { "calcs": [ @@ -1828,13 +2952,13 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "OCP XL", + "title": "Cost Model XL", "transformations": [ { "id": "seriesToColumns", @@ -1850,7 
+2974,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_queue", + "description": "Cost Management celery cost_model_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -1867,11 +2991,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -1883,9 +3007,9 @@ data: "h": 8, "w": 2, "x": 22, - "y": 43 + "y": 51 }, - "id": 89, + "id": 82, "options": { "orientation": "auto", "reduceOptions": { @@ -1906,12 +3030,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "OCP XL", + "title": "CM XL", "transformations": [ { "id": "seriesToColumns", @@ -1927,7 +3051,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_queue", + "description": "Cost Management celery cost_model_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -1989,10 +3113,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 0, + "x": 12, "y": 51 }, - "id": 92, + "id": 72, "options": { "legend": { "calcs": [ @@ -2015,11 +3139,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:cost_model_queue", + "editorMode": "code", + "expr": "max(cost_model_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "Cost Model", + "title": "Cost Model Penalty", "transformations": [ { "id": "seriesToColumns", @@ -2035,7 +3162,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_queue", + "description": "Cost Management celery cost_model_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -2067,10 +3194,10 @@ data: "gridPos": { "h": 8, 
"w": 2, - "x": 10, + "x": 22, "y": 51 }, - "id": 93, + "id": 82, "options": { "orientation": "auto", "reduceOptions": { @@ -2090,11 +3217,13 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:cost_model_queue", + "editorMode": "code", + "expr": "max(cost_model_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "Cost Model", + "title": "CM Penalty", "transformations": [ { "id": "seriesToColumns", @@ -2110,7 +3239,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_queue", + "description": "Cost Management celery refresh_queue", "fieldConfig": { "defaults": { "color": { @@ -2172,10 +3301,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 51 + "x": 0, + "y": 59 }, - "id": 72, + "id": 71, "options": { "legend": { "calcs": [ @@ -2198,14 +3327,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "legendFormat": "Queue Size", - "range": true, + "expr": "koku:celery:refresh_queue", "refId": "A" } ], - "title": "Cost Model XL", + "title": "Refresh", "transformations": [ { "id": "seriesToColumns", @@ -2221,7 +3347,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_queue", + "description": "Cost Management celery refresh_queue", "fieldConfig": { "defaults": { "color": { @@ -2253,10 +3379,10 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 51 + "x": 10, + "y": 59 }, - "id": 82, + "id": 78, "options": { "orientation": "auto", "reduceOptions": { @@ -2276,13 +3402,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "range": true, + "expr": "koku:celery:refresh_queue", "refId": "A" } ], - "title": "CM XL", + "title": 
"Refresh", "transformations": [ { "id": "seriesToColumns", @@ -2298,7 +3422,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery refresh_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -2360,10 +3484,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 0, + "x": 12, "y": 59 }, - "id": 71, + "id": 94, "options": { "legend": { "calcs": [ @@ -2386,11 +3510,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:refresh_queue", + "editorMode": "code", + "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "Refresh", + "title": "Refresh XL", "transformations": [ { "id": "seriesToColumns", @@ -2406,7 +3533,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery refresh_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -2438,10 +3565,10 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 10, + "x": 22, "y": 59 }, - "id": 78, + "id": 95, "options": { "orientation": "auto", "reduceOptions": { @@ -2461,11 +3588,13 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:refresh_queue", + "editorMode": "code", + "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "Refresh", + "title": "Ref XL", "transformations": [ { "id": "seriesToColumns", @@ -2481,7 +3610,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery refresh_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -2570,13 +3699,13 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + 
"expr": "max(refresh_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "Refresh XL", + "title": "Refresh Penalty", "transformations": [ { "id": "seriesToColumns", @@ -2592,7 +3721,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery refresh_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -2648,12 +3777,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(refresh_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Ref XL", + "title": "Ref Penalty", "transformations": [ { "id": "seriesToColumns", diff --git a/koku/common/queues.py b/koku/common/queues.py index 169ed85753..85c5cae1c2 100644 --- a/koku/common/queues.py +++ b/koku/common/queues.py @@ -13,37 +13,37 @@ class DownloadQueue(StrEnum): DEFAULT = "download" XL = "download_xl" - PENALTY_BOX = "download_penalty_box" + PENALTY_BOX = "download_penalty" class OCPQueue(StrEnum): DEFAULT = "ocp" XL = "ocp_xl" - PENALTY_BOX = "ocp_penalty_box" + PENALTY_BOX = "ocp_penalty" class PriorityQueue(StrEnum): DEFAULT = "priority" XL = "priority_xl" - PENALTY_BOX = "priority_penalty_box" + PENALTY_BOX = "priority_penalty" class SummaryQueue(StrEnum): DEFAULT = "summary" XL = "summary_xl" - PENALTY_BOX = "summary_penalty_box" + PENALTY_BOX = "summary_penalty" class CostModelQueue(StrEnum): DEFAULT = "cost_model" XL = "cost_model_xl" - PENALTY_BOX = "cost_model_penalty_box" + PENALTY_BOX = "cost_model_penalty" class RefreshQueue(StrEnum): DEFAULT = "refresh" XL = "refresh_xl" - PENALTY_BOX = "refresh_penalty_box" + PENALTY_BOX = "refresh_penalty" # any additional queues should be added to this list diff --git a/koku/masu/prometheus_stats.py 
b/koku/masu/prometheus_stats.py index b77e8add95..8a25c2c802 100644 --- a/koku/masu/prometheus_stats.py +++ b/koku/masu/prometheus_stats.py @@ -62,6 +62,12 @@ registry=WORKER_REGISTRY, multiprocess_mode="livesum", ) +DOWNLOAD_PENALTY_BACKLOG = Gauge( + "download_penalty_backlog", + "Number of celery tasks in the download_penalty queue", + registry=WORKER_REGISTRY, + multiprocess_mode="livesum", +) SUMMARY_BACKLOG = Gauge( "summary_backlog", "Number of celery tasks in the summary queue", @@ -74,6 +80,12 @@ registry=WORKER_REGISTRY, multiprocess_mode="livesum", ) +SUMMARY_PENALTY_BACKLOG = Gauge( + "summary_penalty_backlog", + "Number of celery tasks in the summary_penalty queue", + registry=WORKER_REGISTRY, + multiprocess_mode="livesum", +) PRIORITY_BACKLOG = Gauge( "priority_backlog", "Number of celery tasks in the priority queue", @@ -86,6 +98,12 @@ registry=WORKER_REGISTRY, multiprocess_mode="livesum", ) +PRIORITY_PENALTY_BACKLOG = Gauge( + "priority_penalty_backlog", + "Number of celery tasks in the priority_penalty queue", + registry=WORKER_REGISTRY, + multiprocess_mode="livesum", +) REFRESH_BACKLOG = Gauge( "refresh_backlog", "Number of celery tasks in the refresh queue", @@ -98,6 +116,12 @@ registry=WORKER_REGISTRY, multiprocess_mode="livesum", ) +REFRESH_PENALTY_BACKLOG = Gauge( + "refresh_penalty_backlog", + "Number of celery tasks in the refresh_penalty queue", + registry=WORKER_REGISTRY, + multiprocess_mode="livesum", +) COST_MODEL_BACKLOG = Gauge( "cost_model_backlog", "Number of celery tasks in the cost_model queue", @@ -110,6 +134,12 @@ registry=WORKER_REGISTRY, multiprocess_mode="livesum", ) +COST_MODEL_PENALTY_BACKLOG = Gauge( + "cost_model_penalty_backlog", + "Number of celery tasks in the cost_model_penalty queue", + registry=WORKER_REGISTRY, + multiprocess_mode="livesum", +) DEFAULT_BACKLOG = Gauge( "default_backlog", "Number of celery tasks in the default queue", @@ -125,6 +155,12 @@ registry=WORKER_REGISTRY, multiprocess_mode="livesum", ) 
+OCP_PENALTY_BACKLOG = Gauge( + "ocp_penalty_backlog", + "Number of celery tasks in the OCP_penalty queue", + registry=WORKER_REGISTRY, + multiprocess_mode="livesum", +) HCS_BACKLOG = Gauge( "hcs_backlog", "Number of celery tasks in the HCS queue", registry=WORKER_REGISTRY, multiprocess_mode="livesum" @@ -147,17 +183,23 @@ QUEUES = { "download": DOWNLOAD_BACKLOG, "download_xl": DOWNLOAD_XL_BACKLOG, + "download_penalty": DOWNLOAD_PENALTY_BACKLOG, "summary": SUMMARY_BACKLOG, "summary_xl": SUMMARY_XL_BACKLOG, + "summary_penalty": SUMMARY_PENALTY_BACKLOG, "priority": PRIORITY_BACKLOG, "priority_xl": PRIORITY_XL_BACKLOG, + "priority_penalty": PRIORITY_PENALTY_BACKLOG, "refresh": REFRESH_BACKLOG, "refresh_xl": REFRESH_XL_BACKLOG, + "refresh_penalty": REFRESH_PENALTY_BACKLOG, "cost_model": COST_MODEL_BACKLOG, "cost_model_xl": COST_MODEL_XL_BACKLOG, + "cost_model_penalty": COST_MODEL_PENALTY_BACKLOG, "celery": DEFAULT_BACKLOG, "ocp": OCP_BACKLOG, "ocp_xl": OCP_XL_BACKLOG, + "ocp_penalty": OCP_PENALTY_BACKLOG, "hcs": HCS_BACKLOG, "subs_extraction": SUBS_EXTRACTION_BACKLOG, "subs_transmission": SUBS_TRANSMISSION_BACKLOG, From a75fd52df6000d02bdab44dc4b149d5a435fc603 Mon Sep 17 00:00:00 2001 From: Michael Skarbek Date: Fri, 21 Jun 2024 10:57:19 -0400 Subject: [PATCH 05/26] add v3.3.0 operator commits (#5143) --- koku/masu/util/ocp/common.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/koku/masu/util/ocp/common.py b/koku/masu/util/ocp/common.py index 33e3b30d7f..87db5f2db0 100644 --- a/koku/masu/util/ocp/common.py +++ b/koku/masu/util/ocp/common.py @@ -36,6 +36,7 @@ class OCPReportTypes(Enum): OPERATOR_VERSIONS = { + "8c10aa090b2be3d2aea7553ce2cb62e78844ce6f": "costmanagement-metrics-operator:3.3.0", "212f944b3b1d7cfbf6e48a63c4ed74bfe942bbe1": "costmanagement-metrics-operator:3.2.1", "9d463e92ba69d82513d8ec53edc5242658623840": "costmanagement-metrics-operator:3.2.0", "e3ab976307639acff6cc86e25f90f242c45d7210": "costmanagement-metrics-operator:3.1.0", @@ -52,6 +53,7 @@ 
class OCPReportTypes(Enum): "084bca2e1c48caab18c237453c17ceef61747fe2": "costmanagement-metrics-operator:1.1.3", "6f10d07e3af3ea4f073d4ffda9019d8855f52e7f": "costmanagement-metrics-operator:1.1.0", "fd764dcd7e9b993025f3e05f7cd674bb32fad3be": "costmanagement-metrics-operator:1.0.0", + "1650a9fa9f353efee534dde6030ece40e6a9a1ee": "koku-metrics-operator:v3.3.0", "631434d278be57cfedaa5ad0000cb3a3dfb69a76": "koku-metrics-operator:v3.2.1", "06f3ed1c48b889f64ecec09e55f0bd7c2f09fe54": "koku-metrics-operator:v3.2.0", "b3525a536a402d5bed9b5bbd739fb6a89c8e92e0": "koku-metrics-operator:v3.1.0", From 5c332d44a07fa2772427d8b17e9c6b6f7e687957 Mon Sep 17 00:00:00 2001 From: Sam Doran Date: Fri, 21 Jun 2024 17:06:53 -0400 Subject: [PATCH 06/26] [COST-5124] Improve Trino migration management command (#5163) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add exponential backoff and logging to retries * Change log level to reflect severity * Explicit SQL alias for clarity * Catch and log exception instead of exiting * Add return type hints * Return if unsuccessful No point in verifying if the SQL did not run correctly * Fine tune exponential backoff * Create action class for adding/verifying columns were added * Assign default list using default_fatory Instead of doing it in the post_init, which get’s a little weird. * Add drop column action * Quote items in logs for better legibility * Consolidate action classes We lose some of the action-specific logging messages, but there is less code overall. I’m not sure how this scale to the action related to dropping. * Change local variable name No need to add a prefix to differentiate it from the parameter name. 
* Use a set to prevent running on the same schema multiple times Co-authored-by: Cody Myers --- .../commands/migrate_trino_tables.py | 348 +++++++++++------- 1 file changed, 223 insertions(+), 125 deletions(-) diff --git a/koku/masu/management/commands/migrate_trino_tables.py b/koku/masu/management/commands/migrate_trino_tables.py index e7fe9ba80f..9ce04e4743 100644 --- a/koku/masu/management/commands/migrate_trino_tables.py +++ b/koku/masu/management/commands/migrate_trino_tables.py @@ -6,6 +6,11 @@ import json import logging import re +import secrets +import sys +import textwrap +import time +import typing as t from datetime import datetime from datetime import timedelta @@ -81,7 +86,7 @@ class CommaSeparatedArgs(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): + def __call__(self, parser, namespace, values, option_string=None) -> None: vals = values.split(",") if not all(VALID_CHARACTERS.match(v) for v in vals): raise ValueError(f"String should match pattern '{VALID_CHARACTERS.pattern}': {vals}") @@ -89,7 +94,7 @@ def __call__(self, parser, namespace, values, option_string=None): class JSONArgs(argparse.Action): - def __call__(self, parser, namespace, values, option_string=None): + def __call__(self, parser, namespace, values, option_string=None) -> None: setattr(namespace, self.dest, json.loads(values)) @@ -121,25 +126,125 @@ class ListDropPartitions(BaseModel): list: list[DropPartition] +class Action(BaseModel): + list_of_cols: t.Union[ListAddColumns, ListDropColumns] + schemas: t.Optional[list[str]] = Field(default_factory=list) + find_query: str + modify_query: str + + def model_post_init(self, *arg, **kwargs) -> None: + if not self.schemas: + try: + self.schemas = self.get_schemas() + except TrinoExternalError as exc: + LOG.error(exc) + + return self.schemas + + def get_schemas(self) -> set[str]: + LOG.info("Finding all schema for migration") + result = set() + for col in self.list_of_cols.list: + schemas = 
run_trino_sql(textwrap.dedent(self.find_query.format(col=col))) + schemas = [ + schema + for listed_schema in schemas + for schema in listed_schema + if schema not in ["default", "information_schema"] + ] + result.update(schemas) + + return result + + def run(self) -> None: + if not self.schemas: + LOG.info("No schemas to update found") + return + + LOG.info(f"Running against the following schemas: {self.schemas}") + for schema in self.schemas: + LOG.info(f"Modifying tables for schema {schema}") + for col in self.list_of_cols.list: + try: + result = run_trino_sql(textwrap.dedent(self.modify_query.format(col=col)), schema) + LOG.info(f"ALTER TABLE result: {result}") + except Exception as e: + LOG.error(e) + return + + self.validate() + LOG.info("Migration successful") + + def validate(self) -> None: + schemas = self.get_schemas() + LOG.info("Validating...") + if schemas: + LOG.error(f"Migration failed for the follow schemas: {schemas}") + sys.exit(1) + + +class AddColumnAction(Action): + @classmethod + def build(cls, list_of_cols: ListAddColumns, schemas: list[str]): + find_query = """ + SELECT t.table_schema + FROM information_schema.tables AS t + LEFT JOIN information_schema.columns AS c + ON t.table_schema = c.table_schema AND t.table_name = c.table_name AND c.column_name = '{col.column}' + WHERE t.table_name = '{col.table}' + AND c.column_name IS NULL + AND t.table_schema NOT IN ('information_schema', 'sys', 'mysql', 'performance_schema') + AND t.table_type = 'BASE TABLE' + """ + return cls( + list_of_cols=list_of_cols, + schemas=schemas, + find_query=find_query, + modify_query="ALTER TABLE IF EXISTS {col.table} ADD COLUMN IF NOT EXISTS {col.column} {col.datatype}", + ) + + +class DropColumnAction(Action): + @classmethod + def build(cls, list_of_cols: ListDropColumns, schemas: list[str]): + find_query = """ + SELECT t.table_schema + FROM information_schema.tables AS t + LEFT JOIN information_schema.columns AS c + ON t.table_schema = c.table_schema AND 
t.table_name = c.table_name AND c.column_name = '{col.column}' + WHERE t.table_name = '{col.table}' + AND c.column_name IS NOT NULL + AND t.table_schema NOT IN ('information_schema', 'sys', 'mysql', 'performance_schema') + AND t.table_type = 'BASE TABLE' + """ + return cls( + list_of_cols=list_of_cols, + schemas=schemas, + find_query=find_query, + modify_query="ALTER TABLE IF EXISTS {col.table} DROP COLUMN IF EXISTS {col.column}", + ) + + class Command(BaseCommand): help = "" - def add_arguments(self, parser: argparse.ArgumentParser): + def add_arguments(self, parser: argparse.ArgumentParser) -> None: + exclusive_group = parser.add_mutually_exclusive_group() parser.add_argument( "--schemas", action=CommaSeparatedArgs, help="a comma separated list of schemas to run the provided command against. default is all schema in db", default=[], ) - parser.add_argument( + exclusive_group.add_argument( "--drop-tables", action=CommaSeparatedArgs, default=[], help="a comma separated list of tables to drop", dest="tables_to_drop", ) - parser.add_argument( + exclusive_group.add_argument( "--drop-columns", action=JSONArgs, help=( @@ -148,7 +253,7 @@ def add_arguments(self, parser: argparse.ArgumentParser): ), dest="columns_to_drop", ) - parser.add_argument( + exclusive_group.add_argument( "--drop-partitions", action=JSONArgs, help=( @@ -157,7 +262,7 @@ def add_arguments(self, parser: argparse.ArgumentParser): ), dest="partitions_to_drop", ) - parser.add_argument( + exclusive_group.add_argument( "--add-columns", action=JSONArgs, help=( @@ -166,47 +271,35 @@ def add_arguments(self, parser: argparse.ArgumentParser): ), dest="columns_to_add", ) - parser.add_argument( + exclusive_group.add_argument( "--remove-expired-partitions", action=CommaSeparatedArgs, default=[], dest="remove_expired_partitions" ) + parser.add_argument( + "--rerun", + action="store_true", + help="a flag to indicate we should find schemas missing the migration", + dest="rerun", + ) - def handle(self, *args, 
**options): # noqa C901 - schemas = get_schemas(options["schemas"]) - if not schemas: - LOG.info("no schema in db to update") - return - LOG.info(f"running against the following schemas: {schemas}") - + def handle(self, *args, **options) -> None: if columns_to_add := options["columns_to_add"]: columns_to_add = ListAddColumns(list=columns_to_add) - if columns_to_drop := options["columns_to_drop"]: + action = AddColumnAction.build(list_of_cols=columns_to_add, schemas=options["schemas"]) + action.run() + elif columns_to_drop := options["columns_to_drop"]: columns_to_drop = ListDropColumns(list=columns_to_drop) - if partitions_to_drop := options["partitions_to_drop"]: + action = DropColumnAction.build(list_of_cols=columns_to_drop, schemas=options["schemas"]) + action.run() + elif partitions_to_drop := options["partitions_to_drop"]: partitions_to_drop = ListDropPartitions(list=partitions_to_drop) - tables_to_drop = options["tables_to_drop"] - expired_partition_tables = options["remove_expired_partitions"] - - for schema in schemas: - if tables_to_drop: - LOG.info(f"*** dropping tables {tables_to_drop} for schema {schema} ***") - drop_tables(tables_to_drop, schema) - if columns_to_add: - LOG.info(f"*** adding column to tables for schema {schema} ***") - add_columns_to_tables(columns_to_add, schema) - if columns_to_drop: - LOG.info(f"*** dropping column from tables for schema {schema} ***") - drop_columns_from_tables(columns_to_drop, schema) - if partitions_to_drop: - LOG.info(f"*** dropping partition from tables for schema {schema} ***") - drop_partitions_from_tables(partitions_to_drop, schema) - if expired_partition_tables: - LOG.info(f"** dropping expired partitions from table for {schema}") - drop_expired_partitions(expired_partition_tables, schema) - - -def get_schemas(schemas: None): - if schemas: - return schemas + drop_partitions_from_tables(partitions_to_drop, options["schemas"]) + elif tables_to_drop := options["tables_to_drop"]: + drop_tables(tables_to_drop, 
options["schemas"]) + elif expired_partition_tables := options["remove_expired_partitions"]: + drop_expired_partitions(expired_partition_tables, options["schemas"]) + + +def get_all_schemas() -> list[str]: sql = "SELECT schema_name FROM information_schema.schemata" schemas = run_trino_sql(sql) schemas = [ @@ -215,85 +308,84 @@ def get_schemas(schemas: None): for schema in listed_schema if schema not in ["default", "information_schema"] ] + if not schemas: + LOG.info("No schema in DB to update") + return schemas -def run_trino_sql(sql, schema=None): +def run_trino_sql(sql, schema=None) -> t.Optional[str]: retries = 5 - for i in range(retries): + for n in range(1, retries + 1): + attempt = n + remaining_retries = retries - n + # Exponential backoff with a little bit of randomness and a + # minimum wait of 0.5 and max wait of 7 + wait = (min(2**n, 7) + secrets.randbelow(1000) / 1000) or 0.5 try: with trino_db.connect(schema=schema) as conn: cur = conn.cursor() cur.execute(sql) return cur.fetchall() except TrinoExternalError as err: - if err.error_name == "HIVE_METASTORE_ERROR" and i < (retries - 1): - continue + if err.error_name == "HIVE_METASTORE_ERROR" and n < (retries): + LOG.warn( + f"{err.message}. Attempt number {attempt} of {retries} failed. " + f"Trying {remaining_retries} more time{'s' if remaining_retries > 1 else ''} " + f"after waiting {wait:.2f}s." 
+ ) + time.sleep(wait) else: raise err except TrinoUserError as err: - LOG.info(err.message) - return + LOG.error(err.message) + return schema -def drop_tables(tables, schema): +def drop_tables(tables, schemas) -> None: """drop specified tables""" + if not schemas: + schemas = get_all_schemas() if not set(tables).issubset(EXTERNAL_TABLES): raise ValueError("Attempting to drop non-external table, revise the list of tables to drop.", tables) - for table_name in tables: - LOG.info(f"dropping table {table_name}") - sql = f"DROP TABLE IF EXISTS {table_name}" - try: - result = run_trino_sql(sql, schema) - LOG.info(f"DROP TABLE result: {result}") - except Exception as e: - LOG.info(e) - - -def add_columns_to_tables(list_of_cols: ListAddColumns, schema: str): - """add specified columns with datatypes to the tables""" - for col in list_of_cols.list: - LOG.info(f"adding column {col.column} of type {col.datatype} to table {col.table}") - sql = f"ALTER TABLE IF EXISTS {col.table} ADD COLUMN IF NOT EXISTS {col.column} {col.datatype}" - try: - result = run_trino_sql(sql, schema) - LOG.info(f"ALTER TABLE result: {result}") - except Exception as e: - LOG.info(e) - - -def drop_columns_from_tables(list_of_cols: ListDropColumns, schema: str): - """drop specified columns from tables""" - for col in list_of_cols.list: - LOG.info(f"dropping column {col.column} from table {col.table}") - sql = f"ALTER TABLE IF EXISTS {col.table} DROP COLUMN IF EXISTS {col.column}" - try: - result = run_trino_sql(sql, schema) - LOG.info(f"ALTER TABLE result: {result}") - except Exception as e: - LOG.info(e) + LOG.info(f"Running against the following schemas: {schemas}") + for schema in schemas: + LOG.info(f"Dropping tables {tables} for schema {schema}") + for table_name in tables: + LOG.info(f"Dropping table {table_name}") + sql = f"DROP TABLE IF EXISTS {table_name}" + try: + result = run_trino_sql(sql, schema) + LOG.info(f"DROP TABLE result: {result}") + except Exception as e: + LOG.error(e) -def 
drop_partitions_from_tables(list_of_partitions: ListDropPartitions, schema: str): +def drop_partitions_from_tables(list_of_partitions: ListDropPartitions, schemas: list) -> None: """drop specified partitions from tables""" - for part in list_of_partitions.list: - sql = f"SELECT count(DISTINCT {part.partition_column}) FROM {part.table}" - try: - result = run_trino_sql(sql, schema) - partition_count = result[0][0] - limit = 10000 - for i in range(0, partition_count, limit): - sql = f"SELECT DISTINCT {part.partition_column} FROM {part.table} OFFSET {i} LIMIT {limit}" + if not schemas: + schemas = get_all_schemas() + LOG.info(f"Running against the following schemas: {schemas}") + for schema in schemas: + LOG.info(f"Dropping partition from tables for schema {schema}") + for part in list_of_partitions.list: + sql = f"SELECT count(DISTINCT {part.partition_column}) FROM {part.table}" + try: result = run_trino_sql(sql, schema) - partitions = [res[0] for res in result] - - for partition in partitions: - LOG.info(f"*** Deleting {part.table} partition {part.partition_column} = {partition} ***") - sql = f"DELETE FROM {part.table} WHERE {part.partition_column} = '{partition}'" + partition_count = result[0][0] + limit = 10000 + for i in range(0, partition_count, limit): + sql = f"SELECT DISTINCT {part.partition_column} FROM {part.table} OFFSET {i} LIMIT {limit}" result = run_trino_sql(sql, schema) - LOG.info(f"DELETE PARTITION result: {result}") - except Exception as e: - LOG.info(e) + partitions = [res[0] for res in result] + + for partition in partitions: + LOG.info(f"Deleting {part.table} partition {part.partition_column} = {partition}") + sql = f"DELETE FROM {part.table} WHERE {part.partition_column} = '{partition}'" + result = run_trino_sql(sql, schema) + LOG.info(f"DELETE PARTITION result: {result}") + except Exception as e: + LOG.error(e) def check_table_exists(schema, table): @@ -335,33 +427,39 @@ def find_expired_partitions(schema, months, table, source_column_param): 
return run_trino_sql(expired_partitions_query, schema) -def drop_expired_partitions(tables, schema): +def drop_expired_partitions(tables, schemas): """Drop expired partitions""" + if not schemas: + schemas = get_all_schemas() + LOG.info(f"running against the following schemas: {schemas}") + for schema in schemas: + for table in tables: + if table in MANAGED_TABLES: + months = 5 + else: + LOG.info("Only supported for managed tables at the moment") + continue - for table in tables: - if table in MANAGED_TABLES: - months = 5 - else: - LOG.info("Only supported for managed tables at the moment") - return - source_column_param = manage_table_mapping[table] - if not check_table_exists(schema, table): - LOG.info(f"{table} does not exist for {schema}") - continue - expired_partitions = find_expired_partitions(schema, months, table, source_column_param) - if not expired_partitions: - LOG.info(f"No expired partitions found for {table} {schema}") - continue - LOG.info(f"Found {len(expired_partitions)}") - for partition in expired_partitions: - year, month, source = partition - LOG.info(f"Removing partition for {source} {year}-{month}") - # Using same query as what we use in db accessor - delete_partition_query = f""" - DELETE FROM hive.{schema}.{table} - WHERE {source_column_param} = '{source}' - AND year = '{year}' - AND (month = replace(ltrim(replace('{month}', '0', ' ')),' ', '0') OR month = '{month}') - """ - result = run_trino_sql(delete_partition_query, schema) - LOG.info(f"DELETE PARTITION result: {result}") + source_column_param = manage_table_mapping[table] + if not check_table_exists(schema, table): + LOG.info(f"{table} does not exist for {schema}") + continue + + expired_partitions = find_expired_partitions(schema, months, table, source_column_param) + if not expired_partitions: + LOG.info(f"No expired partitions found for {table} {schema}") + continue + + LOG.info(f"Found {len(expired_partitions)}") + for partition in expired_partitions: + year, month, source = 
partition + LOG.info(f"Removing partition for {source} {year}-{month}") + # Using same query as what we use in db accessor + delete_partition_query = f""" + DELETE FROM hive.{schema}.{table} + WHERE {source_column_param} = '{source}' + AND year = '{year}' + AND (month = replace(ltrim(replace('{month}', '0', ' ')),' ', '0') OR month = '{month}') + """ + result = run_trino_sql(delete_partition_query, schema) + LOG.info(f"DELETE PARTITION result: {result}") From fa07f9cf7b9bf0033573826edc878d1996bcb828 Mon Sep 17 00:00:00 2001 From: Corey Goodfred Date: Mon, 24 Jun 2024 07:53:00 -0400 Subject: [PATCH 07/26] Filter accounts by matching criteria during subs processing to prevent unnecessary SQL from running (#5184) --- koku/subs/trino_sql/aws/determine_ids_for_provider.sql | 2 ++ koku/subs/trino_sql/azure/determine_ids_for_provider.sql | 3 +++ 2 files changed, 5 insertions(+) diff --git a/koku/subs/trino_sql/aws/determine_ids_for_provider.sql b/koku/subs/trino_sql/aws/determine_ids_for_provider.sql index 00fe502ae8..518975e111 100644 --- a/koku/subs/trino_sql/aws/determine_ids_for_provider.sql +++ b/koku/subs/trino_sql/aws/determine_ids_for_provider.sql @@ -3,6 +3,8 @@ FROM hive.{{schema | sqlsafe}}.aws_line_items WHERE source={{source_uuid}} AND year={{year}} AND month={{month}} + AND lineitem_productcode = 'AmazonEC2' + AND strpos(lower(resourcetags), 'com_redhat_rhel') > 0 {% if excluded_ids %} AND lineitem_usageaccountid NOT IN {{excluded_ids | inclause}} {% endif %} diff --git a/koku/subs/trino_sql/azure/determine_ids_for_provider.sql b/koku/subs/trino_sql/azure/determine_ids_for_provider.sql index e80bf13289..cea4912e39 100644 --- a/koku/subs/trino_sql/azure/determine_ids_for_provider.sql +++ b/koku/subs/trino_sql/azure/determine_ids_for_provider.sql @@ -3,6 +3,9 @@ FROM hive.{{schema | sqlsafe}}.azure_line_items WHERE source={{source_uuid}} AND year={{year}} AND month={{month}} + AND metercategory = 'Virtual Machines' + AND 
json_extract_scalar(lower(additionalinfo), '$.vcpus') IS NOT NULL + AND json_extract_scalar(lower(tags), '$.com_redhat_rhel') IS NOT NULL {% if excluded_ids %} AND COALESCE(NULLIF(subscriptionid, ''), subscriptionguid) NOT IN {{excluded_ids | inclause}} {% endif %} From da16b3387b5fb5e5aaeda986011c2d49a4c2d2b3 Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Mon, 24 Jun 2024 15:33:57 +0100 Subject: [PATCH 08/26] Update tasks.py (#5185) --- koku/masu/processor/tasks.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/koku/masu/processor/tasks.py b/koku/masu/processor/tasks.py index 95cea41bea..c3ff9e4c47 100644 --- a/koku/masu/processor/tasks.py +++ b/koku/masu/processor/tasks.py @@ -836,9 +836,6 @@ def update_cost_model_costs( None """ - # Override cost model start date str to calculate costs for full month - LOG.info("overriding cost model start date to process full month") - start_date = DateHelper().month_start(start_date).strftime("%Y-%m-%d") task_name = "masu.processor.tasks.update_cost_model_costs" cache_args = [schema_name, provider_uuid, start_date, end_date] if not synchronous: From aaf1182dafbad2fea7159551db97629adef2675d Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Mon, 24 Jun 2024 16:48:14 +0100 Subject: [PATCH 09/26] clean up grafana dashboard (#5183) --- ...ana-dashboard-insights-hccm.configmap.yaml | 1262 ++++++++++++----- 1 file changed, 880 insertions(+), 382 deletions(-) diff --git a/dashboards/grafana-dashboard-insights-hccm.configmap.yaml b/dashboards/grafana-dashboard-insights-hccm.configmap.yaml index 11ace23bd9..d9f5509994 100644 --- a/dashboards/grafana-dashboard-insights-hccm.configmap.yaml +++ b/dashboards/grafana-dashboard-insights-hccm.configmap.yaml @@ -27,8 +27,6 @@ data: "editable": true, "fiscalYearStartMonth": 0, "graphTooltip": 0, - "id": 69016, - "iteration": 1675977639896, "links": [], "liveNow": false, "panels": [ @@ -98,7 +96,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + 
"pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -186,7 +184,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -282,7 +280,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -370,7 +368,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -447,6 +445,7 @@ data: "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -460,6 +459,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -585,6 +585,8 @@ data: }, "id": 76, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -594,9 +596,10 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -623,13 +626,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery download_xl_queue", + "description": "Cost Management celery summary_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -643,6 +647,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -659,6 +664,7 @@ data: "mode": "line+area" } }, + "decimals": 0, "displayName": "", "mappings": [], "thresholds": { @@ -670,11 
+676,11 @@ data: }, { "color": "#EAB839", - "value": 450 + "value": 200 }, { "color": "red", - "value": 550 + "value": 300 } ] }, @@ -688,7 +694,7 @@ data: "x": 12, "y": 19 }, - "id": 86, + "id": 69, "options": { "legend": { "calcs": [ @@ -711,15 +717,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(download_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "interval": "", - "legendFormat": "Queue Size", - "range": true, + "expr": "koku:celery:summary_queue", "refId": "A" } ], - "title": "Download XL", + "title": "Summary", "transformations": [ { "id": "seriesToColumns", @@ -735,12 +737,13 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery download_xl_queue", + "description": "Cost Management celery summary_queue", "fieldConfig": { "defaults": { "color": { "mode": "thresholds" }, + "decimals": 0, "displayName": "", "mappings": [], "thresholds": { @@ -752,11 +755,11 @@ data: }, { "color": "#EAB839", - "value": 450 + "value": 200 }, { "color": "red", - "value": 550 + "value": 300 } ] }, @@ -770,8 +773,10 @@ data: "x": 22, "y": 19 }, - "id": 87, + "id": 77, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -781,22 +786,21 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(download_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "range": true, + "expr": "koku:celery:summary_queue", "refId": "A" } ], - "title": "DL XL", + "title": "Summary", "transformations": [ { "id": "seriesToColumns", @@ -812,13 +816,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery 
download_penalty_queue", + "description": "Cost Management celery download_xl_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -832,6 +837,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -874,8 +880,8 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 19 + "x": 0, + "y": 27 }, "id": 86, "options": { @@ -901,14 +907,14 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(download_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(download_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "interval": "", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "Download Penalty", + "title": "Download XL", "transformations": [ { "id": "seriesToColumns", @@ -924,7 +930,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery download_penalty_queue", + "description": "Cost Management celery download_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -956,11 +962,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 19 + "x": 10, + "y": 27 }, "id": 87, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -970,9 +978,10 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -980,12 +989,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(download_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(download_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "DL Penalty", 
+ "title": "DL XL", "transformations": [ { "id": "seriesToColumns", @@ -1001,13 +1010,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery summary_xl_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1021,6 +1031,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1037,7 +1048,6 @@ data: "mode": "line+area" } }, - "decimals": 0, "displayName": "", "mappings": [], "thresholds": { @@ -1064,10 +1074,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 0, + "x": 12, "y": 27 }, - "id": 69, + "id": 84, "options": { "legend": { "calcs": [ @@ -1090,11 +1100,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:summary_queue", + "editorMode": "code", + "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "Summary", + "title": "Summary XL", "transformations": [ { "id": "seriesToColumns", @@ -1110,13 +1123,12 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_queue", + "description": "Cost Management celery summary_xl_queue", "fieldConfig": { "defaults": { "color": { "mode": "thresholds" }, - "decimals": 0, "displayName": "", "mappings": [], "thresholds": { @@ -1143,11 +1155,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 10, + "x": 22, "y": 27 }, - "id": 77, + "id": 85, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1157,20 +1171,23 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", 
+ "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:summary_queue", + "editorMode": "code", + "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "Summary", + "title": "Sum XL", "transformations": [ { "id": "seriesToColumns", @@ -1186,13 +1203,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_xl_queue", + "description": "Cost Management celery download_penalty_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1206,6 +1224,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1233,11 +1252,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 450 }, { "color": "red", - "value": 300 + "value": 550 } ] }, @@ -1248,10 +1267,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 27 + "x": 0, + "y": 35 }, - "id": 84, + "id": 96, "options": { "legend": { "calcs": [ @@ -1275,13 +1294,14 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(download_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "interval": "", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "Summary XL", + "title": "Download Penalty", "transformations": [ { "id": "seriesToColumns", @@ -1297,7 +1317,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery summary_xl_queue", + "description": "Cost Management celery download_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -1314,11 +1334,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 450 }, { 
"color": "red", - "value": 300 + "value": 550 } ] }, @@ -1329,11 +1349,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 27 + "x": 10, + "y": 35 }, - "id": 85, + "id": 97, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1343,9 +1365,10 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -1353,12 +1376,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(summary_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(download_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Sum XL", + "title": "DL Penalty", "transformations": [ { "id": "seriesToColumns", @@ -1381,6 +1404,7 @@ data: "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1394,6 +1418,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1437,9 +1462,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 27 + "y": 35 }, - "id": 84, + "id": 98, "options": { "legend": { "calcs": [ @@ -1518,10 +1543,12 @@ data: "h": 8, "w": 2, "x": 22, - "y": 27 + "y": 35 }, - "id": 85, + "id": 99, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1531,9 +1558,10 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -1562,13 +1590,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_queue", + "description": "Cost 
Management celery ocp_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1582,6 +1611,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1609,11 +1639,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - "value": 100 + "value": 300 } ] }, @@ -1625,9 +1655,9 @@ data: "h": 8, "w": 10, "x": 0, - "y": 35 + "y": 43 }, - "id": 90, + "id": 74, "options": { "legend": { "calcs": [ @@ -1650,11 +1680,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:priority_queue", + "expr": "koku:celery:ocp_queue", "refId": "A" } ], - "title": "Priority", + "title": "OCP", "transformations": [ { "id": "seriesToColumns", @@ -1670,7 +1700,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_queue", + "description": "Cost Management celery ocp_queue", "fieldConfig": { "defaults": { "color": { @@ -1687,11 +1717,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - "value": 100 + "value": 300 } ] }, @@ -1703,10 +1733,12 @@ data: "h": 8, "w": 2, "x": 10, - "y": 35 + "y": 43 }, - "id": 91, + "id": 83, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1716,20 +1748,21 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:priority_queue", + "expr": "koku:celery:ocp_queue", "refId": "A" } ], - "title": "Priority", + "title": "OCP", "transformations": [ { "id": "seriesToColumns", @@ -1745,13 +1778,14 @@ data: "type": 
"prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_xl_queue", + "description": "Cost Management celery cost_model_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1765,6 +1799,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1808,9 +1843,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 35 + "y": 43 }, - "id": 70, + "id": 92, "options": { "legend": { "calcs": [ @@ -1833,14 +1868,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "legendFormat": "Queue Size", - "range": true, + "expr": "koku:celery:cost_model_queue", "refId": "A" } ], - "title": "Priority XL", + "title": "Cost Model", "transformations": [ { "id": "seriesToColumns", @@ -1856,7 +1888,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_xl_queue", + "description": "Cost Management celery cost_model_queue", "fieldConfig": { "defaults": { "color": { @@ -1889,10 +1921,12 @@ data: "h": 8, "w": 2, "x": 22, - "y": 35 + "y": 43 }, - "id": 80, + "id": 93, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -1902,22 +1936,21 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "range": true, + "expr": "koku:celery:cost_model_queue", "refId": "A" } ], - "title": "Priority XL", + "title": 
"Cost Model", "transformations": [ { "id": "seriesToColumns", @@ -1933,13 +1966,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_penalty_queue", + "description": "Cost Management celery ocp_xl_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1953,6 +1987,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1980,11 +2015,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - "value": 100 + "value": 300 } ] }, @@ -1995,10 +2030,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 35 + "x": 0, + "y": 51 }, - "id": 70, + "id": 88, "options": { "legend": { "calcs": [ @@ -2022,13 +2057,13 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(priority_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "Priority Penalty", + "title": "OCP XL", "transformations": [ { "id": "seriesToColumns", @@ -2044,7 +2079,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery priority_penalty_queue", + "description": "Cost Management celery ocp_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -2061,11 +2096,11 @@ data: }, { "color": "#EAB839", - "value": 90 + "value": 200 }, { "color": "red", - "value": 100 + "value": 300 } ] }, @@ -2076,11 +2111,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 35 + "x": 10, + "y": 51 }, - "id": 80, + "id": 89, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -2090,9 +2127,10 @@ data: "values": false }, 
"showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -2100,12 +2138,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(priority_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Priority Penalty", + "title": "OCP XL", "transformations": [ { "id": "seriesToColumns", @@ -2121,13 +2159,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_queue", + "description": "Cost Management celery cost_model_xl_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2141,6 +2180,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2168,11 +2208,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -2183,10 +2223,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 0, - "y": 43 + "x": 12, + "y": 51 }, - "id": 74, + "id": 72, "options": { "legend": { "calcs": [ @@ -2209,11 +2249,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:ocp_queue", + "editorMode": "code", + "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "OCP", + "title": "Cost Model XL", "transformations": [ { "id": "seriesToColumns", @@ -2229,7 +2272,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_queue", + "description": "Cost Management celery cost_model_xl_queue", 
"fieldConfig": { "defaults": { "color": { @@ -2246,11 +2289,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -2261,11 +2304,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 10, - "y": 43 + "x": 22, + "y": 51 }, - "id": 83, + "id": 82, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -2275,20 +2320,23 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:ocp_queue", + "editorMode": "code", + "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "OCP", + "title": "CM XL", "transformations": [ { "id": "seriesToColumns", @@ -2304,13 +2352,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_xl_queue", + "description": "Cost Management celery ocp_penalty_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2324,6 +2373,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2366,10 +2416,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 43 + "x": 0, + "y": 59 }, - "id": 88, + "id": 102, "options": { "legend": { "calcs": [ @@ -2393,13 +2443,13 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(ocp_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": 
"OCP XL", + "title": "OCP Penalty", "transformations": [ { "id": "seriesToColumns", @@ -2415,7 +2465,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_xl_queue", + "description": "Cost Management celery ocp_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -2447,11 +2497,396 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 43 + "x": 10, + "y": 59 }, - "id": 89, + "id": 103, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "10.4.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(ocp_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, + "refId": "A" + } + ], + "title": "OCP Penalty", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery cost_model_penalty_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "displayName": "", + "mappings": 
[], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 12, + "y": 59 + }, + "id": 104, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": "max(cost_model_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, + "refId": "A" + } + ], + "title": "Cost Model Penalty", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery cost_model_penalty_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 22, + "y": 59 + }, + "id": 105, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "10.4.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "editorMode": "code", + "expr": 
"max(cost_model_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, + "refId": "A" + } + ], + "title": "CM Penalty", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery priority_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 30, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "line+area" + } + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 10, + "x": 0, + "y": 67 + }, + "id": 90, + "options": { + "legend": { + "calcs": [ + "min", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "8.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${Datasource}" + }, + "expr": "koku:celery:priority_queue", + "refId": "A" + } + ], + "title": "Priority", + "transformations": [ + { + "id": "seriesToColumns", + "options": { + "reducers": [] + } + } + ], + "type": "timeseries" + }, + { + "datasource": { 
+ "type": "prometheus", + "uid": "${Datasource}" + }, + "description": "Cost Management celery priority_queue", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "displayName": "", + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "#EAB839", + "value": 90 + }, + { + "color": "red", + "value": 100 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 2, + "x": 10, + "y": 67 + }, + "id": 91, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -2461,22 +2896,21 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(ocp_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "range": true, + "expr": "koku:celery:priority_queue", "refId": "A" } ], - "title": "OCP XL", + "title": "Priority", "transformations": [ { "id": "seriesToColumns", @@ -2492,13 +2926,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_penalty_queue", + "description": "Cost Management celery refresh_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2512,6 +2947,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2539,11 +2975,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -2555,9 +2991,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 43 + "y": 67 }, - "id": 88, + "id": 71, "options": 
{ "legend": { "calcs": [ @@ -2580,14 +3016,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(ocp_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "legendFormat": "Queue Size", - "range": true, + "expr": "koku:celery:refresh_queue", "refId": "A" } ], - "title": "OCP Penalty", + "title": "Refresh", "transformations": [ { "id": "seriesToColumns", @@ -2603,7 +3036,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery ocp_penalty_queue", + "description": "Cost Management celery refresh_queue", "fieldConfig": { "defaults": { "color": { @@ -2620,11 +3053,11 @@ data: }, { "color": "#EAB839", - "value": 200 + "value": 90 }, { "color": "red", - "value": 300 + "value": 100 } ] }, @@ -2636,10 +3069,12 @@ data: "h": 8, "w": 2, "x": 22, - "y": 43 + "y": 67 }, - "id": 89, + "id": 78, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -2649,22 +3084,21 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "editorMode": "code", - "expr": "max(ocp_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "range": true, + "expr": "koku:celery:refresh_queue", "refId": "A" } ], - "title": "OCP Penalty", + "title": "Refresh", "transformations": [ { "id": "seriesToColumns", @@ -2680,13 +3114,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_queue", + "description": "Cost Management celery priority_xl_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2700,6 +3135,7 @@ data: 
"tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2743,9 +3179,9 @@ data: "h": 8, "w": 10, "x": 0, - "y": 51 + "y": 75 }, - "id": 92, + "id": 70, "options": { "legend": { "calcs": [ @@ -2768,11 +3204,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:cost_model_queue", + "editorMode": "code", + "expr": "max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "Cost Model", + "title": "Priority XL", "transformations": [ { "id": "seriesToColumns", @@ -2788,7 +3227,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_queue", + "description": "Cost Management celery priority_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -2821,10 +3260,12 @@ data: "h": 8, "w": 2, "x": 10, - "y": 51 + "y": 75 }, - "id": 93, + "id": 80, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -2834,20 +3275,23 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:cost_model_queue", + "editorMode": "code", + "expr": "max(priority_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "Cost Model", + "title": "Priority XL", "transformations": [ { "id": "seriesToColumns", @@ -2863,13 +3307,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_xl_queue", + "description": "Cost Management celery refresh_xl_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": 
false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -2883,6 +3328,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2926,9 +3372,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 51 + "y": 75 }, - "id": 72, + "id": 94, "options": { "legend": { "calcs": [ @@ -2952,13 +3398,13 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "Cost Model XL", + "title": "Refresh XL", "transformations": [ { "id": "seriesToColumns", @@ -2974,7 +3420,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_xl_queue", + "description": "Cost Management celery refresh_xl_queue", "fieldConfig": { "defaults": { "color": { @@ -3007,10 +3453,12 @@ data: "h": 8, "w": 2, "x": 22, - "y": 51 + "y": 75 }, - "id": 82, + "id": 95, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -3020,9 +3468,10 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -3030,12 +3479,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(cost_model_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "CM XL", + "title": "Ref XL", "transformations": [ { "id": "seriesToColumns", @@ -3051,13 +3500,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery 
cost_model_penalty_queue", + "description": "Cost Management celery priority_penalty_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -3071,6 +3521,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3113,10 +3564,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 51 + "x": 0, + "y": 83 }, - "id": 72, + "id": 100, "options": { "legend": { "calcs": [ @@ -3140,13 +3591,13 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(cost_model_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(priority_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "legendFormat": "Queue Size", "range": true, "refId": "A" } ], - "title": "Cost Model Penalty", + "title": "Priority Penalty", "transformations": [ { "id": "seriesToColumns", @@ -3162,7 +3613,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery cost_model_penalty_queue", + "description": "Cost Management celery priority_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -3194,11 +3645,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 51 + "x": 10, + "y": 83 }, - "id": 82, + "id": 101, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -3208,9 +3661,10 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -3218,12 +3672,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(cost_model_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": 
"max(priority_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "CM Penalty", + "title": "Priority Penalty", "transformations": [ { "id": "seriesToColumns", @@ -3239,13 +3693,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery refresh_queue", + "description": "Cost Management celery refresh_penalty_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -3259,6 +3714,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3301,10 +3757,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 0, - "y": 59 + "x": 12, + "y": 83 }, - "id": 71, + "id": 106, "options": { "legend": { "calcs": [ @@ -3327,11 +3783,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:refresh_queue", + "editorMode": "code", + "expr": "max(refresh_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "legendFormat": "Queue Size", + "range": true, "refId": "A" } ], - "title": "Refresh", + "title": "Refresh Penalty", "transformations": [ { "id": "seriesToColumns", @@ -3347,7 +3806,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery refresh_queue", + "description": "Cost Management celery refresh_penalty_queue", "fieldConfig": { "defaults": { "color": { @@ -3379,11 +3838,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 10, - "y": 59 + "x": 22, + "y": 83 }, - "id": 78, + "id": 107, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -3393,20 +3854,23 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": 
"10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:refresh_queue", + "editorMode": "code", + "expr": "max(refresh_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "range": true, "refId": "A" } ], - "title": "Refresh", + "title": "Ref Penalty", "transformations": [ { "id": "seriesToColumns", @@ -3422,13 +3886,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery refresh_xl_queue", + "description": "Cost Management celery subs transmission queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -3442,6 +3907,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3484,10 +3950,10 @@ data: "gridPos": { "h": 8, "w": 10, - "x": 12, - "y": 59 + "x": 0, + "y": 91 }, - "id": 94, + "id": 111, "options": { "legend": { "calcs": [ @@ -3511,13 +3977,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "legendFormat": "Queue Size", + "expr": "max(subs_transmission_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Refresh XL", + "title": "subs_transmission", "transformations": [ { "id": "seriesToColumns", @@ -3533,7 +3998,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery refresh_xl_queue", + "description": "Cost Management celery subs transmission queue", "fieldConfig": { "defaults": { "color": { @@ -3565,11 +4030,13 @@ data: "gridPos": { "h": 8, "w": 2, - "x": 22, - "y": 59 + "x": 10, + "y": 91 }, - "id": 95, + "id": 112, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -3579,9 +4046,10 @@ 
data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -3589,12 +4057,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(refresh_xl_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(subs_transmission_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Ref XL", + "title": "Subs Transmission", "transformations": [ { "id": "seriesToColumns", @@ -3610,13 +4078,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery refresh_penalty_queue", + "description": "Cost Management celery subs_extraction", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -3630,6 +4099,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3673,9 +4143,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 59 + "y": 91 }, - "id": 94, + "id": 109, "options": { "legend": { "calcs": [ @@ -3699,13 +4169,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(refresh_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", - "legendFormat": "Queue Size", + "expr": "max(subs_extraction_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Refresh Penalty", + "title": "subs_extraction", "transformations": [ { "id": "seriesToColumns", @@ -3721,7 +4190,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery refresh_penalty_queue", + "description": "Cost Management celery subs extraction queue", "fieldConfig": { "defaults": { "color": { @@ -3754,10 +4223,12 @@ data: 
"h": 8, "w": 2, "x": 22, - "y": 59 + "y": 91 }, - "id": 95, + "id": 110, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -3767,9 +4238,10 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -3777,12 +4249,12 @@ data: "uid": "${Datasource}" }, "editorMode": "code", - "expr": "max(refresh_penalty_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", + "expr": "max(subs_extraction_backlog{namespace=\"${namespace}\", pod=~\".+worker.+\"})", "range": true, "refId": "A" } ], - "title": "Ref Penalty", + "title": "Subs Extraction", "transformations": [ { "id": "seriesToColumns", @@ -3798,13 +4270,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery default_celery_queue", + "description": "Cost Management celery hcs_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -3818,6 +4291,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3861,9 +4335,9 @@ data: "h": 8, "w": 10, "x": 0, - "y": 67 + "y": 99 }, - "id": 75, + "id": 73, "options": { "legend": { "calcs": [ @@ -3886,11 +4360,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:default_celery_queue", + "expr": "koku:celery:hcs_queue", "refId": "A" } ], - "title": "Default", + "title": "HCS", "transformations": [ { "id": "seriesToColumns", @@ -3906,7 +4380,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery default_celery_queue", + "description": "Cost Management celery hcs_queue", "fieldConfig": { "defaults": { "color": { @@ 
-3939,10 +4413,12 @@ data: "h": 8, "w": 2, "x": 10, - "y": 67 + "y": 99 }, - "id": 81, + "id": 79, "options": { + "minVizHeight": 75, + "minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -3952,20 +4428,21 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:default_celery_queue", + "expr": "koku:celery:hcs_queue", "refId": "A" } ], - "title": "Default", + "title": "HCS", "transformations": [ { "id": "seriesToColumns", @@ -3981,13 +4458,14 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery hcs_queue", + "description": "Cost Management celery default_celery_queue", "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -4001,6 +4479,7 @@ data: "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -4044,9 +4523,9 @@ data: "h": 8, "w": 10, "x": 12, - "y": 67 + "y": 99 }, - "id": 73, + "id": 75, "options": { "legend": { "calcs": [ @@ -4069,11 +4548,11 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:hcs_queue", + "expr": "koku:celery:default_celery_queue", "refId": "A" } ], - "title": "HCS", + "title": "Default", "transformations": [ { "id": "seriesToColumns", @@ -4089,7 +4568,7 @@ data: "type": "prometheus", "uid": "${Datasource}" }, - "description": "Cost Management celery hcs_queue", + "description": "Cost Management celery default_celery_queue", "fieldConfig": { "defaults": { "color": { @@ -4122,10 +4601,12 @@ data: "h": 8, "w": 2, "x": 22, - "y": 67 + "y": 99 }, - "id": 79, + "id": 81, "options": { + "minVizHeight": 75, + 
"minVizWidth": 75, "orientation": "auto", "reduceOptions": { "calcs": [ @@ -4135,20 +4616,21 @@ data: "values": false }, "showThresholdLabels": false, - "showThresholdMarkers": true + "showThresholdMarkers": true, + "sizing": "auto" }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { "type": "prometheus", "uid": "${Datasource}" }, - "expr": "koku:celery:hcs_queue", + "expr": "koku:celery:default_celery_queue", "refId": "A" } ], - "title": "HCS", + "title": "Default", "transformations": [ { "id": "seriesToColumns", @@ -4159,32 +4641,6 @@ data: ], "type": "gauge" }, - { - "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "iLX_mMBnk" - }, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 75 - }, - "id": 28, - "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "iLX_mMBnk" - }, - "refId": "A" - } - ], - "title": "RDS Database", - "type": "row" - }, { "aliasColors": {}, "bars": false, @@ -4205,7 +4661,7 @@ data: "h": 8, "w": 12, "x": 0, - "y": 76 + "y": 107 }, "hiddenSeries": false, "id": 24, @@ -4225,7 +4681,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -4341,7 +4797,7 @@ data: "h": 8, "w": 12, "x": 12, - "y": 76 + "y": 107 }, "hiddenSeries": false, "id": 22, @@ -4361,7 +4817,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -4438,7 +4894,7 @@ data: "h": 8, "w": 12, "x": 0, - "y": 84 + "y": 115 }, "hiddenSeries": false, "id": 20, @@ -4458,7 +4914,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -4530,7 +4986,7 @@ data: "h": 8, "w": 12, "x": 12, - "y": 84 + "y": 115 }, "hiddenSeries": false, "id": 26, @@ 
-4550,7 +5006,7 @@ data: "alertThreshold": true }, "percentage": true, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -4642,7 +5098,33 @@ data: "h": 1, "w": 24, "x": 0, - "y": 92 + "y": 123 + }, + "id": 28, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "iLX_mMBnk" + }, + "refId": "A" + } + ], + "title": "RDS Database", + "type": "row" + }, + { + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "iLX_mMBnk" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 124 }, "id": 32, "panels": [], @@ -4678,7 +5160,7 @@ data: "h": 7, "w": 24, "x": 0, - "y": 93 + "y": 125 }, "hiddenSeries": false, "id": 30, @@ -4698,7 +5180,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -4756,7 +5238,7 @@ data: "h": 1, "w": 24, "x": 0, - "y": 100 + "y": 132 }, "id": 40, "panels": [], @@ -4799,7 +5281,7 @@ data: "h": 6, "w": 4, "x": 0, - "y": 101 + "y": 133 }, "id": 42, "options": { @@ -4814,9 +5296,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -4857,7 +5341,7 @@ data: "h": 6, "w": 5, "x": 4, - "y": 101 + "y": 133 }, "id": 44, "options": { @@ -4872,9 +5356,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -4915,7 +5401,7 @@ data: "h": 6, "w": 4, "x": 9, - "y": 101 + "y": 133 }, "id": 46, "options": { @@ -4930,9 +5416,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": 
"9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -4962,7 +5450,7 @@ data: "h": 6, "w": 11, "x": 13, - "y": 101 + "y": 133 }, "id": 54, "legend": { @@ -4970,7 +5458,6 @@ data: "values": true }, "legendType": "Under graph", - "links": [], "nullPointMode": "connected", "pieType": "pie", "strokeWidth": 1, @@ -5016,7 +5503,7 @@ data: "h": 7, "w": 4, "x": 0, - "y": 107 + "y": 139 }, "id": 48, "options": { @@ -5031,9 +5518,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -5067,7 +5556,7 @@ data: "h": 7, "w": 9, "x": 4, - "y": 107 + "y": 139 }, "hiddenSeries": false, "id": 50, @@ -5087,7 +5576,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -5155,7 +5644,7 @@ data: "h": 7, "w": 11, "x": 13, - "y": 107 + "y": 139 }, "hiddenSeries": false, "id": 52, @@ -5175,7 +5664,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -5234,7 +5723,8 @@ data: "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -5249,7 +5739,7 @@ data: "h": 7, "w": 4, "x": 0, - "y": 114 + "y": 146 }, "id": 56, "options": { @@ -5264,9 +5754,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -5300,7 +5792,7 @@ data: "h": 7, "w": 9, "x": 4, - "y": 114 + "y": 146 }, "hiddenSeries": false, "id": 57, @@ -5320,7 +5812,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", 
"pointradius": 2, "points": false, "renderer": "flot", @@ -5389,7 +5881,7 @@ data: "h": 7, "w": 11, "x": 13, - "y": 114 + "y": 146 }, "hiddenSeries": false, "id": 58, @@ -5409,7 +5901,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -5468,7 +5960,8 @@ data: "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -5483,7 +5976,7 @@ data: "h": 7, "w": 4, "x": 0, - "y": 121 + "y": 153 }, "id": 59, "options": { @@ -5498,9 +5991,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -5534,7 +6029,7 @@ data: "h": 7, "w": 9, "x": 4, - "y": 121 + "y": 153 }, "hiddenSeries": false, "id": 60, @@ -5554,7 +6049,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -5623,7 +6118,7 @@ data: "h": 7, "w": 11, "x": 13, - "y": 121 + "y": 153 }, "hiddenSeries": false, "id": 61, @@ -5643,7 +6138,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -5702,7 +6197,8 @@ data: "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -5717,7 +6213,7 @@ data: "h": 7, "w": 4, "x": 0, - "y": 128 + "y": 160 }, "id": 63, "options": { @@ -5732,9 +6228,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -5768,7 +6266,7 @@ data: "h": 7, "w": 9, "x": 4, - "y": 128 + "y": 160 }, 
"hiddenSeries": false, "id": 65, @@ -5783,13 +6281,12 @@ data: }, "lines": true, "linewidth": 1, - "links": [], "nullPointMode": "null", "options": { "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -5858,7 +6355,7 @@ data: "h": 7, "w": 11, "x": 13, - "y": 128 + "y": 160 }, "hiddenSeries": false, "id": 66, @@ -5878,7 +6375,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -5937,7 +6434,8 @@ data: "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -5952,7 +6450,7 @@ data: "h": 7, "w": 4, "x": 0, - "y": 135 + "y": 167 }, "id": 64, "options": { @@ -5967,9 +6465,11 @@ data: "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -6003,7 +6503,7 @@ data: "h": 7, "w": 9, "x": 4, - "y": 135 + "y": 167 }, "hiddenSeries": false, "id": 67, @@ -6018,13 +6518,12 @@ data: }, "lines": true, "linewidth": 1, - "links": [], "nullPointMode": "null", "options": { "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -6093,7 +6592,7 @@ data: "h": 7, "w": 11, "x": 13, - "y": 135 + "y": 167 }, "hiddenSeries": false, "id": 68, @@ -6113,7 +6612,7 @@ data: "alertThreshold": true }, "percentage": false, - "pluginVersion": "9.3.8", + "pluginVersion": "10.4.1", "pointradius": 2, "points": false, "renderer": "flot", @@ -6162,9 +6661,8 @@ data: } } ], - "refresh": "1m", - "schemaVersion": 37, - "style": "dark", + "refresh": "", + "schemaVersion": 39, "tags": [], "templating": { "list": [ @@ -6285,7 +6783,7 @@ data: "timezone": "", 
"title": "Cost Management", "uid": "R0HueuFGk", - "version": 4, + "version": 3, "weekStart": "" } kind: ConfigMap From 1917175526888d0f22d0263eda396dc6ec4ba03c Mon Sep 17 00:00:00 2001 From: Corey Goodfred Date: Mon, 24 Jun 2024 17:05:51 -0400 Subject: [PATCH 10/26] Skip OCPCloud tag SQL if key is present in cache but value is None (#5186) --- koku/koku/cache.py | 20 +++++++++ koku/koku/test_cache.py | 45 +++++++++++++------ .../ocp_cloud_parquet_report_processor.py | 14 +++--- ...test_ocp_cloud_parquet_report_processor.py | 4 +- 4 files changed, 62 insertions(+), 21 deletions(-) diff --git a/koku/koku/cache.py b/koku/koku/cache.py index d7e19742b5..9a5e0a2df9 100644 --- a/koku/koku/cache.py +++ b/koku/koku/cache.py @@ -123,6 +123,26 @@ def invalidate_view_cache_for_tenant_and_all_source_types(schema_name): invalidate_view_cache_for_tenant_and_source_type(schema_name, source_type) +def get_value_from_cache(cache_key, cache_choice="default"): + cache = caches[cache_choice] + return cache.get(cache_key) + + +def set_value_in_cache(cache_key, cache_value, cache_choice="default"): + cache = caches[cache_choice] + cache.set(cache_key, cache_value) + + +def is_key_in_cache(cache_key, cache_choice="default"): + cache = caches[cache_choice] + return cache.has_key(cache_key) + + +def build_matching_tags_key(schema_name, provider_type): + """Return the key for matching tags""" + return f"OCP-on-{provider_type}:{schema_name}:matching-tags" + + def get_cached_matching_tags(schema_name, provider_type): """Return cached OCP on Cloud matched tags if exists.""" cache = caches["default"] diff --git a/koku/koku/test_cache.py b/koku/koku/test_cache.py index 3d9c0381cf..bc005887d0 100644 --- a/koku/koku/test_cache.py +++ b/koku/koku/test_cache.py @@ -12,21 +12,23 @@ from api.provider.models import Provider from koku.cache import AWS_CACHE_PREFIX from koku.cache import AZURE_CACHE_PREFIX +from koku.cache import build_matching_tags_key from koku.cache import get_cached_infra_map -from 
koku.cache import get_cached_matching_tags from koku.cache import get_cached_tag_rate_map +from koku.cache import get_value_from_cache from koku.cache import invalidate_view_cache_for_tenant_and_all_source_types from koku.cache import invalidate_view_cache_for_tenant_and_cache_key from koku.cache import invalidate_view_cache_for_tenant_and_source_type from koku.cache import invalidate_view_cache_for_tenant_and_source_types +from koku.cache import is_key_in_cache from koku.cache import KokuCacheError from koku.cache import OPENSHIFT_ALL_CACHE_PREFIX from koku.cache import OPENSHIFT_AWS_CACHE_PREFIX from koku.cache import OPENSHIFT_AZURE_CACHE_PREFIX from koku.cache import OPENSHIFT_CACHE_PREFIX from koku.cache import set_cached_infra_map -from koku.cache import set_cached_matching_tags from koku.cache import set_cached_tag_rate_map +from koku.cache import set_value_in_cache CACHE_PREFIXES = ( @@ -232,18 +234,6 @@ def test_invalidate_view_cache_for_tenant_and_all_source_type(self): for key in cache_data: self.assertIsNone(self.cache.get(key)) - def test_matching_tags_cache(self): - """Test that getting/setting matching tags works.""" - provider_type = Provider.PROVIDER_AWS - initial = get_cached_matching_tags(self.schema_name, provider_type) - self.assertIsNone(initial) - - matched_tags = [{"tag_one": "value_one"}, {"tag_two": "value_bananas"}] - set_cached_matching_tags(self.schema_name, provider_type, matched_tags) - - cached = get_cached_matching_tags(self.schema_name, provider_type) - self.assertEqual(cached, matched_tags) - def test_infra_map_cache(self): """Test that getting/setting infra_map works.""" provider_type = Provider.PROVIDER_AWS @@ -263,3 +253,30 @@ def test_tag_rate_map_cache(self): self.assertIsNone(initial) cached = get_cached_tag_rate_map(schema) self.assertEqual(cached, tag_map) + + def test_build_matching_tags_key(self): + """Test that the matching tags key is constructed properly.""" + provider_type = Provider.PROVIDER_AWS + expected = 
f"OCP-on-{provider_type}:{self.schema_name}:matching-tags" + actual = build_matching_tags_key(self.schema_name, provider_type) + self.assertEqual(expected, actual) + + def test_get_and_set_value_from_cache(self): + """Test that getting a value from the cache and setting a value in the cache works as intended.""" + cache_key = "my-fake-key-get-test" + expected_value = "my-fake-value" + + actual = get_value_from_cache(cache_key) + self.assertIsNone(actual) + + set_value_in_cache(cache_key, expected_value) + actual = get_value_from_cache(cache_key) + self.assertEqual(expected_value, actual) + + def test_is_key_in_cache(self): + """Test that checking the cache for key presence works as intended""" + cache_key = "fake-key-for-bool-test" + self.assertFalse(is_key_in_cache(cache_key)) + + set_value_in_cache(cache_key, "fake-value") + self.assertTrue(is_key_in_cache(cache_key)) diff --git a/koku/masu/processor/parquet/ocp_cloud_parquet_report_processor.py b/koku/masu/processor/parquet/ocp_cloud_parquet_report_processor.py index ac23459bd3..45d0ff1349 100644 --- a/koku/masu/processor/parquet/ocp_cloud_parquet_report_processor.py +++ b/koku/masu/processor/parquet/ocp_cloud_parquet_report_processor.py @@ -13,8 +13,10 @@ from api.common import log_json from api.provider.models import Provider from api.utils import DateHelper -from koku.cache import get_cached_matching_tags -from koku.cache import set_cached_matching_tags +from koku.cache import build_matching_tags_key +from koku.cache import get_value_from_cache +from koku.cache import is_key_in_cache +from koku.cache import set_value_in_cache from masu.database.aws_report_db_accessor import AWSReportDBAccessor from masu.database.azure_report_db_accessor import AzureReportDBAccessor from masu.database.gcp_report_db_accessor import GCPReportDBAccessor @@ -147,13 +149,15 @@ def create_ocp_on_cloud_parquet(self, data_frame, parquet_base_filename): def get_matched_tags(self, ocp_provider_uuids): """Get tags that match between OCP 
and the cloud source.""" # Get matching tags - matched_tags = get_cached_matching_tags(self.schema_name, self.provider_type) + cache_key = build_matching_tags_key(self.schema_name, self.provider_type) + matched_tags = get_value_from_cache(cache_key) ctx = { "schema": self.schema_name, "provider_uuid": self.provider_uuid, "provider_type": self.provider_type, } - if matched_tags: + # If the key is in the cache but the value is None, there are no matching tags + if matched_tags or is_key_in_cache(cache_key): LOG.info(log_json(msg="retreived matching tags from cache", context=ctx)) return matched_tags if self.has_enabled_ocp_labels: @@ -171,7 +175,7 @@ def get_matched_tags(self, ocp_provider_uuids): self.end_date, invoice_month_date=self.invoice_month_date, ) - set_cached_matching_tags(self.schema_name, self.provider_type, matched_tags) + set_value_in_cache(cache_key, matched_tags) return matched_tags def create_partitioned_ocp_on_cloud_parquet(self, data_frame, parquet_base_filename): diff --git a/koku/masu/test/processor/parquet/test_ocp_cloud_parquet_report_processor.py b/koku/masu/test/processor/parquet/test_ocp_cloud_parquet_report_processor.py index bd812fe30a..365ce42e82 100644 --- a/koku/masu/test/processor/parquet/test_ocp_cloud_parquet_report_processor.py +++ b/koku/masu/test/processor/parquet/test_ocp_cloud_parquet_report_processor.py @@ -415,7 +415,7 @@ def test_get_matched_tags(self, mock_has_enabled, mock_matching_enabled, mock_ge matched_tags = [{"tag_one": "value_one"}, {"tag_two": "value_bananas"}] mock_get_tags.reset_mock() with patch( - "masu.processor.parquet.ocp_cloud_parquet_report_processor.get_cached_matching_tags", + "masu.processor.parquet.ocp_cloud_parquet_report_processor.get_value_from_cache", return_value=matched_tags, ): self.report_processor.get_matched_tags([]) @@ -439,7 +439,7 @@ def test_get_matched_tags_trino(self, mock_has_enabled, mock_matching_enabled, m matched_tags = [{"tag_one": "value_one"}, {"tag_two": "value_bananas"}] 
mock_get_tags.reset_mock() with patch( - "masu.processor.parquet.ocp_cloud_parquet_report_processor.get_cached_matching_tags", + "masu.processor.parquet.ocp_cloud_parquet_report_processor.get_value_from_cache", return_value=matched_tags, ): self.report_processor.get_matched_tags([]) From ca8ac2db9dd7c62a0203957985febb23e7f7feea Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Tue, 25 Jun 2024 14:44:10 +0100 Subject: [PATCH 11/26] [COST-5196] - Send OCP tasks to correct queues (#5187) * [COST-5196] - Send OCP tasks to correct queues --- koku/masu/processor/tasks.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/koku/masu/processor/tasks.py b/koku/masu/processor/tasks.py index c3ff9e4c47..e008c39e43 100644 --- a/koku/masu/processor/tasks.py +++ b/koku/masu/processor/tasks.py @@ -465,10 +465,11 @@ def update_summary_tables( # noqa: C901 cache_args = [schema, provider_type, provider_uuid, cache_arg_date] ocp_on_cloud_infra_map = {} is_large_customer_rate_limited = is_rate_limit_customer_large(schema) + # Fallback should only be used for non-ocp processing fallback_update_summary_tables_queue = get_customer_queue(schema, SummaryQueue) - fallback_delete_truncate_queue = get_customer_queue(schema, RefreshQueue) - fallback_update_cost_model_queue = get_customer_queue(schema, CostModelQueue) - fallback_mark_manifest_complete_queue = get_customer_queue(schema, PriorityQueue) + delete_truncate_queue = get_customer_queue(schema, RefreshQueue) + update_cost_model_queue = get_customer_queue(schema, CostModelQueue) + mark_manifest_complete_queue = get_customer_queue(schema, PriorityQueue) timeout = settings.WORKER_CACHE_TIMEOUT if fallback_update_summary_tables_queue != SummaryQueue.DEFAULT: timeout = settings.WORKER_CACHE_LARGE_CUSTOMER_TIMEOUT @@ -577,7 +578,7 @@ def update_summary_tables( # noqa: C901 operation, manifest_id=manifest_id, tracing_id=tracing_id, - ).set(queue=queue_name or fallback_delete_truncate_queue) + 
).set(queue=delete_truncate_queue) ) signature_list = [] @@ -596,7 +597,7 @@ def update_summary_tables( # noqa: C901 queue_name=queue_name, synchronous=synchronous, tracing_id=tracing_id, - ).set(queue=queue_name or fallback_update_summary_tables_queue) + ).set(queue=fallback_update_summary_tables_queue) ) # Apply OCP on Cloud tasks @@ -619,10 +620,10 @@ def update_summary_tables( # noqa: C901 LOG.info(log_json(tracing_id, msg="updating cost model costs", context=context)) linked_tasks = update_cost_model_costs.s( schema, provider_uuid, start_date, end_date, tracing_id=tracing_id - ).set(queue=queue_name or fallback_update_cost_model_queue) | mark_manifest_complete.si( + ).set(queue=update_cost_model_queue) | mark_manifest_complete.si( schema, provider_type, provider_uuid, manifest_list=manifest_list, tracing_id=tracing_id ).set( - queue=queue_name or fallback_mark_manifest_complete_queue + queue=mark_manifest_complete_queue ) else: LOG.info(log_json(tracing_id, msg="skipping cost model updates", context=context)) @@ -633,7 +634,7 @@ def update_summary_tables( # noqa: C901 manifest_list=manifest_list, ingress_report_uuid=ingress_report_uuid, tracing_id=tracing_id, - ).set(queue=queue_name or fallback_mark_manifest_complete_queue) + ).set(queue=mark_manifest_complete_queue) chain(linked_tasks).apply_async() From 1de521aaf8bcc0d348535c5bf049a7662a221935 Mon Sep 17 00:00:00 2001 From: David Nakabaale Date: Tue, 25 Jun 2024 11:04:50 -0400 Subject: [PATCH 12/26] [COST-5176] correctly pass context dictionary within log_json function call (#5182) * [COST-5176] correctly pass context dictionary within log_json function call * add unittests for exceptions in generate_report --- koku/hcs/daily_report.py | 4 ++-- koku/hcs/test/test_daily_report.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/koku/hcs/daily_report.py b/koku/hcs/daily_report.py index 592281eef0..556958ddf7 100644 --- a/koku/hcs/daily_report.py +++ 
b/koku/hcs/daily_report.py @@ -51,9 +51,9 @@ def generate_report(self, start_date, end_date, finalize=False): ) except HCSTableNotFoundError as tnfe: - LOG.info(log_json(self._tracing_id, msg=f"{tnfe}, skipping..."), context=self._ctx) + LOG.info(log_json(self._tracing_id, msg=f"{tnfe}, skipping...", context=self._ctx)) except Exception as e: LOG.warning( - log_json(self._tracing_id, msg="get_hcs_daily_summary exception"), context=self._ctx, exc_info=e + log_json(self._tracing_id, msg="get_hcs_daily_summary exception", context=self._ctx), exc_info=e ) diff --git a/koku/hcs/test/test_daily_report.py b/koku/hcs/test/test_daily_report.py index cd0b1c1be9..42bf7cd7ec 100644 --- a/koku/hcs/test/test_daily_report.py +++ b/koku/hcs/test/test_daily_report.py @@ -9,6 +9,7 @@ from api.provider.models import Provider from api.utils import DateHelper from hcs.daily_report import ReportHCS +from hcs.exceptions import HCSTableNotFoundError from hcs.test import HCSTestCase @@ -40,3 +41,32 @@ def test_bad_schema(self, mock_schema, mock_daily_summary): reporter = ReportHCS(self.schema, self.aws_provider_type, self.aws_provider_uuid, self.tracing_id) reporter.generate_report(self.yesterday, self.today) mock_daily_summary.assert_not_called() + + @patch("hcs.database.report_db_accessor.HCSReportDBAccessor.get_hcs_daily_summary") + @patch("hcs.database.report_db_accessor.HCSReportDBAccessor.schema_exists_trino") + def test_hcs_table_not_found_error(self, mock_schema, mock_daily_summary): + """Test that HCSTableNotFoundError is handled and logged correctly.""" + mock_schema.return_value = True + expected_error_msg = "Table not found" + mock_daily_summary.side_effect = HCSTableNotFoundError(expected_error_msg) + + with self.assertLogs("hcs.daily_report", level="INFO") as log: + reporter = ReportHCS(self.schema, self.aws_provider_type, self.aws_provider_uuid, self.tracing_id) + reporter.generate_report(self.yesterday, self.today) + + self.assertIn(expected_error_msg, log.output[0]) + + 
@patch("hcs.database.report_db_accessor.HCSReportDBAccessor.get_hcs_daily_summary") + @patch("hcs.database.report_db_accessor.HCSReportDBAccessor.schema_exists_trino") + def test_general_exception(self, mock_schema, mock_daily_summary): + """Test that a general exception is handled and logged correctly.""" + mock_schema.return_value = True + expected_error_msg = "General exception" + mock_daily_summary.side_effect = Exception(expected_error_msg) + + with self.assertLogs("hcs.daily_report", level="WARNING") as log: + reporter = ReportHCS(self.schema, self.aws_provider_type, self.aws_provider_uuid, self.tracing_id) + reporter.generate_report(self.yesterday, self.today) + + self.assertIn(expected_error_msg, log.output[0]) + self.assertIn("get_hcs_daily_summary exception", log.output[0]) From c7eb09dc6ece6dc1a0860bde0252199a6402c1a4 Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Tue, 25 Jun 2024 17:35:08 +0100 Subject: [PATCH 13/26] batch delete S3 files (#5180) --- koku/masu/celery/tasks.py | 26 ++++---------- .../downloader/aws/aws_report_downloader.py | 17 ++++++++-- .../azure/azure_report_downloader.py | 10 ++++-- koku/masu/test/celery/test_tasks.py | 20 +++++------ koku/masu/test/util/aws/test_common.py | 34 ++++++++++++++++--- koku/masu/util/aws/common.py | 33 ++++++++++-------- 6 files changed, 86 insertions(+), 54 deletions(-) diff --git a/koku/masu/celery/tasks.py b/koku/masu/celery/tasks.py index 42824c8ba9..c23f19c524 100644 --- a/koku/masu/celery/tasks.py +++ b/koku/masu/celery/tasks.py @@ -5,7 +5,6 @@ """Asynchronous tasks.""" import json import logging -import math import re import requests @@ -45,6 +44,7 @@ from masu.processor.tasks import autovacuum_tune_schema from masu.processor.tasks import DEFAULT from masu.prometheus_stats import QUEUES +from masu.util.aws.common import delete_s3_objects from masu.util.aws.common import get_s3_resource from masu.util.oci.common import OCI_REPORT_TYPES from masu.util.ocp.common import OCP_REPORT_TYPES @@ -119,9 
+119,8 @@ def purge_s3_files(prefix, schema_name, provider_type, provider_uuid): LOG.info(message) LOG.info("Attempting to delete our archived data in S3 under %s", prefix) - remaining_objects = deleted_archived_with_prefix(settings.S3_BUCKET_NAME, prefix) - LOG.info(f"Deletion complete. Remaining objects: {remaining_objects}") - return remaining_objects + deleted_archived_with_prefix(settings.S3_BUCKET_NAME, prefix) + LOG.info("Deletion complete") @celery_app.task(name="masu.celery.tasks.purge_manifest_records", queue=DEFAULT) @@ -162,23 +161,12 @@ def deleted_archived_with_prefix(s3_bucket_name, prefix): s3_bucket_name (str): The s3 bucket name prefix (str): The prefix for deletion """ + context = {"service_task": "purge_old_data"} s3_resource = get_s3_resource(settings.S3_ACCESS_KEY, settings.S3_SECRET, settings.S3_REGION) s3_bucket = s3_resource.Bucket(s3_bucket_name) - object_keys = [{"Key": s3_object.key} for s3_object in s3_bucket.objects.filter(Prefix=prefix)] - LOG.info(f"Starting objects: {len(object_keys)}") - batch_size = 1000 # AWS S3 delete API limits to 1000 objects per request. 
- for batch_number in range(math.ceil(len(object_keys) / batch_size)): - batch_start = batch_size * batch_number - batch_end = batch_start + batch_size - object_keys_batch = object_keys[batch_start:batch_end] - s3_bucket.delete_objects(Delete={"Objects": object_keys_batch}) - - remaining_objects = list(s3_bucket.objects.filter(Prefix=prefix)) - if remaining_objects: - LOG.warning( - "Found %s objects after attempting to delete all objects with prefix %s", len(remaining_objects), prefix - ) - return remaining_objects + object_keys = [s3_object.key for s3_object in s3_bucket.objects.filter(Prefix=prefix)] + LOG.info(f"starting objects: {len(object_keys)}") + delete_s3_objects("purge masu endpoint", object_keys, context) @celery_app.task( # noqa: C901 diff --git a/koku/masu/external/downloader/aws/aws_report_downloader.py b/koku/masu/external/downloader/aws/aws_report_downloader.py index f5a261ad5e..7fd5ff2682 100644 --- a/koku/masu/external/downloader/aws/aws_report_downloader.py +++ b/koku/masu/external/downloader/aws/aws_report_downloader.py @@ -42,7 +42,15 @@ class AWSReportDownloaderNoFileError(Exception): def get_processing_date( - local_file, s3_csv_path, manifest_id, provider_uuid, start_date, end_date, context, tracing_id + local_file, + s3_csv_path, + manifest_id, + provider_uuid, + start_date, + end_date, + context, + tracing_id, + ingress_reports=None, ): """ Fetch initial dataframe from CSV plus processing date and time_inteval. 
@@ -70,8 +78,9 @@ def get_processing_date( base_cols = copy.deepcopy(utils.RECOMMENDED_ALT_COLUMNS) | copy.deepcopy(utils.OPTIONAL_ALT_COLS) data_frame = pd.read_csv(local_file, usecols=[invoice_bill], nrows=1) use_cols = com_utils.fetch_optional_columns(local_file, base_cols, optional_cols, tracing_id, context) + # ingress custom filter flow should always reprocess everything if ( - data_frame[invoice_bill].any() and start_date.month != DateHelper().now_utc.month + data_frame[invoice_bill].any() and start_date.month != DateHelper().now_utc.month or ingress_reports ) or not check_provider_setup_complete(provider_uuid): ReportManifestDBAccessor().mark_s3_parquet_to_be_cleared(manifest_id) process_date = ReportManifestDBAccessor().set_manifest_daily_start_date(manifest_id, start_date) @@ -91,6 +100,7 @@ def create_daily_archives( # noqa C901 manifest_id, start_date, context, + ingress_reports=None, ): """ Create daily CSVs from incoming report and archive to S3. @@ -113,7 +123,7 @@ def create_daily_archives( # noqa C901 account, Provider.PROVIDER_AWS, provider_uuid, start_date, Config.CSV_DATA_TYPE ) use_cols, time_interval, process_date = get_processing_date( - local_file, s3_csv_path, manifest_id, provider_uuid, start_date, end_date, context, tracing_id + local_file, s3_csv_path, manifest_id, provider_uuid, start_date, end_date, context, tracing_id, ingress_reports ) try: LOG.info(log_json(tracing_id, msg="pandas read csv with following usecols", usecols=use_cols, context=context)) @@ -437,6 +447,7 @@ def download_file(self, key, stored_etag=None, manifest_id=None, start_date=None manifest_id, start_date, self.context, + self.ingress_reports, ) msg = f"Download complete for {key}" diff --git a/koku/masu/external/downloader/azure/azure_report_downloader.py b/koku/masu/external/downloader/azure/azure_report_downloader.py index c05a59379b..900060c153 100644 --- a/koku/masu/external/downloader/azure/azure_report_downloader.py +++ 
b/koku/masu/external/downloader/azure/azure_report_downloader.py @@ -41,7 +41,9 @@ class AzureReportDownloaderNoFileError(Exception): """Azure Report Downloader error for missing file.""" -def get_processing_date(s3_csv_path, manifest_id, provider_uuid, start_date, end_date, context, tracing_id): +def get_processing_date( + s3_csv_path, manifest_id, provider_uuid, start_date, end_date, context, tracing_id, ingress_reports=None +): """ Fetch initial dataframe from CSV plus start_delta and time_inteval. @@ -56,12 +58,14 @@ def get_processing_date(s3_csv_path, manifest_id, provider_uuid, start_date, end """ dh = DateHelper() # Azure does not have an invoice column so we have to do some guessing here + # Ingres reports should always clear and process everything if ( start_date.year < dh.today.year and dh.today.day > 1 or start_date.month < dh.today.month and dh.today.day > 1 or not check_provider_setup_complete(provider_uuid) + or ingress_reports ): process_date = start_date ReportManifestDBAccessor().mark_s3_parquet_to_be_cleared(manifest_id) @@ -82,6 +86,7 @@ def create_daily_archives( manifest_id, start_date, context, + ingress_reports=None, ): """ Create daily CSVs from incoming report and archive to S3. 
@@ -103,7 +108,7 @@ def create_daily_archives( account, Provider.PROVIDER_AZURE, provider_uuid, start_date, Config.CSV_DATA_TYPE ) process_date = get_processing_date( - s3_csv_path, manifest_id, provider_uuid, start_date, end_date, context, tracing_id + s3_csv_path, manifest_id, provider_uuid, start_date, end_date, context, tracing_id, ingress_reports ) time_interval = pd.read_csv(local_file, nrows=0).columns.intersection( {"UsageDateTime", "Date", "date", "usagedatetime"} @@ -456,6 +461,7 @@ def download_file(self, key, stored_etag=None, manifest_id=None, start_date=None manifest_id, start_date, self.context, + self.ingress_reports, ) msg = f"Download complete for {key}" diff --git a/koku/masu/test/celery/test_tasks.py b/koku/masu/test/celery/test_tasks.py index 04ed2685bf..68d917573a 100644 --- a/koku/masu/test/celery/test_tasks.py +++ b/koku/masu/test/celery/test_tasks.py @@ -200,26 +200,22 @@ def test_delete_archived_data_bad_inputs_exception(self): self.assertIn("provider_uuid", str(e.exception)) @patch("masu.celery.tasks.get_s3_resource") - def test_deleted_archived_with_prefix_success(self, mock_resource): + @patch("masu.celery.tasks.delete_s3_objects") + def test_deleted_archived_with_prefix_success(self, mock_delete, mock_resource): """Test that delete_archived_data correctly interacts with AWS S3.""" expected_prefix = "data/csv/10001/00000000-0000-0000-0000-000000000001/" # Generate enough fake objects to expect calling the S3 delete api twice. mock_bucket = mock_resource.return_value.Bucket.return_value bucket_objects = [DummyS3Object(key=fake.file_path()) for _ in range(1234)] - expected_keys = [{"Key": bucket_object.key} for bucket_object in bucket_objects] - - # Leave one object mysteriously not deleted to cover the LOG.warning use case. 
- mock_bucket.objects.filter.side_effect = [bucket_objects, bucket_objects[:1]] - - with self.assertLogs("masu.celery.tasks", "WARNING") as captured_logs: + expected_len = len(bucket_objects) + mock_bucket.objects.filter.side_effect = [bucket_objects] + with self.assertLogs("masu") as captured_logs: tasks.deleted_archived_with_prefix(mock_bucket, expected_prefix) mock_resource.assert_called() - mock_bucket.delete_objects.assert_has_calls( - [call(Delete={"Objects": expected_keys[:1000]}), call(Delete={"Objects": expected_keys[1000:]})] - ) - mock_bucket.objects.filter.assert_has_calls([call(Prefix=expected_prefix), call(Prefix=expected_prefix)]) - self.assertIn("Found 1 objects after attempting", captured_logs.output[-1]) + mock_bucket.objects.filter.assert_called_with(Prefix=expected_prefix) + mock_delete.assert_called() + self.assertIn(f"starting objects: {expected_len}", captured_logs.output[-1]) @patch("masu.celery.tasks.deleted_archived_with_prefix") def test_delete_archived_data_success(self, mock_delete): diff --git a/koku/masu/test/util/aws/test_common.py b/koku/masu/test/util/aws/test_common.py index d1a12a4735..393b844ad9 100644 --- a/koku/masu/test/util/aws/test_common.py +++ b/koku/masu/test/util/aws/test_common.py @@ -4,8 +4,10 @@ # SPDX-License-Identifier: Apache-2.0 # import random +from collections import namedtuple from datetime import datetime from unittest import TestCase +from unittest.mock import call from unittest.mock import Mock from unittest.mock import mock_open from unittest.mock import patch @@ -36,6 +38,7 @@ AWS_REGIONS = list(filter(lambda reg: not reg.startswith("cn-"), AWS_REGIONS)) REGION = random.choice(AWS_REGIONS) +KEY = Faker() NAME = Faker().word() BUCKET = Faker().word() PREFIX = Faker().word() @@ -62,6 +65,8 @@ } MOCK_BOTO_CLIENT.assume_role.return_value = response +DummyS3Object = namedtuple("DummyS3Object", "key") + class TestAWSUtils(MasuTestCase): """Tests for AWS utilities.""" @@ -467,6 +472,27 @@ def 
test_filter_s3_objects_less_than_with_error(self): ) self.assertListEqual(filtered, []) + @patch("masu.util.aws.common.get_s3_resource") + def test_batch_delete_s3_objects(self, mock_resource): + """Test that delete_archived_data correctly interacts with AWS S3.""" + context = {"test_delete": "testing"} + # Generate enough fake objects to expect calling the S3 delete api twice. + mock_bucket = mock_resource.return_value.Bucket.return_value + bucket_objects = [DummyS3Object(key=KEY.file_path()) for _ in range(1234)] + keys = [bucket_object.key for bucket_object in bucket_objects] + expected_keys = [{"Key": bucket_object.key} for bucket_object in bucket_objects] + + # Leave one object mysteriously not deleted to cover the LOG.warning use case. + mock_bucket.objects.filter.side_effect = [bucket_objects, bucket_objects[:1]] + + with self.assertLogs("masu.util.aws.common") as captured_logs: + utils.delete_s3_objects("request_id", keys, context) + mock_resource.assert_called() + mock_bucket.delete_objects.assert_has_calls( + [call(Delete={"Objects": expected_keys[:1000]}), call(Delete={"Objects": expected_keys[1000:]})] + ) + self.assertIn("removed files from s3 bucket", captured_logs.output[-1]) + def test_remove_s3_objects_not_matching_metadata(self): """Test remove_s3_objects_not_matching_metadata.""" metadata_key = "manifestid" @@ -496,7 +522,7 @@ def test_remove_s3_objects_not_matching_metadata(self): removed = utils.delete_s3_objects_not_matching_metadata( "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value ) - self.assertListEqual(removed, [expected_key]) + self.assertListEqual(removed, [{"Key": expected_key}]) with patch("masu.util.aws.common.get_s3_resource") as mock_s3: client_error_object = Mock() @@ -514,7 +540,7 @@ def test_remove_s3_objects_not_matching_metadata(self): "masu.util.aws.common.get_s3_resource" ) as mock_s3: mock_s3.return_value.Object.return_value.delete.side_effect = ClientError({}, "Error") - 
mock_get_objects.return_value = [expected_key] + mock_get_objects.return_value = [] removed = utils.delete_s3_objects_not_matching_metadata( "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value ) @@ -604,7 +630,7 @@ def test_remove_s3_objects_matching_metadata(self): removed = utils.delete_s3_objects_matching_metadata( "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value ) - self.assertListEqual(removed, [expected_key]) + self.assertListEqual(removed, [{"Key": expected_key}]) with patch("masu.util.aws.common.get_s3_resource") as mock_s3: client_error_object = Mock() @@ -622,7 +648,7 @@ def test_remove_s3_objects_matching_metadata(self): "masu.util.aws.common.get_s3_resource" ) as mock_s3: mock_s3.return_value.Object.return_value.delete.side_effect = ClientError({}, "Error") - mock_get_objects.return_value = [expected_key] + mock_get_objects.return_value = [] removed = utils.delete_s3_objects_matching_metadata( "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value ) diff --git a/koku/masu/util/aws/common.py b/koku/masu/util/aws/common.py index 339564e804..e562db13bf 100644 --- a/koku/masu/util/aws/common.py +++ b/koku/masu/util/aws/common.py @@ -6,6 +6,7 @@ import contextlib import datetime import logging +import math import re import time import typing as t @@ -790,23 +791,27 @@ def delete_s3_objects_not_matching_metadata( def delete_s3_objects(request_id, keys_to_delete, context) -> list[str]: + keys_to_delete = [{"Key": key} for key in keys_to_delete] + LOG.info(log_json(request_id, msg="attempting to batch delete s3 files", context=context)) s3_resource = get_s3_resource(settings.S3_ACCESS_KEY, settings.S3_SECRET, settings.S3_REGION) + s3_bucket = s3_resource.Bucket(settings.S3_BUCKET_NAME) try: - removed = [] - for key in keys_to_delete: - s3_resource.Object(settings.S3_BUCKET_NAME, key).delete() - removed.append(key) - if removed: - LOG.info( - 
log_json( - request_id, - msg="removed files from s3 bucket", - context=context, - bucket=settings.S3_BUCKET_NAME, - file_list=removed, - ) + batch_size = 1000 # AWS S3 delete API limits to 1000 objects per request. + for batch_number in range(math.ceil(len(keys_to_delete) / batch_size)): + batch_start = batch_size * batch_number + batch_end = batch_start + batch_size + object_keys_batch = keys_to_delete[batch_start:batch_end] + s3_bucket.delete_objects(Delete={"Objects": object_keys_batch}) + LOG.info( + log_json( + request_id, + msg="removed files from s3 bucket", + context=context, + bucket=settings.S3_BUCKET_NAME, + file_list=keys_to_delete, ) - return removed + ) + return keys_to_delete except (EndpointConnectionError, ClientError) as err: LOG.warning( log_json( From 99f116b8c70b060e1b85af6336551bc1aa05a411 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Jun 2024 01:06:44 +0300 Subject: [PATCH 14/26] Bump urllib3 from 1.26.18 to 1.26.19 in the pip group across 1 directory (#5172) Bumps the pip group with 1 update in the / directory: [urllib3](https://github.com/urllib3/urllib3). Updates `urllib3` from 1.26.18 to 1.26.19 - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/1.26.19/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.26.18...1.26.19) --- updated-dependencies: - dependency-name: urllib3 dependency-type: indirect dependency-group: pip ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Pipfile.lock | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index 66f8bc0895..6e0cfc5155 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1698,11 +1698,12 @@ }, "urllib3": { "hashes": [ - "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", - "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0" + "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3", + "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429" ], + "index": "pypi", "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.18" + "version": "==1.26.19" }, "vine": { "hashes": [ From 011ec69537906e4d20d79db471948ad3efaf484f Mon Sep 17 00:00:00 2001 From: Sam Doran Date: Wed, 26 Jun 2024 10:02:54 -0400 Subject: [PATCH 15/26] Add flower as a dev dependency (#5189) * Add docs --- Pipfile | 1 + Pipfile.lock | 99 +++++++++++++++++++++++++++++++++++++++++++++++- docs/devtools.md | 18 +++++++++ 3 files changed, 117 insertions(+), 1 deletion(-) diff --git a/Pipfile b/Pipfile index 9d35bd29b6..eb948c3172 100644 --- a/Pipfile +++ b/Pipfile @@ -64,6 +64,7 @@ sqlparse = "*" [dev-packages] argh = ">=0.26.2" astroid = ">=2.3" +flower = "*" coverage = ">=5.0" crc-bonfire = "*" debugpy = ">=1.3.0" diff --git a/Pipfile.lock b/Pipfile.lock index 6e0cfc5155..2c7772b475 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "420c125a8ecfaebc012aab840f5461a14e57c51ae3e5213ea2341b46ace1d86b" + "sha256": "b67b765f3e4de6c57aed4affb209e2fef66468fcded4fa762d5147bb829dc685" }, "pipfile-spec": 6, "requires": { @@ -1738,6 +1738,14 @@ } }, "develop": { + "amqp": { + "hashes": [ + "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637", + 
"sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd" + ], + "markers": "python_version >= '3.6'", + "version": "==5.2.0" + }, "anytree": { "hashes": [ "sha256:244def434ccf31b668ed282954e5d315b4e066c4940b94aff4a7962d85947830", @@ -1794,6 +1802,13 @@ "index": "pypi", "version": "==12.20.0" }, + "billiard": { + "hashes": [ + "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547", + "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b" + ], + "version": "==3.6.4.0" + }, "boto3": { "hashes": [ "sha256:4460958d2b0c53bd2195b23ed5d45db2350e514486fe8caeb38b285b30742280", @@ -1825,6 +1840,15 @@ "index": "pypi", "version": "==5.3.3" }, + "celery": { + "hashes": [ + "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14", + "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==5.2.7" + }, "certifi": { "hashes": [ "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", @@ -2016,6 +2040,29 @@ "markers": "python_version >= '3.7'", "version": "==8.1.7" }, + "click-didyoumean": { + "hashes": [ + "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", + "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c" + ], + "markers": "python_full_version >= '3.6.2'", + "version": "==0.3.1" + }, + "click-plugins": { + "hashes": [ + "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", + "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8" + ], + "version": "==1.1.1" + }, + "click-repl": { + "hashes": [ + "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", + "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812" + ], + "markers": "python_version >= '3.6'", + "version": "==0.3.0" + }, "contourpy": { "hashes": [ 
"sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2", @@ -2252,6 +2299,15 @@ "index": "pypi", "version": "==7.0.0" }, + "flower": { + "hashes": [ + "sha256:5ab717b979530770c16afb48b50d2a98d23c3e9fe39851dcf6bc4d01845a02a0", + "sha256:9db2c621eeefbc844c8dd88be64aef61e84e2deb29b271e02ab2b5b9f01068e2" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==2.0.1" + }, "fonttools": { "hashes": [ "sha256:00d9abf4b400f98fb895566eb298f60432b4b29048e3dc02807427b09a06604e", @@ -2502,6 +2558,14 @@ ], "version": "==1.62.2" }, + "humanize": { + "hashes": [ + "sha256:582a265c931c683a7e9b8ed9559089dea7edcf6cc95be39a3cbc2c5d5ac2bcfa", + "sha256:ce284a76d5b1377fd8836733b983bfb0b76f1aa1c090de2566fcf008d7f6ab16" + ], + "markers": "python_version >= '3.8'", + "version": "==4.9.0" + }, "identify": { "hashes": [ "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa", @@ -2674,6 +2738,15 @@ "index": "pypi", "version": "==4.5.2" }, + "kombu": { + "hashes": [ + "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610", + "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4" + ], + "index": "pypi", + "markers": "python_version >= '3.7'", + "version": "==5.2.4" + }, "kubernetes": { "hashes": [ "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e", @@ -3084,6 +3157,23 @@ "index": "pypi", "version": "==3.7.1" }, + "prometheus-client": { + "hashes": [ + "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89", + "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==0.20.0" + }, + "prompt-toolkit": { + "hashes": [ + "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10", + "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==3.0.47" + }, "proto-plus": { 
"hashes": [ "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2", @@ -3468,6 +3558,13 @@ "index": "pypi", "version": "==4.0.1" }, + "wcwidth": { + "hashes": [ + "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", + "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5" + ], + "version": "==0.2.13" + }, "websocket-client": { "hashes": [ "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", diff --git a/docs/devtools.md b/docs/devtools.md index a42c35bad0..8e0b59bc94 100644 --- a/docs/devtools.md +++ b/docs/devtools.md @@ -313,3 +313,21 @@ Examples commands and results: 'schema1': {'PG': {'reporting_awscostentrylineitem_daily_summary': 'All data complete for table: reporting_awscostentrylineitem_daily_summary'}, 'TRINO': 'PG data complete so skipping trino query'}}, 'schema3': {'PG': {'reporting_awscostentrylineitem_daily_summary': 'All data complete for table: reporting_awscostentrylineitem_daily_summary'}, 'TRINO': 'PG data complete so skipping trino query'}} ``` + +## Monitoring Celery ## + +[Flower] is a tool for monitoring Celery clusters. It provides detailed information about the status of workers and tasks. + +Flower is installed with the dev dependencies but it is not run by default. To start Flower, run + +``` +celery -A koku flower +``` + +Open http://localhost:5555 to see Celery details. + +See the [Flower documentation][flower] for detailed usage information. 
+ + + +[flower]: https://flower.readthedocs.io/en/latest/index.html From 25e3062ce27f09cee396b97df5f53114a6552142 Mon Sep 17 00:00:00 2001 From: Cody Myers Date: Thu, 27 Jun 2024 15:37:31 -0400 Subject: [PATCH 16/26] [COST-4844] Serializer update for ordering by storageclass (#5174) --- koku/api/report/ocp/serializers.py | 18 +++++-- .../report/test/ocp/test_ocp_query_handler.py | 47 ++++++++++--------- 2 files changed, 38 insertions(+), 27 deletions(-) diff --git a/koku/api/report/ocp/serializers.py b/koku/api/report/ocp/serializers.py index c13a598d51..e9cfc386f9 100644 --- a/koku/api/report/ocp/serializers.py +++ b/koku/api/report/ocp/serializers.py @@ -17,11 +17,19 @@ DISTRIBUTED_COST_INTERNAL = {"distributed_cost": "cost_total_distributed"} -def order_by_field_requires_group_by(data, order_name, group_by_key): +def order_by_field_requires_group_by(data, order_name, group_by_keys): error = {} - if order_name in data.get("order_by", {}) and group_by_key not in data.get("group_by", {}): - error["order_by"] = gettext(f"Cannot order by field {order_name} without grouping by {group_by_key}.") - raise serializers.ValidationError(error) + if order_name in data.get("order_by", {}): + # Ensure group_by_keys is a list of keys to check + if not isinstance(group_by_keys, list): + group_by_keys = [group_by_keys] + + # Check if none of the required group_by keys are present + if not any(key in data.get("group_by", {}) for key in group_by_keys): + error["order_by"] = gettext( + f"Cannot order by field {order_name} without grouping by one of {', '.join(group_by_keys)}." 
+ ) + raise serializers.ValidationError(error) class OCPGroupBySerializer(GroupSerializer): @@ -165,7 +173,7 @@ def validate(self, data): error["order_by"] = gettext("Cannot order by delta without a delta param") raise serializers.ValidationError(error) order_by_field_requires_group_by(data, DISTRIBUTED_COST_INTERNAL["distributed_cost"], "project") - order_by_field_requires_group_by(data, "storage_class", "persistentvolumeclaim") + order_by_field_requires_group_by(data, "storage_class", ["persistentvolumeclaim", "storageclass"]) order_by_field_requires_group_by(data, "persistentvolumeclaim", "persistentvolumeclaim") if data.get("delta") == DISTRIBUTED_COST_INTERNAL["distributed_cost"] and "project" not in data.get( "group_by", {} diff --git a/koku/api/report/test/ocp/test_ocp_query_handler.py b/koku/api/report/test/ocp/test_ocp_query_handler.py index 28f1d93d74..8e500257f5 100644 --- a/koku/api/report/test/ocp/test_ocp_query_handler.py +++ b/koku/api/report/test/ocp/test_ocp_query_handler.py @@ -131,28 +131,31 @@ def test_cpu_memory_order_bys(self): def test_storage_class_order_bys(self): """Test that we can order by the pvc values.""" - url = "?group_by[project]=*&group_by[persistentvolumeclaim]=*&order_by[storage_class]=desc" - query_params = self.mocked_query_params(url, OCPVolumeView) - handler = OCPReportQueryHandler(query_params) - query_data = handler.execute_query() - self.assertIsNotNone(query_data.get("data")) - self.assertIsNotNone(query_data.get("total")) - self.assertIsNotNone(query_data["total"].get("storage_class")) - first_date = query_data["data"][0] - tested = False - for cluster in first_date.get("projects", []): - pvc_list = cluster.get("persistentvolumeclaims") - storage_class_order_result = [] - expected = None - for pvc in pvc_list: - for pvc_value in pvc.get("values", []): - storage_class_order_result.append(pvc_value.get("storage_class")) - if not expected: - expected = deepcopy(storage_class_order_result) - expected.sort(reverse=True) - 
self.assertEqual(storage_class_order_result, expected) - tested = True - self.assertTrue(tested) + group_bys = ["persistentvolumeclaim", "storageclass"] + for group_by in group_bys: + with self.subTest(group_by=group_by): + url = f"?group_by[project]=*&group_by[{group_by}]=*&order_by[storage_class]=desc" + query_params = self.mocked_query_params(url, OCPVolumeView) + handler = OCPReportQueryHandler(query_params) + query_data = handler.execute_query() + self.assertIsNotNone(query_data.get("data")) + self.assertIsNotNone(query_data.get("total")) + self.assertIsNotNone(query_data["total"].get("storage_class")) + first_date = query_data["data"][0] + tested = False + for project in first_date.get("projects", []): + group_list = project.get(f"{group_by}s") + storage_class_order_result = [] + expected = None + for element in group_list: + for element_value in element.get("values", []): + storage_class_order_result.append(element_value.get("storage_class")) + if not expected: + expected = deepcopy(storage_class_order_result) + expected.sort(reverse=True) + self.assertEqual(storage_class_order_result, expected) + tested = True + self.assertTrue(tested) def test_persistentvolumeclaim_order_by(self): """Test that we can order by the pvc values.""" From 1810fa18693397453674d38b5c4cc6c6e581ca89 Mon Sep 17 00:00:00 2001 From: Sam Doran Date: Thu, 27 Jun 2024 17:11:46 -0400 Subject: [PATCH 17/26] Switch to using podman in build_deploy (#5193) The VM used in CI is now RHEL 8 --- build_deploy.sh | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/build_deploy.sh b/build_deploy.sh index 1cc11cce96..0ad5bc8989 100755 --- a/build_deploy.sh +++ b/build_deploy.sh @@ -30,13 +30,9 @@ function job_cleanup() { trap job_cleanup EXIT ERR SIGINT SIGTERM -DOCKER_CONF="$TMP_JOB_DIR/.docker" -mkdir -p "$DOCKER_CONF" -docker --config="$DOCKER_CONF" login -u="$QUAY_USER" -p="$QUAY_TOKEN" quay.io -docker --config="$DOCKER_CONF" build --build-arg GIT_COMMIT="$GIT_COMMIT" 
-t "${IMAGE}:${IMAGE_TAG}" . -docker --config="$DOCKER_CONF" push "${IMAGE}:${IMAGE_TAG}" +podman login -u="$QUAY_USER" -p="$QUAY_TOKEN" quay.io +podman build --build-arg GIT_COMMIT="$GIT_COMMIT" -t "${IMAGE}:${IMAGE_TAG}" . +podman push "${IMAGE}:${IMAGE_TAG}" -docker --config="$DOCKER_CONF" tag "${IMAGE}:${IMAGE_TAG}" "${IMAGE}:latest" -docker --config="$DOCKER_CONF" push "${IMAGE}:latest" - -docker --config="$DOCKER_CONF" logout +podman tag "${IMAGE}:${IMAGE_TAG}" "${IMAGE}:latest" +podman push "${IMAGE}:latest" From 4baa51101f0cf7e33a5c447c8f38df646a9ebadf Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Fri, 28 Jun 2024 14:01:02 +0100 Subject: [PATCH 18/26] skip polling providers still processing (#5181) * skip polling providers that are still processing --- .env.example | 2 + deploy/clowdapp.yaml | 94 +++++++++++++++++++ deploy/kustomize/base/base.yaml | 6 ++ deploy/kustomize/patches/masu.yaml | 4 + deploy/kustomize/patches/scheduler.yaml | 4 + deploy/kustomize/patches/worker-celery.yaml | 4 + .../patches/worker-cost-model-penalty.yaml | 4 + .../patches/worker-cost-model-xl.yaml | 4 + .../kustomize/patches/worker-cost-model.yaml | 4 + .../patches/worker-download-penalty.yaml | 4 + .../kustomize/patches/worker-download-xl.yaml | 4 + deploy/kustomize/patches/worker-download.yaml | 4 + deploy/kustomize/patches/worker-hcs.yaml | 4 + .../kustomize/patches/worker-ocp-penalty.yaml | 4 + deploy/kustomize/patches/worker-ocp-xl.yaml | 4 + deploy/kustomize/patches/worker-ocp.yaml | 4 + .../patches/worker-priority-penalty.yaml | 4 + .../kustomize/patches/worker-priority-xl.yaml | 4 + deploy/kustomize/patches/worker-priority.yaml | 4 + .../patches/worker-refresh-penalty.yaml | 4 + .../kustomize/patches/worker-refresh-xl.yaml | 4 + deploy/kustomize/patches/worker-refresh.yaml | 4 + .../patches/worker-summary-penalty.yaml | 4 + .../kustomize/patches/worker-summary-xl.yaml | 4 + deploy/kustomize/patches/worker-summary.yaml | 4 + docker-compose.yml | 2 + 
koku/koku/settings.py | 3 + koku/masu/processor/orchestrator.py | 50 +++++++++- koku/masu/test/processor/test_orchestrator.py | 94 ++++++++++++++++++- 29 files changed, 332 insertions(+), 7 deletions(-) diff --git a/.env.example b/.env.example index a5c2d2bca9..7b0f1d1ab5 100644 --- a/.env.example +++ b/.env.example @@ -23,6 +23,8 @@ PROMETHEUS_MULTIPROC_DIR='/tmp' CURRENCY_URL=https://open.er-api.com/v6/latest/USD UNLEASH_TOKEN='*:*.dbffffc83b1f92eeaf133a7eb878d4c58231acc159b5e1478ce53cfc' POLLING_TIMER=60 # Set how often you can trigger downloads per provider +PROCESSING_WAIT_TIMER=3 # Set how many days before a long processing provider can be polled again +LARGE_PROCESSING_WAIT_TIMER=7 # Set how many days before a long LARGE customer processing provider can be polled again MAX_GROUP_BY_OVERRIDE=3 # Set maximum aloud group bys TAG_ENABLED_LIMIT=200 # Set the max amount of tags per account DELAYED_TASK_TIME=30 # Set the seconds before a delayed summary task should expire diff --git a/deploy/clowdapp.yaml b/deploy/clowdapp.yaml index 56c7730cf5..716c968a52 100644 --- a/deploy/clowdapp.yaml +++ b/deploy/clowdapp.yaml @@ -492,6 +492,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: KAFKA_CONNECT @@ -671,6 +675,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -1150,6 +1158,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + 
value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -1326,6 +1338,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -1508,6 +1524,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -1690,6 +1710,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -1874,6 +1898,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2060,6 +2088,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2246,6 +2278,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: 
${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2430,6 +2466,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2614,6 +2654,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2798,6 +2842,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST @@ -2984,6 +3032,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES @@ -3174,6 +3226,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES @@ -3364,6 +3420,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: 
DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES @@ -3552,6 +3612,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -3734,6 +3798,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -3916,6 +3984,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -4098,6 +4170,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES @@ -4284,6 +4360,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES @@ -4470,6 +4550,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: 
DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES @@ -4658,6 +4742,10 @@ objects: value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT @@ -5554,6 +5642,12 @@ parameters: - displayName: Provider count for batch polling name: POLLING_BATCH_SIZE value: "100" +- displayName: Timer to prevent triggering tasks while still processing in days + name: PROCESSING_WAIT_TIMER + value: "3" +- displayName: Timer to prevent triggering tasks while still processing in days + name: LARGE_PROCESSING_WAIT_TIMER + value: "7" - displayName: Enable Tags Limit name: TAG_ENABLED_LIMIT value: "200" diff --git a/deploy/kustomize/base/base.yaml b/deploy/kustomize/base/base.yaml index 88d518c0e1..874dcb9de5 100644 --- a/deploy/kustomize/base/base.yaml +++ b/deploy/kustomize/base/base.yaml @@ -515,6 +515,12 @@ parameters: - displayName: Provider count for batch polling name: POLLING_BATCH_SIZE value: "100" +- displayName: Timer to prevent triggering tasks while still processing in days + name: PROCESSING_WAIT_TIMER + value: "3" +- displayName: Timer to prevent triggering tasks while still processing in days + name: LARGE_PROCESSING_WAIT_TIMER + value: "7" - displayName: Enable Tags Limit name: TAG_ENABLED_LIMIT value: "200" diff --git a/deploy/kustomize/patches/masu.yaml b/deploy/kustomize/patches/masu.yaml index 324e554c7c..637ae00a81 100644 --- a/deploy/kustomize/patches/masu.yaml +++ b/deploy/kustomize/patches/masu.yaml @@ -106,6 +106,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: 
KAFKA_CONNECT diff --git a/deploy/kustomize/patches/scheduler.yaml b/deploy/kustomize/patches/scheduler.yaml index cc5be798e3..15a6288154 100644 --- a/deploy/kustomize/patches/scheduler.yaml +++ b/deploy/kustomize/patches/scheduler.yaml @@ -101,6 +101,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-celery.yaml b/deploy/kustomize/patches/worker-celery.yaml index 3088e39b2f..45450a98ae 100644 --- a/deploy/kustomize/patches/worker-celery.yaml +++ b/deploy/kustomize/patches/worker-celery.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-cost-model-penalty.yaml b/deploy/kustomize/patches/worker-cost-model-penalty.yaml index 092a09e2fc..1ae06fe991 100644 --- a/deploy/kustomize/patches/worker-cost-model-penalty.yaml +++ b/deploy/kustomize/patches/worker-cost-model-penalty.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-cost-model-xl.yaml b/deploy/kustomize/patches/worker-cost-model-xl.yaml index b819c7cf34..d684d7550e 100644 --- a/deploy/kustomize/patches/worker-cost-model-xl.yaml +++ b/deploy/kustomize/patches/worker-cost-model-xl.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: 
POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-cost-model.yaml b/deploy/kustomize/patches/worker-cost-model.yaml index c183fb38cd..2376f0345b 100644 --- a/deploy/kustomize/patches/worker-cost-model.yaml +++ b/deploy/kustomize/patches/worker-cost-model.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-download-penalty.yaml b/deploy/kustomize/patches/worker-download-penalty.yaml index 198d7d3678..106ceb3ddc 100644 --- a/deploy/kustomize/patches/worker-download-penalty.yaml +++ b/deploy/kustomize/patches/worker-download-penalty.yaml @@ -93,6 +93,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST diff --git a/deploy/kustomize/patches/worker-download-xl.yaml b/deploy/kustomize/patches/worker-download-xl.yaml index 1a75c990ab..dafef6e3e1 100644 --- a/deploy/kustomize/patches/worker-download-xl.yaml +++ b/deploy/kustomize/patches/worker-download-xl.yaml @@ -93,6 +93,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST diff --git 
a/deploy/kustomize/patches/worker-download.yaml b/deploy/kustomize/patches/worker-download.yaml index 5aaf37b5aa..113ae31235 100644 --- a/deploy/kustomize/patches/worker-download.yaml +++ b/deploy/kustomize/patches/worker-download.yaml @@ -93,6 +93,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST diff --git a/deploy/kustomize/patches/worker-hcs.yaml b/deploy/kustomize/patches/worker-hcs.yaml index 0704ba63d2..d0897b5d16 100644 --- a/deploy/kustomize/patches/worker-hcs.yaml +++ b/deploy/kustomize/patches/worker-hcs.yaml @@ -93,6 +93,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-ocp-penalty.yaml b/deploy/kustomize/patches/worker-ocp-penalty.yaml index bfdd2b4c12..1e71be955e 100644 --- a/deploy/kustomize/patches/worker-ocp-penalty.yaml +++ b/deploy/kustomize/patches/worker-ocp-penalty.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST diff --git a/deploy/kustomize/patches/worker-ocp-xl.yaml b/deploy/kustomize/patches/worker-ocp-xl.yaml index be4e779af2..1917bd82c4 100644 --- a/deploy/kustomize/patches/worker-ocp-xl.yaml +++ b/deploy/kustomize/patches/worker-ocp-xl.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - 
name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST diff --git a/deploy/kustomize/patches/worker-ocp.yaml b/deploy/kustomize/patches/worker-ocp.yaml index b5eec46b87..db4149251a 100644 --- a/deploy/kustomize/patches/worker-ocp.yaml +++ b/deploy/kustomize/patches/worker-ocp.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TAG_ENABLED_LIMIT value: ${TAG_ENABLED_LIMIT} - name: TRINO_HOST diff --git a/deploy/kustomize/patches/worker-priority-penalty.yaml b/deploy/kustomize/patches/worker-priority-penalty.yaml index 1e91e79e4a..376ebab80b 100644 --- a/deploy/kustomize/patches/worker-priority-penalty.yaml +++ b/deploy/kustomize/patches/worker-priority-penalty.yaml @@ -93,6 +93,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES diff --git a/deploy/kustomize/patches/worker-priority-xl.yaml b/deploy/kustomize/patches/worker-priority-xl.yaml index 095e60a09d..79be85cde0 100644 --- a/deploy/kustomize/patches/worker-priority-xl.yaml +++ b/deploy/kustomize/patches/worker-priority-xl.yaml @@ -93,6 +93,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES diff --git 
a/deploy/kustomize/patches/worker-priority.yaml b/deploy/kustomize/patches/worker-priority.yaml index 60b5d476e4..65a682c1af 100644 --- a/deploy/kustomize/patches/worker-priority.yaml +++ b/deploy/kustomize/patches/worker-priority.yaml @@ -93,6 +93,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES diff --git a/deploy/kustomize/patches/worker-refresh-penalty.yaml b/deploy/kustomize/patches/worker-refresh-penalty.yaml index 94284a336b..dc5084cc9a 100644 --- a/deploy/kustomize/patches/worker-refresh-penalty.yaml +++ b/deploy/kustomize/patches/worker-refresh-penalty.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-refresh-xl.yaml b/deploy/kustomize/patches/worker-refresh-xl.yaml index dd42f06940..fe73826a92 100644 --- a/deploy/kustomize/patches/worker-refresh-xl.yaml +++ b/deploy/kustomize/patches/worker-refresh-xl.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-refresh.yaml b/deploy/kustomize/patches/worker-refresh.yaml index bcc458b889..823736e95e 100644 --- a/deploy/kustomize/patches/worker-refresh.yaml +++ b/deploy/kustomize/patches/worker-refresh.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: 
POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: TRINO_HOST value: ${TRINO_HOST} - name: TRINO_PORT diff --git a/deploy/kustomize/patches/worker-summary-penalty.yaml b/deploy/kustomize/patches/worker-summary-penalty.yaml index f301847c2c..a9f86ca64b 100644 --- a/deploy/kustomize/patches/worker-summary-penalty.yaml +++ b/deploy/kustomize/patches/worker-summary-penalty.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES diff --git a/deploy/kustomize/patches/worker-summary-xl.yaml b/deploy/kustomize/patches/worker-summary-xl.yaml index a05f113cdf..85a1836f80 100644 --- a/deploy/kustomize/patches/worker-summary-xl.yaml +++ b/deploy/kustomize/patches/worker-summary-xl.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: DELAYED_TASK_POLLING_MINUTES diff --git a/deploy/kustomize/patches/worker-summary.yaml b/deploy/kustomize/patches/worker-summary.yaml index 3922dee54d..2ad497df3a 100644 --- a/deploy/kustomize/patches/worker-summary.yaml +++ b/deploy/kustomize/patches/worker-summary.yaml @@ -91,6 +91,10 @@ value: ${POLLING_TIMER} - name: POLLING_BATCH_SIZE value: ${POLLING_BATCH_SIZE} + - name: PROCESSING_WAIT_TIMER + value: ${PROCESSING_WAIT_TIMER} + - name: LARGE_PROCESSING_WAIT_TIMER + value: ${LARGE_PROCESSING_WAIT_TIMER} - name: DELAYED_TASK_TIME value: ${DELAYED_TASK_TIME} - name: 
DELAYED_TASK_POLLING_MINUTES diff --git a/docker-compose.yml b/docker-compose.yml index c62247e40d..6be74caf71 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -195,6 +195,8 @@ services: - INITIAL_INGEST_OVERRIDE=${INITIAL_INGEST_OVERRIDE-False} - INITIAL_INGEST_NUM_MONTHS=${INITIAL_INGEST_NUM_MONTHS-2} - POLLING_TIMER=${POLLING_TIMER-60} + - PROCESSING_WAIT_TIMER=${PROCESSING_WAIT_TIMER-3} + - LARGE_PROCESSING_WAIT_TIMER=${LARGE_PROCESSING_WAIT_TIMER-7} - POLLING_BATCH_SIZE=${POLLING_BATCH_SIZE-100} - DELAYED_TASK_TIME=${DELAYED_TASK_TIME-20} - DELAYED_TASK_POLLING_MINUTES=${DELAYED_TASK_POLLING_MINUTES-5} diff --git a/koku/koku/settings.py b/koku/koku/settings.py index 8fd63235cd..94521acdeb 100644 --- a/koku/koku/settings.py +++ b/koku/koku/settings.py @@ -540,6 +540,9 @@ AUTO_DATA_INGEST = ENVIRONMENT.bool("AUTO_DATA_INGEST", default=True) POLLING_BATCH_SIZE = ENVIRONMENT.int("POLLING_BATCH_SIZE", default=100) POLLING_TIMER = ENVIRONMENT.int("POLLING_TIMER", default=86400) +# PROCESSING_WAIT_TIMER, used to prevent queuing new tasks until previous ones are complete +PROCESSING_WAIT_TIMER = ENVIRONMENT.int("PROCESSING_WAIT_TIMER", default=3) +LARGE_PROCESSING_WAIT_TIMER = ENVIRONMENT.int("LARGE_PROCESSING_WAIT_TIMER", default=7) QE_SCHEMA = ENVIRONMENT.get_value("QE_SCHEMA", default=None) # Flag for maximum retries for source delete before proceeding diff --git a/koku/masu/processor/orchestrator.py b/koku/masu/processor/orchestrator.py index bb0aa7fce7..b537fcc60a 100644 --- a/koku/masu/processor/orchestrator.py +++ b/koku/masu/processor/orchestrator.py @@ -6,6 +6,7 @@ import copy import logging from datetime import datetime +from datetime import timedelta from celery import chord from celery import group @@ -26,6 +27,7 @@ from masu.external.report_downloader import ReportDownloader from masu.external.report_downloader import ReportDownloaderError from masu.processor import is_cloud_source_processing_disabled +from masu.processor import 
is_customer_large from masu.processor import is_source_disabled from masu.processor.tasks import get_report_files from masu.processor.tasks import record_all_manifest_files @@ -69,6 +71,34 @@ def get_billing_months(number_of_months): return months +def check_currently_processing(schema, provider): + result = False + if provider.polling_timestamp: + # Set processing delta wait time + process_wait_delta = datetime.now(tz=settings.UTC) - timedelta(days=settings.PROCESSING_WAIT_TIMER) + if is_customer_large(schema): + process_wait_delta = datetime.now(tz=settings.UTC) - timedelta(days=settings.LARGE_PROCESSING_WAIT_TIMER) + # Check processing, if polling timestamp more recent than updated timestamp skip polling + if provider.data_updated_timestamp: + if provider.polling_timestamp > provider.data_updated_timestamp: + result = True + # Check failed processing, if updated timestamp not updated in x days we should polling again + if process_wait_delta > provider.data_updated_timestamp: + result = False + # Fallback to creation timestamp if its a new provider + else: + # Dont trigger provider that has no updated timestamp + result = True + # Enable initial ingest for new providers + if datetime.now(tz=settings.UTC) - timedelta(days=1) < provider.created_timestamp: + result = False + # Reprocess new providers that may have fialed to complete their first download + if process_wait_delta > provider.created_timestamp: + result = False + + return result + + class Orchestrator: """ Orchestrator for report processing. 
@@ -108,9 +138,25 @@ def get_polling_batch(self): batch = [] for provider in providers: - provider.polling_timestamp = self.dh.now - provider.save(update_fields=["polling_timestamp"]) schema_name = provider.account.get("schema_name") + # Check processing delta wait and skip polling if provider not completed processing + if check_currently_processing(schema_name, provider): + # We still need to update the timestamp between runs + LOG.info( + log_json( + "get_polling_batch", + msg="processing currently in progress for provider", + schema=schema_name, + provider=provider.uuid, + ) + ) + provider.polling_timestamp = self.dh.now_utc + provider.save(update_fields=["polling_timestamp"]) + continue + # This needs to happen after the first check since we use the original polling_timestamp + provider.polling_timestamp = self.dh.now_utc + provider.save(update_fields=["polling_timestamp"]) + # If a source is disabled/re-enabled it may not be collected till after the process_wait_delta expires if is_cloud_source_processing_disabled(schema_name): LOG.info(log_json("get_polling_batch", msg="processing disabled for schema", schema=schema_name)) continue diff --git a/koku/masu/test/processor/test_orchestrator.py b/koku/masu/test/processor/test_orchestrator.py index 19b51a77e0..f2ddeb1fea 100644 --- a/koku/masu/test/processor/test_orchestrator.py +++ b/koku/masu/test/processor/test_orchestrator.py @@ -7,6 +7,8 @@ import random from datetime import date from datetime import datetime +from datetime import timedelta +from types import SimpleNamespace from unittest.mock import patch from uuid import uuid4 @@ -16,6 +18,7 @@ from masu.config import Config from masu.external.report_downloader import ReportDownloaderError from masu.processor.expired_data_remover import ExpiredDataRemover +from masu.processor.orchestrator import check_currently_processing from masu.processor.orchestrator import get_billing_month_start from masu.processor.orchestrator import get_billing_months from 
masu.processor.orchestrator import Orchestrator @@ -92,7 +95,11 @@ def test_prepare_no_accounts(self, mock_downloader, mock_inspect, mock_account_a "masu.processor.orchestrator.is_cloud_source_processing_disabled", return_value=True, ) - def test_unleash_is_cloud_source_processing_disabled(self, mock_processing, mock_inspect): + @patch( + "masu.processor.orchestrator.check_currently_processing", + return_value=False, + ) + def test_unleash_is_cloud_source_processing_disabled(self, mock_processing_check, mock_processing, mock_inspect): """Test the is_cloud_source_processing_disabled.""" expected_result = "processing disabled" orchestrator = Orchestrator() @@ -142,7 +149,13 @@ def test_remove_expired_report_data_no_accounts(self, mock_task, mock_remover, m @patch("masu.processor.worker_cache.CELERY_INSPECT") @patch("masu.processor.orchestrator.update_account_aliases") @patch("masu.processor.orchestrator.Orchestrator.start_manifest_processing", side_effect=ReportDownloaderError) - def test_prepare_w_downloader_error(self, mock_task, mock_account_alias_updater, mock_inspect): + @patch( + "masu.processor.orchestrator.check_currently_processing", + return_value=False, + ) + def test_prepare_w_downloader_error( + self, mock_check_processing, mock_task, mock_account_alias_updater, mock_inspect + ): """Test that Orchestrator.prepare() handles downloader errors.""" orchestrator = Orchestrator() @@ -153,7 +166,11 @@ def test_prepare_w_downloader_error(self, mock_task, mock_account_alias_updater, @patch("masu.processor.worker_cache.CELERY_INSPECT") @patch("masu.processor.orchestrator.update_account_aliases") @patch("masu.processor.orchestrator.Orchestrator.start_manifest_processing", side_effect=Exception) - def test_prepare_w_exception(self, mock_task, mock_account_alias_updater, mock_inspect): + @patch( + "masu.processor.orchestrator.check_currently_processing", + return_value=False, + ) + def test_prepare_w_exception(self, mock_check_processing, mock_task, 
mock_account_alias_updater, mock_inspect): """Test that Orchestrator.prepare() handles broad exceptions.""" orchestrator = Orchestrator() @@ -164,7 +181,13 @@ def test_prepare_w_exception(self, mock_task, mock_account_alias_updater, mock_i @patch("masu.processor.worker_cache.CELERY_INSPECT") @patch("masu.processor.orchestrator.update_account_aliases") @patch("masu.processor.orchestrator.Orchestrator.start_manifest_processing", return_value=([], True)) - def test_prepare_w_manifest_processing_successful(self, mock_task, mock_account_alias_updater, mock_inspect): + @patch( + "masu.processor.orchestrator.check_currently_processing", + return_value=False, + ) + def test_prepare_w_manifest_processing_successful( + self, mock_check_processing, mock_task, mock_account_alias_updater, mock_inspect + ): """Test that Orchestrator.prepare() works when manifest processing is successful.""" # mock_account_alias_updater().get_label_details.return_value = (True, True) @@ -175,8 +198,12 @@ def test_prepare_w_manifest_processing_successful(self, mock_task, mock_account_ @patch("masu.processor.worker_cache.CELERY_INSPECT") @patch("masu.processor.orchestrator.update_account_aliases") @patch("masu.processor.orchestrator.Orchestrator.start_manifest_processing", return_value=([], True)) + @patch( + "masu.processor.orchestrator.check_currently_processing", + return_value=False, + ) def test_prepare_w_ingress_reports_processing_successful( - self, mock_task, mock_account_alias_updater, mock_inspect + self, mock_check_processing, mock_task, mock_account_alias_updater, mock_inspect ): """Test that Orchestrator.prepare() works when manifest processing is successful.""" # mock_account_alias_updater().get_label_details.return_value = (True, True) @@ -470,6 +497,12 @@ def test_orchestrator_args_polling_batch(self, *args): self.assertGreater(len(p), 0) self.assertEqual(len(p), expected_providers.count()) + # Check polling time updated while we are still processing + with 
patch("masu.processor.orchestrator.check_currently_processing", return_value=True): + o = Orchestrator(type=Provider.PROVIDER_AWS_LOCAL) + p = o.get_polling_batch() + self.assertEqual(len(p), 0) + def test_get_billing_months(self): """Test get_billing_months""" # test that num_months = 1 returns the current month @@ -513,3 +546,54 @@ def test_get_billing_month_start(self): result = get_billing_month_start(test_input.date()) self.assertEqual(result, expected) + + def test_check_currently_processing(self): + """Test to check if we should poll a provider that may have tasks in progress.""" + now = self.dh.now_utc + # Check we would process if not processed before + result = check_currently_processing(self.schema_name, self.ocp_provider) + self.assertEqual(result, False) + # Check we have task processing, no polling + one_days_ago = now - timedelta(days=1) + two_days_ago = now - timedelta(days=2) + provider_processing = SimpleNamespace() + provider_processing.polling_timestamp = one_days_ago + provider_processing.data_updated_timestamp = two_days_ago + result = check_currently_processing(self.schema_name, provider_processing) + self.assertEqual(result, True) + # Check we have task processing but likely failed, needs repolling + seven_days_ago = now - timedelta(days=7) + provider_failed = SimpleNamespace() + provider_failed.polling_timestamp = two_days_ago + provider_failed.data_updated_timestamp = seven_days_ago + result = check_currently_processing(self.schema_name, provider_failed) + self.assertEqual(result, False) + # Check we have task processing but likely failed, needs repolling + with patch("masu.processor.orchestrator.is_customer_large", return_value=True): + nine_days_ago = now - timedelta(days=9) + provider_failed = SimpleNamespace() + provider_failed.polling_timestamp = two_days_ago + provider_failed.data_updated_timestamp = nine_days_ago + result = check_currently_processing(self.schema_name, provider_failed) + self.assertEqual(result, False) + # Check 
initial ingest + initial_prov = SimpleNamespace() + initial_prov.polling_timestamp = now + initial_prov.created_timestamp = now + initial_prov.data_updated_timestamp = None + result = check_currently_processing(self.schema_name, initial_prov) + self.assertEqual(result, False) + # Check initial ingest processing + provider_initial_prov = SimpleNamespace() + provider_initial_prov.polling_timestamp = now + provider_initial_prov.created_timestamp = one_days_ago + provider_initial_prov.data_updated_timestamp = None + result = check_currently_processing(self.schema_name, provider_initial_prov) + self.assertEqual(result, True) + # Check initial ingest failed, needs repolling + initial_prov_failed = SimpleNamespace() + initial_prov_failed.polling_timestamp = now + initial_prov_failed.created_timestamp = nine_days_ago + initial_prov_failed.data_updated_timestamp = None + result = check_currently_processing(self.schema_name, initial_prov_failed) + self.assertEqual(result, False) From 4eddf8ea531a309b9296ff1a4d707e6f28906fd6 Mon Sep 17 00:00:00 2001 From: Sam Doran Date: Fri, 28 Jun 2024 14:43:16 -0400 Subject: [PATCH 19/26] [COST-5214] Move TARGETARCH declaration to the top of the Dockerfile (#5195) There is a bug in podman where this is only used correctly for the multi-stage build if it is defined as the first line. Update Jenkinsfile to use RHEL 8 Unfortunately this breaks the image build for Docker. I'll fix that in a followup PR. 
--- Dockerfile | 4 ++-- Jenkinsfile | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 14f3d3f598..d8ee0ba594 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,5 @@ +ARG TARGETARCH + FROM registry.access.redhat.com/ubi8/ubi-minimal:latest AS base USER root @@ -61,8 +63,6 @@ RUN ldconfig # No intermetiate steps for x86_64, but declare it so it can be used for the final image FROM --platform=amd64 base AS stage-amd64 -ARG TARGETARCH - FROM stage-${TARGETARCH} AS final # PIPENV_DEV is set to true in the docker-compose allowing # local builds to install the dev dependencies diff --git a/Jenkinsfile b/Jenkinsfile index 65c83ac953..1d4a63375b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -22,7 +22,7 @@ def secrets = [ def configuration = [vaultUrl: params.VAULT_ADDRESS, vaultCredentialId: params.VAULT_CREDS_ID, engineVersion: 1] pipeline { - agent { label 'insights' } + agent { label 'rhel8' } options { timestamps() } From a42cf328cd4492a638ac49d72abd1312e1dd16bc Mon Sep 17 00:00:00 2001 From: Luke Couzens Date: Fri, 28 Jun 2024 21:38:05 +0100 Subject: [PATCH 20/26] [COST-5213] - fix S3 prepare (#5194) * Switch default parquet flag to prevent iterating on all files in each worker when there is nothing to delete --- .../database/report_manifest_db_accessor.py | 8 ------ .../downloader/aws/aws_report_downloader.py | 1 - .../azure/azure_report_downloader.py | 1 - .../test_report_manifest_db_accessor.py | 26 +++++-------------- .../parquet/test_parquet_report_processor.py | 7 +++-- koku/masu/util/aws/common.py | 11 +++++--- ...tusagereportmanifest_s3_parquet_cleared.py | 18 +++++++++++++ koku/reporting_common/models.py | 2 +- 8 files changed, 39 insertions(+), 35 deletions(-) create mode 100644 koku/reporting_common/migrations/0042_alter_costusagereportmanifest_s3_parquet_cleared.py diff --git a/koku/masu/database/report_manifest_db_accessor.py b/koku/masu/database/report_manifest_db_accessor.py index b4c89261d1..d17c323d4d 
100644 --- a/koku/masu/database/report_manifest_db_accessor.py +++ b/koku/masu/database/report_manifest_db_accessor.py @@ -245,14 +245,6 @@ def mark_s3_parquet_cleared(self, manifest: CostUsageReportManifest, report_key: update_fields = ["s3_parquet_cleared"] manifest.save(update_fields=update_fields) - def mark_s3_parquet_to_be_cleared(self, manifest_id): - """Mark manifest to clear parquet files.""" - manifest = self.get_manifest_by_id(manifest_id) - if manifest: - # Set this to false to reprocesses a full month of files for AWS/Azure - manifest.s3_parquet_cleared = False - manifest.save(update_fields=["s3_parquet_cleared"]) - def set_manifest_daily_start_date(self, manifest_id, date): """ Mark manifest processing daily archive start date. diff --git a/koku/masu/external/downloader/aws/aws_report_downloader.py b/koku/masu/external/downloader/aws/aws_report_downloader.py index 7fd5ff2682..84933f5e47 100644 --- a/koku/masu/external/downloader/aws/aws_report_downloader.py +++ b/koku/masu/external/downloader/aws/aws_report_downloader.py @@ -82,7 +82,6 @@ def get_processing_date( if ( data_frame[invoice_bill].any() and start_date.month != DateHelper().now_utc.month or ingress_reports ) or not check_provider_setup_complete(provider_uuid): - ReportManifestDBAccessor().mark_s3_parquet_to_be_cleared(manifest_id) process_date = ReportManifestDBAccessor().set_manifest_daily_start_date(manifest_id, start_date) else: process_date = utils.get_or_clear_daily_s3_by_date( diff --git a/koku/masu/external/downloader/azure/azure_report_downloader.py b/koku/masu/external/downloader/azure/azure_report_downloader.py index 900060c153..aa5796979d 100644 --- a/koku/masu/external/downloader/azure/azure_report_downloader.py +++ b/koku/masu/external/downloader/azure/azure_report_downloader.py @@ -68,7 +68,6 @@ def get_processing_date( or ingress_reports ): process_date = start_date - ReportManifestDBAccessor().mark_s3_parquet_to_be_cleared(manifest_id) process_date = 
ReportManifestDBAccessor().set_manifest_daily_start_date(manifest_id, process_date) else: process_date = get_or_clear_daily_s3_by_date( diff --git a/koku/masu/test/database/test_report_manifest_db_accessor.py b/koku/masu/test/database/test_report_manifest_db_accessor.py index 0a777630df..87d0f12e87 100644 --- a/koku/masu/test/database/test_report_manifest_db_accessor.py +++ b/koku/masu/test/database/test_report_manifest_db_accessor.py @@ -182,33 +182,21 @@ def test_get_s3_parquet_cleared_no_manifest(self): status = self.manifest_accessor.get_s3_parquet_cleared(None) self.assertFalse(status) - def test_get_s3_parquet_cleared_non_ocp(self): - """Test that s3 CSV clear status is reported.""" - status = self.manifest_accessor.get_s3_parquet_cleared(self.manifest) - self.assertTrue(status) - - self.manifest_accessor.mark_s3_parquet_to_be_cleared(self.manifest.id) - fetch_manifest = self.manifest_accessor.get_manifest_by_id(self.manifest.id) - - status = self.manifest_accessor.get_s3_parquet_cleared(fetch_manifest) - self.assertFalse(status) - def test_get_s3_parquet_cleared_ocp_no_key(self): """Test that s3 CSV clear status is reported.""" self.manifest_dict["cluster_id"] = "cluster_id" self.manifest_dict["assembly_id"] = uuid.uuid4() manifest = self.baker.make("CostUsageReportManifest", **self.manifest_dict) status = self.manifest_accessor.get_s3_parquet_cleared(manifest) - self.assertTrue(status) + self.assertFalse(status) - self.manifest_accessor.mark_s3_parquet_to_be_cleared(manifest.id) - fetch_manifest = self.manifest_accessor.get_manifest_by_id(manifest.id) + self.manifest_accessor.mark_s3_parquet_cleared(manifest) - self.assertDictEqual(fetch_manifest.s3_parquet_cleared_tracker, {}) - self.assertFalse(fetch_manifest.s3_parquet_cleared) + self.assertDictEqual(manifest.s3_parquet_cleared_tracker, {}) + self.assertTrue(manifest.s3_parquet_cleared) - status = self.manifest_accessor.get_s3_parquet_cleared(fetch_manifest) - self.assertFalse(status) + status = 
self.manifest_accessor.get_s3_parquet_cleared(manifest) + self.assertTrue(status) def test_get_s3_parquet_cleared_ocp_with_key(self): """Test that s3 CSV clear status is reported.""" @@ -222,7 +210,7 @@ def test_get_s3_parquet_cleared_ocp_with_key(self): self.manifest_accessor.mark_s3_parquet_cleared(manifest, key) self.assertDictEqual(manifest.s3_parquet_cleared_tracker, {key: True}) - self.assertTrue(manifest.s3_parquet_cleared) + self.assertFalse(manifest.s3_parquet_cleared) status = self.manifest_accessor.get_s3_parquet_cleared(manifest, key) self.assertTrue(status) diff --git a/koku/masu/test/processor/parquet/test_parquet_report_processor.py b/koku/masu/test/processor/parquet/test_parquet_report_processor.py index 6c4bdb546e..5371bb1d7a 100644 --- a/koku/masu/test/processor/parquet/test_parquet_report_processor.py +++ b/koku/masu/test/processor/parquet/test_parquet_report_processor.py @@ -275,8 +275,11 @@ def test_convert_to_parquet(self, mock_remove, mock_exists): with patch( "masu.processor.parquet.parquet_report_processor.ParquetReportProcessor.report_type", return_value=None ): - with self.assertRaises(ParquetReportProcessorError): - self.report_processor_ocp.convert_to_parquet() + with patch( + "masu.processor.parquet.parquet_report_processor.ParquetReportProcessor.prepare_parquet_s3" + ): + with self.assertRaises(ParquetReportProcessorError): + self.report_processor_ocp.convert_to_parquet() expected = "no split files to convert to parquet" with patch("masu.processor.parquet.parquet_report_processor.get_path_prefix", return_value=""), patch.object( diff --git a/koku/masu/util/aws/common.py b/koku/masu/util/aws/common.py index e562db13bf..1f4df87815 100644 --- a/koku/masu/util/aws/common.py +++ b/koku/masu/util/aws/common.py @@ -637,7 +637,6 @@ def get_or_clear_daily_s3_by_date(csv_s3_path, provider_uuid, start_date, end_da delete_s3_objects(request_id, to_delete, context) manifest = ReportManifestDBAccessor().get_manifest_by_id(manifest_id) 
ReportManifestDBAccessor().mark_s3_csv_cleared(manifest) - ReportManifestDBAccessor().mark_s3_parquet_to_be_cleared(manifest_id) LOG.info( log_json(msg="removed csv files, marked manifest csv cleared and parquet not cleared", context=context) ) @@ -823,7 +822,7 @@ def delete_s3_objects(request_id, keys_to_delete, context) -> list[str]: def clear_s3_files( - csv_s3_path, provider_uuid, start_date, metadata_key, metadata_value_check, context, request_id, invoice_month=None + csv_s3_path, provider_uuid, start_date, metadata_key, manifest_id, context, request_id, invoice_month=None ): """Clear s3 files for daily archive processing""" account = context.get("account") @@ -858,7 +857,7 @@ def clear_s3_files( try: existing_object = obj_summary.Object() metadata_value = existing_object.metadata.get(metadata_key) - if str(metadata_value) != str(metadata_value_check): + if str(metadata_value) != str(manifest_id): to_delete.append(existing_object.key) except (ClientError) as err: LOG.warning( @@ -871,6 +870,12 @@ def clear_s3_files( exc_info=err, ) delete_s3_objects(request_id, to_delete, context) + manifest_accessor = ReportManifestDBAccessor() + manifest = manifest_accessor.get_manifest_by_id(manifest_id) + # Note: Marking the parquet files cleared here prevents all + # the parquet files for the manifest from being deleted + # later on in report_parquet_processor + manifest_accessor.mark_s3_parquet_cleared(manifest) def remove_files_not_in_set_from_s3_bucket(request_id, s3_path, manifest_id, context=None): diff --git a/koku/reporting_common/migrations/0042_alter_costusagereportmanifest_s3_parquet_cleared.py b/koku/reporting_common/migrations/0042_alter_costusagereportmanifest_s3_parquet_cleared.py new file mode 100644 index 0000000000..e37cd61017 --- /dev/null +++ b/koku/reporting_common/migrations/0042_alter_costusagereportmanifest_s3_parquet_cleared.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.11 on 2024-06-28 12:00 +from django.db import migrations +from django.db 
import models + + +class Migration(migrations.Migration): + + dependencies = [ + ("reporting_common", "0041_diskcapacity"), + ] + + operations = [ + migrations.AlterField( + model_name="costusagereportmanifest", + name="s3_parquet_cleared", + field=models.BooleanField(default=False, null=True), + ), + ] diff --git a/koku/reporting_common/models.py b/koku/reporting_common/models.py index cc89cc757e..4d37c1c9d6 100644 --- a/koku/reporting_common/models.py +++ b/koku/reporting_common/models.py @@ -43,7 +43,7 @@ class Meta: # s3_csv_cleared used in AWS/Azure to indicate csv's have been cleared for daily archive processing s3_csv_cleared = models.BooleanField(default=False, null=True) # s3_parquet_cleared used to indicate parquet files have been cleared prior to csv to parquet conversion - s3_parquet_cleared = models.BooleanField(default=True, null=True) + s3_parquet_cleared = models.BooleanField(default=False, null=True) # Indicates what initial date to start at for daily processing daily_archive_start_date = models.DateTimeField(null=True) operator_version = models.TextField(null=True) From abd4261b6c60db3ef68960ec4db193a2a00f619f Mon Sep 17 00:00:00 2001 From: Sam Doran Date: Mon, 1 Jul 2024 16:45:27 -0400 Subject: [PATCH 21/26] [COST-5214] pass build-arg to docker build command (#5196) --- Makefile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 76ce5d44c9..f381795137 100644 --- a/Makefile +++ b/Makefile @@ -340,7 +340,9 @@ _koku-wait: done docker-build: - $(DOCKER_COMPOSE) build koku-base + # TARGETARCH: https://github.com/containers/podman/issues/23046 is resolved. 
+ $(DOCKER_COMPOSE) build --build-arg TARGETARCH=$(shell uname -m | sed s/x86_64/amd64/) koku-base + docker-up: docker-build $(DOCKER_COMPOSE) up -d --scale koku-worker=$(scale) From 97ba98e2996ff15f2ea34f499525e80c0c0056a3 Mon Sep 17 00:00:00 2001 From: Cody Myers Date: Tue, 2 Jul 2024 08:18:28 -0400 Subject: [PATCH 22/26] [COST-5216] Delete filtering optimization (#5197) --- koku/masu/test/util/aws/test_common.py | 93 ++++++++++++++++---------- koku/masu/util/aws/common.py | 24 +++++-- 2 files changed, 75 insertions(+), 42 deletions(-) diff --git a/koku/masu/test/util/aws/test_common.py b/koku/masu/test/util/aws/test_common.py index 393b844ad9..20849322ee 100644 --- a/koku/masu/test/util/aws/test_common.py +++ b/koku/masu/test/util/aws/test_common.py @@ -507,34 +507,45 @@ def test_remove_s3_objects_not_matching_metadata(self): "account", Provider.PROVIDER_AWS, "provider_uuid", start_date, Config.CSV_DATA_TYPE ) expected_key = "not_matching_key" - mock_object = Mock(metadata={metadata_key: "this will be deleted"}, key=expected_key) not_matching_summary = Mock() - not_matching_summary.Object.return_value = mock_object + not_matching_summary.key = expected_key + not_expected_key = "matching_key" - mock_object = Mock(metadata={metadata_key: metadata_value}, key=not_expected_key) matching_summary = Mock() - matching_summary.Object.return_value = mock_object + matching_summary.key = not_expected_key + + def mock_head_object(Bucket, Key): + if Key == expected_key: + return {"Metadata": {metadata_key: "this will be deleted"}} + elif Key == not_expected_key: + return {"Metadata": {metadata_key: metadata_value}} + raise ClientError({}, "Error") + with patch("masu.util.aws.common.get_s3_resource") as mock_s3: mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ not_matching_summary, matching_summary, ] - removed = utils.delete_s3_objects_not_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value - ) 
- self.assertListEqual(removed, [{"Key": expected_key}]) + with patch("boto3.client") as mock_s3_client: + mock_s3_client.return_value.head_object.side_effect = mock_head_object + removed = utils.delete_s3_objects_not_matching_metadata( + "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, [{"Key": expected_key}]) with patch("masu.util.aws.common.get_s3_resource") as mock_s3: - client_error_object = Mock() - client_error_object.Object.side_effect = ClientError({}, "Error") - mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ - not_matching_summary, - client_error_object, - ] - removed = utils.delete_s3_objects_not_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value - ) - self.assertListEqual(removed, []) + client_error_summary = Mock() + client_error_summary.key = expected_key + with patch("boto3.client") as mock_s3_client: + mock_s3_client.return_value.head_object.side_effect = ClientError({}, "Error") + mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ + not_matching_summary, + client_error_summary, + ] + removed = utils.delete_s3_objects_not_matching_metadata( + "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, []) with patch("masu.util.aws.common.get_s3_objects_not_matching_metadata") as mock_get_objects, patch( "masu.util.aws.common.get_s3_resource" @@ -614,35 +625,45 @@ def test_remove_s3_objects_matching_metadata(self): "account", Provider.PROVIDER_AWS, "provider_uuid", start_date, Config.CSV_DATA_TYPE ) not_expected_key = "not_matching_key" - mock_object = Mock(metadata={metadata_key: "this will not be deleted"}, key=not_expected_key) not_matching_summary = Mock() - not_matching_summary.Object.return_value = mock_object + not_matching_summary.key = not_expected_key expected_key = "matching_key" - mock_object 
= Mock(metadata={metadata_key: metadata_value}, key=expected_key) matching_summary = Mock() - matching_summary.Object.return_value = mock_object + matching_summary.key = expected_key + + def mock_head_object(Bucket, Key): + if Key == not_expected_key: + return {"Metadata": {metadata_key: "this will not be deleted"}} + elif Key == expected_key: + return {"Metadata": {metadata_key: metadata_value}} + raise ClientError({}, "Error") + with patch("masu.util.aws.common.get_s3_resource") as mock_s3: mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ not_matching_summary, matching_summary, ] - removed = utils.delete_s3_objects_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value - ) - self.assertListEqual(removed, [{"Key": expected_key}]) + with patch("boto3.client") as mock_s3_client: + mock_s3_client.return_value.head_object.side_effect = mock_head_object + removed = utils.delete_s3_objects_matching_metadata( + "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, [{"Key": expected_key}]) with patch("masu.util.aws.common.get_s3_resource") as mock_s3: - client_error_object = Mock() - client_error_object.Object.side_effect = ClientError({}, "Error") - mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ - not_matching_summary, - client_error_object, - ] - removed = utils.delete_s3_objects_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value - ) - self.assertListEqual(removed, []) + client_error_summary = Mock() + client_error_summary.key = not_expected_key + with patch("boto3.client") as mock_s3_client: + mock_s3_client.return_value.head_object.side_effect = ClientError({}, "Error") + mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ + not_matching_summary, + client_error_summary, + ] + removed = 
utils.delete_s3_objects_matching_metadata( + "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, []) with patch("masu.util.aws.common.get_s3_objects_matching_metadata") as mock_get_objects, patch( "masu.util.aws.common.get_s3_resource" diff --git a/koku/masu/util/aws/common.py b/koku/masu/util/aws/common.py index 1f4df87815..aaf9791e08 100644 --- a/koku/masu/util/aws/common.py +++ b/koku/masu/util/aws/common.py @@ -715,12 +715,18 @@ def get_s3_objects_matching_metadata( if context is None: context = {} try: + s3_client = boto3.client( + "s3", + aws_access_key_id=settings.S3_ACCESS_KEY, + aws_secret_access_key=settings.S3_SECRET, + region_name=settings.S3_REGION, + ) keys = [] for obj_summary in _get_s3_objects(s3_path): - existing_object = obj_summary.Object() - metadata_value = existing_object.metadata.get(metadata_key) + response = s3_client.head_object(Bucket=obj_summary.bucket_name, Key=obj_summary.key) + metadata_value = response["Metadata"].get(metadata_key) if metadata_value == metadata_value_check: - keys.append(existing_object.key) + keys.append(obj_summary.key) return keys except (EndpointConnectionError, ClientError) as err: LOG.warning( @@ -743,12 +749,18 @@ def get_s3_objects_not_matching_metadata( if context is None: context = {} try: + s3_client = boto3.client( + "s3", + aws_access_key_id=settings.S3_ACCESS_KEY, + aws_secret_access_key=settings.S3_SECRET, + region_name=settings.S3_REGION, + ) keys = [] for obj_summary in _get_s3_objects(s3_path): - existing_object = obj_summary.Object() - metadata_value = existing_object.metadata.get(metadata_key) + response = s3_client.head_object(Bucket=obj_summary.bucket_name, Key=obj_summary.key) + metadata_value = response["Metadata"].get(metadata_key) if metadata_value != metadata_value_check: - keys.append(existing_object.key) + keys.append(obj_summary.key) return keys except (EndpointConnectionError, ClientError) as err: LOG.warning( 
From 066faf626f01558b726ec5927d17ab1ff81e43ec Mon Sep 17 00:00:00 2001 From: Cody Myers Date: Tue, 2 Jul 2024 09:56:18 -0400 Subject: [PATCH 23/26] Revert "[COST-5216] Delete filtering optimization (#5197)" (#5200) This reverts commit 97ba98e2996ff15f2ea34f499525e80c0c0056a3. --- koku/masu/test/util/aws/test_common.py | 93 ++++++++++---------------- koku/masu/util/aws/common.py | 24 ++----- 2 files changed, 42 insertions(+), 75 deletions(-) diff --git a/koku/masu/test/util/aws/test_common.py b/koku/masu/test/util/aws/test_common.py index 20849322ee..393b844ad9 100644 --- a/koku/masu/test/util/aws/test_common.py +++ b/koku/masu/test/util/aws/test_common.py @@ -507,45 +507,34 @@ def test_remove_s3_objects_not_matching_metadata(self): "account", Provider.PROVIDER_AWS, "provider_uuid", start_date, Config.CSV_DATA_TYPE ) expected_key = "not_matching_key" + mock_object = Mock(metadata={metadata_key: "this will be deleted"}, key=expected_key) not_matching_summary = Mock() - not_matching_summary.key = expected_key - + not_matching_summary.Object.return_value = mock_object not_expected_key = "matching_key" + mock_object = Mock(metadata={metadata_key: metadata_value}, key=not_expected_key) matching_summary = Mock() - matching_summary.key = not_expected_key - - def mock_head_object(Bucket, Key): - if Key == expected_key: - return {"Metadata": {metadata_key: "this will be deleted"}} - elif Key == not_expected_key: - return {"Metadata": {metadata_key: metadata_value}} - raise ClientError({}, "Error") - + matching_summary.Object.return_value = mock_object with patch("masu.util.aws.common.get_s3_resource") as mock_s3: mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ not_matching_summary, matching_summary, ] - with patch("boto3.client") as mock_s3_client: - mock_s3_client.return_value.head_object.side_effect = mock_head_object - removed = utils.delete_s3_objects_not_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, 
metadata_value_check=metadata_value - ) - self.assertListEqual(removed, [{"Key": expected_key}]) + removed = utils.delete_s3_objects_not_matching_metadata( + "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, [{"Key": expected_key}]) with patch("masu.util.aws.common.get_s3_resource") as mock_s3: - client_error_summary = Mock() - client_error_summary.key = expected_key - with patch("boto3.client") as mock_s3_client: - mock_s3_client.return_value.head_object.side_effect = ClientError({}, "Error") - mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ - not_matching_summary, - client_error_summary, - ] - removed = utils.delete_s3_objects_not_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value - ) - self.assertListEqual(removed, []) + client_error_object = Mock() + client_error_object.Object.side_effect = ClientError({}, "Error") + mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ + not_matching_summary, + client_error_object, + ] + removed = utils.delete_s3_objects_not_matching_metadata( + "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, []) with patch("masu.util.aws.common.get_s3_objects_not_matching_metadata") as mock_get_objects, patch( "masu.util.aws.common.get_s3_resource" @@ -625,45 +614,35 @@ def test_remove_s3_objects_matching_metadata(self): "account", Provider.PROVIDER_AWS, "provider_uuid", start_date, Config.CSV_DATA_TYPE ) not_expected_key = "not_matching_key" + mock_object = Mock(metadata={metadata_key: "this will not be deleted"}, key=not_expected_key) not_matching_summary = Mock() - not_matching_summary.key = not_expected_key + not_matching_summary.Object.return_value = mock_object expected_key = "matching_key" + mock_object = Mock(metadata={metadata_key: metadata_value}, key=expected_key) 
matching_summary = Mock() - matching_summary.key = expected_key - - def mock_head_object(Bucket, Key): - if Key == not_expected_key: - return {"Metadata": {metadata_key: "this will not be deleted"}} - elif Key == expected_key: - return {"Metadata": {metadata_key: metadata_value}} - raise ClientError({}, "Error") - + matching_summary.Object.return_value = mock_object with patch("masu.util.aws.common.get_s3_resource") as mock_s3: mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ not_matching_summary, matching_summary, ] - with patch("boto3.client") as mock_s3_client: - mock_s3_client.return_value.head_object.side_effect = mock_head_object - removed = utils.delete_s3_objects_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value - ) - self.assertListEqual(removed, [{"Key": expected_key}]) + removed = utils.delete_s3_objects_matching_metadata( + "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, [{"Key": expected_key}]) with patch("masu.util.aws.common.get_s3_resource") as mock_s3: - client_error_summary = Mock() - client_error_summary.key = not_expected_key - with patch("boto3.client") as mock_s3_client: - mock_s3_client.return_value.head_object.side_effect = ClientError({}, "Error") - mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ - not_matching_summary, - client_error_summary, - ] - removed = utils.delete_s3_objects_matching_metadata( - "request_id", s3_csv_path, metadata_key=metadata_key, metadata_value_check=metadata_value - ) - self.assertListEqual(removed, []) + client_error_object = Mock() + client_error_object.Object.side_effect = ClientError({}, "Error") + mock_s3.return_value.Bucket.return_value.objects.filter.return_value = [ + not_matching_summary, + client_error_object, + ] + removed = utils.delete_s3_objects_matching_metadata( + "request_id", s3_csv_path, 
metadata_key=metadata_key, metadata_value_check=metadata_value + ) + self.assertListEqual(removed, []) with patch("masu.util.aws.common.get_s3_objects_matching_metadata") as mock_get_objects, patch( "masu.util.aws.common.get_s3_resource" diff --git a/koku/masu/util/aws/common.py b/koku/masu/util/aws/common.py index aaf9791e08..1f4df87815 100644 --- a/koku/masu/util/aws/common.py +++ b/koku/masu/util/aws/common.py @@ -715,18 +715,12 @@ def get_s3_objects_matching_metadata( if context is None: context = {} try: - s3_client = boto3.client( - "s3", - aws_access_key_id=settings.S3_ACCESS_KEY, - aws_secret_access_key=settings.S3_SECRET, - region_name=settings.S3_REGION, - ) keys = [] for obj_summary in _get_s3_objects(s3_path): - response = s3_client.head_object(Bucket=obj_summary.bucket_name, Key=obj_summary.key) - metadata_value = response["Metadata"].get(metadata_key) + existing_object = obj_summary.Object() + metadata_value = existing_object.metadata.get(metadata_key) if metadata_value == metadata_value_check: - keys.append(obj_summary.key) + keys.append(existing_object.key) return keys except (EndpointConnectionError, ClientError) as err: LOG.warning( @@ -749,18 +743,12 @@ def get_s3_objects_not_matching_metadata( if context is None: context = {} try: - s3_client = boto3.client( - "s3", - aws_access_key_id=settings.S3_ACCESS_KEY, - aws_secret_access_key=settings.S3_SECRET, - region_name=settings.S3_REGION, - ) keys = [] for obj_summary in _get_s3_objects(s3_path): - response = s3_client.head_object(Bucket=obj_summary.bucket_name, Key=obj_summary.key) - metadata_value = response["Metadata"].get(metadata_key) + existing_object = obj_summary.Object() + metadata_value = existing_object.metadata.get(metadata_key) if metadata_value != metadata_value_check: - keys.append(obj_summary.key) + keys.append(existing_object.key) return keys except (EndpointConnectionError, ClientError) as err: LOG.warning( From 0507abbb7d6011a4042fccd7f028925aa0078f56 Mon Sep 17 00:00:00 2001 
From: Luke Couzens Date: Tue, 2 Jul 2024 17:48:09 +0100 Subject: [PATCH 24/26] [COST-5226] - Skip S3 delete (daily flow) if we have marked deletion complete. (#5198) * dont attempt more S3 deletes if we have marked deletion complete --- koku/masu/util/aws/common.py | 37 ++++++++++++++---------------------- 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/koku/masu/util/aws/common.py b/koku/masu/util/aws/common.py index 1f4df87815..9d1abd0121 100644 --- a/koku/masu/util/aws/common.py +++ b/koku/masu/util/aws/common.py @@ -587,10 +587,13 @@ def get_or_clear_daily_s3_by_date(csv_s3_path, provider_uuid, start_date, end_da Fetches latest processed date based on daily csv files and clears relevant s3 files """ # We do this if we have multiple workers running different files for a single manifest. - processing_date = ReportManifestDBAccessor().get_manifest_daily_start_date(manifest_id) + manifest_accessor = ReportManifestDBAccessor() + manifest = manifest_accessor.get_manifest_by_id(manifest_id) + processing_date = manifest_accessor.get_manifest_daily_start_date(manifest_id) if processing_date: - # Prevent other works running trino queries until all files are removed. - clear_s3_files(csv_s3_path, provider_uuid, processing_date, "manifestid", manifest_id, context, request_id) + if not manifest_accessor.get_s3_parquet_cleared(manifest): + # Prevent other works running trino queries until all files are removed. 
+ clear_s3_files(csv_s3_path, provider_uuid, processing_date, "manifestid", manifest_id, context, request_id) return processing_date processing_date = start_date try: @@ -610,7 +613,7 @@ def get_or_clear_daily_s3_by_date(csv_s3_path, provider_uuid, start_date, end_da process_date - datetime.timedelta(days=3) if process_date.day > 3 else process_date.replace(day=1) ) # Set processing date for all workers - processing_date = ReportManifestDBAccessor().set_manifest_daily_start_date(manifest_id, processing_date) + processing_date = manifest_accessor.set_manifest_daily_start_date(manifest_id, processing_date) # Try to clear s3 files for dates. Small edge case, we may have parquet files even without csvs clear_s3_files(csv_s3_path, provider_uuid, processing_date, "manifestid", manifest_id, context, request_id) except (EndpointConnectionError, ClientError, AttributeError, ValueError): @@ -626,7 +629,7 @@ def get_or_clear_daily_s3_by_date(csv_s3_path, provider_uuid, start_date, end_da bucket=settings.S3_BUCKET_NAME, ), ) - processing_date = ReportManifestDBAccessor().set_manifest_daily_start_date(manifest_id, processing_date) + processing_date = manifest_accessor.set_manifest_daily_start_date(manifest_id, processing_date) to_delete = get_s3_objects_not_matching_metadata( request_id, csv_s3_path, @@ -635,8 +638,7 @@ def get_or_clear_daily_s3_by_date(csv_s3_path, provider_uuid, start_date, end_da context=context, ) delete_s3_objects(request_id, to_delete, context) - manifest = ReportManifestDBAccessor().get_manifest_by_id(manifest_id) - ReportManifestDBAccessor().mark_s3_csv_cleared(manifest) + manifest_accessor.mark_s3_csv_cleared(manifest) LOG.info( log_json(msg="removed csv files, marked manifest csv cleared and parquet not cleared", context=context) ) @@ -853,22 +855,11 @@ def clear_s3_files( s3_prefixes.append(parquet_ocp_on_cloud_path_s3 + path) to_delete = [] for prefix in s3_prefixes: - for obj_summary in _get_s3_objects(prefix): - try: - existing_object = 
obj_summary.Object() - metadata_value = existing_object.metadata.get(metadata_key) - if str(metadata_value) != str(manifest_id): - to_delete.append(existing_object.key) - except (ClientError) as err: - LOG.warning( - log_json( - request_id, - msg="unable to get matching object, likely deleted by another worker", - context=context, - bucket=settings.S3_BUCKET_NAME, - ), - exc_info=err, - ) + to_delete.extend( + get_s3_objects_not_matching_metadata( + request_id, prefix, metadata_key=metadata_key, metadata_value_check=str(manifest_id), context=context + ) + ) delete_s3_objects(request_id, to_delete, context) manifest_accessor = ReportManifestDBAccessor() manifest = manifest_accessor.get_manifest_by_id(manifest_id) From 89feae06140382b4c6dbbd655f5cb5269ed7a134 Mon Sep 17 00:00:00 2001 From: Michael Skarbek Date: Tue, 2 Jul 2024 14:53:15 -0400 Subject: [PATCH 25/26] [COST-5076] upgrade to python 3.11 (#4444) * upgrade to python 3.11 * pipfile update * add gcc-c++ compiler Co-authored-by: Sam Doran * update test * replace gcc with gcc-c++ --------- Co-authored-by: Sam Doran --- .github/workflows/ci.yml | 4 +- Dockerfile | 10 +- Pipfile | 4 +- Pipfile.lock | 1477 ++++++++++++++++++++------------------ README.md | 2 +- 5 files changed, 780 insertions(+), 717 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce83fd2798..72230ed851 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: - name: Install Python uses: actions/setup-python@v5.1.0 with: - python-version: '3.9' + python-version: '3.11' - name: Run pre-commit checks uses: pre-commit/action@v3.0.1 @@ -153,7 +153,7 @@ jobs: max-parallel: 4 matrix: python-version: - - '3.9' + - '3.11' env: COMPOSE_FILE: .github/postgres/docker-compose.yaml diff --git a/Dockerfile b/Dockerfile index d8ee0ba594..43714e9c6c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,7 @@ FROM registry.access.redhat.com/ubi8/ubi-minimal:latest AS base USER root -ENV 
PYTHON_VERSION=3.9 \ +ENV PYTHON_VERSION=3.11 \ PYTHONUNBUFFERED=1 \ PYTHONIOENCODING=UTF-8 \ LC_ALL=en_US.UTF-8 \ @@ -24,8 +24,8 @@ LABEL summary="$SUMMARY" \ io.k8s.description="$DESCRIPTION" \ io.k8s.display-name="Koku" \ io.openshift.expose-services="8000:http" \ - io.openshift.tags="builder,python,python39,rh-python39" \ - com.redhat.component="python39-docker" \ + io.openshift.tags="builder,python,python3.11,rh-python3.11" \ + com.redhat.component="python3.11-docker" \ name="Koku" \ version="1" \ maintainer="Red Hat Cost Management Services " @@ -35,7 +35,7 @@ LABEL summary="$SUMMARY" \ # gcc to compile some python packages (e.g. ciso8601) # shadow-utils to make useradd available # libpq-devel needed for building psycopg2 -RUN INSTALL_PKGS="python39 python39-devel glibc-langpack-en gcc shadow-utils libpq-devel" && \ +RUN INSTALL_PKGS="python3.11 python3.11-devel glibc-langpack-en gcc-c++ shadow-utils libpq-devel" && \ microdnf --nodocs -y upgrade && \ microdnf reinstall tzdata && \ microdnf -y --setopt=tsflags=nodocs --setopt=install_weak_deps=0 install $INSTALL_PKGS && \ @@ -72,7 +72,7 @@ ARG USER_ID=1000 # Create a Python virtual environment for use by any application to avoid # potential conflicts with Python packages preinstalled in the main Python # installation. 
-RUN python3.9 -m venv /pipenv-venv \ +RUN python3.11 -m venv /pipenv-venv \ && /pipenv-venv/bin/python -m pip install --upgrade pip setuptools \ && /pipenv-venv/bin/python -m pip install pipenv diff --git a/Pipfile b/Pipfile index eb948c3172..27c1aeef99 100644 --- a/Pipfile +++ b/Pipfile @@ -13,7 +13,7 @@ azure-mgmt-compute = "*" azure-mgmt-resource = ">=8.0" azure-mgmt-storage = ">=20.1.0" azure-storage-blob = ">=12.1" -boto3 = ">=1.10" +boto3 = ">=1.34" bs4 = ">=0.0.1" cachetools = ">=4.1.0" celery = ">=5.2.2" @@ -85,4 +85,4 @@ tox = ">=3.0" watchdog = ">=2.1.1" [requires] -python_version = "3.9" +python_version = "3.11" diff --git a/Pipfile.lock b/Pipfile.lock index 2c7772b475..f618322901 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "b67b765f3e4de6c57aed4affb209e2fef66468fcded4fa762d5147bb829dc685" + "sha256": "95846845b22296408a13fca3baf3ee881254ccc7a7debb3bf5595a89f761adcf" }, "pipfile-spec": 6, "requires": { - "python_version": "3.9" + "python_version": "3.11" }, "sources": [ { @@ -42,11 +42,11 @@ }, "app-common-python": { "hashes": [ - "sha256:ae9e914d808d73be4694f53fcab88dcde19b1c4f0187b4a30909cd40cf12e939", - "sha256:d1a1f3f07ac51382d5986f79b1c2d47a063f0139889422db668431d1b9af0612" + "sha256:9388623371c7f4c04727b0057578d85bf6c7086961f10058370e023b7620eea3", + "sha256:bd3c7fea89eae26eac26d3ca5cddb45459408e0d84fa1b805d0a3c1ad782351b" ], "index": "pypi", - "version": "==0.2.6" + "version": "==0.2.7" }, "apscheduler": { "hashes": [ @@ -64,14 +64,6 @@ "markers": "python_version >= '3.8'", "version": "==3.8.1" }, - "async-timeout": { - "hashes": [ - "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", - "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028" - ], - "markers": "python_full_version < '3.11.3'", - "version": "==4.0.3" - }, "azure-common": { "hashes": [ "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3", @@ -89,12 +81,12 @@ }, 
"azure-identity": { "hashes": [ - "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e", - "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726" + "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea", + "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==1.16.1" + "version": "==1.17.1" }, "azure-mgmt-compute": { "hashes": [ @@ -102,6 +94,7 @@ "sha256:5a5b1c4fc1a19ecb022a12ded1be8b1b155f6979d03fb9efc04642f606644bbf" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==31.0.0" }, "azure-mgmt-core": { @@ -118,6 +111,7 @@ "sha256:a95734102450b6f01aaa938d78c00285b90fc52b866caae17efb5521ae986db7" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==4.0.1" }, "azure-mgmt-resource": { @@ -126,15 +120,17 @@ "sha256:fcaa4eca357d216f285b04e20b7f7bfaefda738ba6d30d956193090d3e325248" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==23.1.1" }, "azure-mgmt-storage": { "hashes": [ - "sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc", - "sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162" + "sha256:503a7ff9c31254092b0656445f5728bfdfda2d09d46a82e97019eaa9a1ecec64", + "sha256:f97df1fa39cde9dbacf2cd96c9cba1fc196932185e24853e276f74b18a0bd031" ], "index": "pypi", - "version": "==21.1.0" + "markers": "python_version >= '3.8'", + "version": "==21.2.1" }, "azure-storage-blob": { "hashes": [ @@ -142,6 +138,7 @@ "sha256:eeb91256e41d4b5b9bad6a87fd0a8ade07dd58aa52344e2c8d2746e27a017d3b" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==12.20.0" }, "beautifulsoup4": { @@ -161,19 +158,20 @@ }, "boto3": { "hashes": [ - "sha256:4460958d2b0c53bd2195b23ed5d45db2350e514486fe8caeb38b285b30742280", - "sha256:eeb11bca9b19d12baf93436fb8a16b8b824f1f7e8b9bcc722607e862c46b1b08" + 
"sha256:0b21b84db4619b3711a6f643d465a5a25e81231ee43615c55a20ff6b89c6cc3c", + "sha256:7cb697d67fd138ceebc6f789919ae370c092a50c6b0ccc4ef483027935502eab" ], "index": "pypi", - "version": "==1.34.114" + "markers": "python_version >= '3.8'", + "version": "==1.34.137" }, "botocore": { "hashes": [ - "sha256:5705f74fda009656a218ffaf4afd81228359160f2ab806ab8222d07e9da3a73b", - "sha256:606d1e55984d45e41a812badee292755f4db0233eed9cca63ea3bb8f5755507f" + "sha256:a980fa4adec4bfa23fff70a3512622e9412c69c791898a52cafc2458b0be6040", + "sha256:e29c8e9bfda0b20a1997792968e85868bfce42fefad9730f633a81adcff3f2ef" ], "markers": "python_version >= '3.8'", - "version": "==1.34.114" + "version": "==1.34.137" }, "bs4": { "hashes": [ @@ -189,6 +187,7 @@ "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==5.3.3" }, "celery": { @@ -197,6 +196,7 @@ "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==5.2.7" }, "certifi": { @@ -504,6 +504,7 @@ "sha256:fdbb44920944045cc323db54599b321325141d82d14fa7453bc0699826bbe9ed" ], "index": "pypi", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.0.5" }, "cryptography": { @@ -550,15 +551,17 @@ "sha256:a17fcba2aad3fc7d46fdb23215095dbbd64e6174bf4589171e732b18b07e426a" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==4.2.13" }, "django-cors-headers": { "hashes": [ - "sha256:0b1fd19297e37417fc9f835d39e45c8c642938ddba1acce0c1753d3edef04f36", - "sha256:0bf65ef45e606aff1994d35503e6b677c0b26cafff6506f8fd7187f3be840207" + "sha256:5c6e3b7fe870876a1efdfeb4f433782c3524078fa0dc9e0195f6706ce7a242f6", + "sha256:92cf4633e22af67a230a1456cb1b7a02bb213d6536d2dcb2a4a24092ea9cebc2" ], "index": "pypi", - "version": "==4.3.1" + "markers": "python_version >= '3.8'", + "version": "==4.4.0" }, "django-environ": { 
"hashes": [ @@ -566,6 +569,7 @@ "sha256:f32a87aa0899894c27d4e1776fa6b477e8164ed7f6b3e410a62a6d72caaf64be" ], "index": "pypi", + "markers": "python_version >= '3.6' and python_version < '4'", "version": "==0.11.2" }, "django-extensions": { @@ -574,6 +578,7 @@ "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==3.2.3" }, "django-filter": { @@ -582,6 +587,7 @@ "sha256:df2ee9857e18d38bed203c8745f62a803fa0f31688c9fe6f8e868120b1848e48" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==24.2" }, "django-prometheus": { @@ -598,6 +604,7 @@ "sha256:ebc88df7da810732e2af9987f7f426c96204bf89319df4c6da6ca9a2942edd5b" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==5.4.0" }, "django-tenants": { @@ -609,11 +616,12 @@ }, "djangorestframework": { "hashes": [ - "sha256:3ccc0475bce968608cf30d07fb17d8e52d1d7fc8bfe779c905463200750cbca6", - "sha256:f88fad74183dfc7144b2756d0d2ac716ea5b4c7c9840995ac3bfd8ec034333c1" + "sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20", + "sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad" ], "index": "pypi", - "version": "==3.15.1" + "markers": "python_version >= '3.8'", + "version": "==3.15.2" }, "djangorestframework-csv": { "hashes": [ @@ -632,28 +640,33 @@ "version": "==0.5.2" }, "google-api-core": { + "extras": [ + "grpc" + ], "hashes": [ - "sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251", - "sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10" + "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125", + "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd" ], "markers": "python_version >= '3.7'", - "version": "==2.19.0" + "version": "==2.19.1" }, "google-api-python-client": { "hashes": [ - "sha256:1c03e24af62238a8817ecc24e9d4c32ddd4cb1f323b08413652d9a9a592fc00d", - 
"sha256:e325409bdcef4604d505d9246ce7199960a010a0569ac503b9f319db8dbdc217" + "sha256:91742fa4c779d48456c0256ef346fa1cc185ba427176d3277e35141fa3268026", + "sha256:b552a28123ed95493035698db80e8ed78c9106a8b422e63a175150b9b55b704e" ], "index": "pypi", - "version": "==2.131.0" + "markers": "python_version >= '3.7'", + "version": "==2.135.0" }, "google-auth": { "hashes": [ - "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360", - "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415" + "sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23", + "sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871" ], "index": "pypi", - "version": "==2.29.0" + "markers": "python_version >= '3.7'", + "version": "==2.31.0" }, "google-auth-httplib2": { "hashes": [ @@ -664,11 +677,12 @@ }, "google-cloud-bigquery": { "hashes": [ - "sha256:4b4597f9291b42102c9667d3b4528f801d4c8f24ef2b12dd1ecb881273330955", - "sha256:9fb72884fdbec9c4643cea6b7f21e1ecf3eb61d5305f87493d271dc801647a9e" + "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509", + "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9" ], "index": "pypi", - "version": "==3.23.1" + "markers": "python_version >= '3.7'", + "version": "==3.25.0" }, "google-cloud-core": { "hashes": [ @@ -680,11 +694,12 @@ }, "google-cloud-storage": { "hashes": [ - "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852", - "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f" + "sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388", + "sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1" ], "index": "pypi", - "version": "==2.16.0" + "markers": "python_version >= '3.7'", + "version": "==2.17.0" }, "google-crc32c": { "hashes": [ @@ -762,77 +777,77 @@ }, "google-resumable-media": { "hashes": [ - "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b", - 
"sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08" + "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c", + "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33" ], "markers": "python_version >= '3.7'", - "version": "==2.7.0" + "version": "==2.7.1" }, "googleapis-common-protos": { "hashes": [ - "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e", - "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632" + "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945", + "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87" ], "markers": "python_version >= '3.7'", - "version": "==1.63.0" + "version": "==1.63.2" }, "grpcio": { "hashes": [ - "sha256:01615bbcae6875eee8091e6b9414072f4e4b00d8b7e141f89635bdae7cf784e5", - "sha256:02cc9cc3f816d30f7993d0d408043b4a7d6a02346d251694d8ab1f78cc723e7e", - "sha256:0b2dfe6dcace264807d9123d483d4c43274e3f8c39f90ff51de538245d7a4145", - "sha256:0da1d921f8e4bcee307aeef6c7095eb26e617c471f8cb1c454fd389c5c296d1e", - "sha256:0f30596cdcbed3c98024fb4f1d91745146385b3f9fd10c9f2270cbfe2ed7ed91", - "sha256:1ce4cd5a61d4532651079e7aae0fedf9a80e613eed895d5b9743e66b52d15812", - "sha256:1f279ad72dd7d64412e10f2443f9f34872a938c67387863c4cd2fb837f53e7d2", - "sha256:1f5de082d936e0208ce8db9095821361dfa97af8767a6607ae71425ac8ace15c", - "sha256:1f8ea18b928e539046bb5f9c124d717fbf00cc4b2d960ae0b8468562846f5aa1", - "sha256:2186d76a7e383e1466e0ea2b0febc343ffeae13928c63c6ec6826533c2d69590", - "sha256:23b6887bb21d77649d022fa1859e05853fdc2e60682fd86c3db652a555a282e0", - "sha256:257baf07f53a571c215eebe9679c3058a313fd1d1f7c4eede5a8660108c52d9c", - "sha256:2a18090371d138a57714ee9bffd6c9c9cb2e02ce42c681aac093ae1e7189ed21", - "sha256:2e8fabe2cc57a369638ab1ad8e6043721014fdf9a13baa7c0e35995d3a4a7618", - "sha256:3161a8f8bb38077a6470508c1a7301cd54301c53b8a34bb83e3c9764874ecabd", - 
"sha256:31890b24d47b62cc27da49a462efe3d02f3c120edb0e6c46dcc0025506acf004", - "sha256:3550493ac1d23198d46dc9c9b24b411cef613798dc31160c7138568ec26bc9b4", - "sha256:3b09c3d9de95461214a11d82cc0e6a46a6f4e1f91834b50782f932895215e5db", - "sha256:3d2004e85cf5213995d09408501f82c8534700d2babeb81dfdba2a3bff0bb396", - "sha256:46b8b43ba6a2a8f3103f103f97996cad507bcfd72359af6516363c48793d5a7b", - "sha256:579dd9fb11bc73f0de061cab5f8b2def21480fd99eb3743ed041ad6a1913ee2f", - "sha256:597191370951b477b7a1441e1aaa5cacebeb46a3b0bd240ec3bb2f28298c7553", - "sha256:59c68df3a934a586c3473d15956d23a618b8f05b5e7a3a904d40300e9c69cbf0", - "sha256:5a56797dea8c02e7d3a85dfea879f286175cf4d14fbd9ab3ef2477277b927baa", - "sha256:650a8150a9b288f40d5b7c1d5400cc11724eae50bd1f501a66e1ea949173649b", - "sha256:6d5541eb460d73a07418524fb64dcfe0adfbcd32e2dac0f8f90ce5b9dd6c046c", - "sha256:6ec5ed15b4ffe56e2c6bc76af45e6b591c9be0224b3fb090adfb205c9012367d", - "sha256:73f84f9e5985a532e47880b3924867de16fa1aa513fff9b26106220c253c70c5", - "sha256:753cb58683ba0c545306f4e17dabf468d29cb6f6b11832e1e432160bb3f8403c", - "sha256:7c1f5b2298244472bcda49b599be04579f26425af0fd80d3f2eb5fd8bc84d106", - "sha256:7e013428ab472892830287dd082b7d129f4d8afef49227a28223a77337555eaa", - "sha256:7f17572dc9acd5e6dfd3014d10c0b533e9f79cd9517fc10b0225746f4c24b58e", - "sha256:85fda90b81da25993aa47fae66cae747b921f8f6777550895fb62375b776a231", - "sha256:874c741c8a66f0834f653a69e7e64b4e67fcd4a8d40296919b93bab2ccc780ba", - "sha256:8d598b5d5e2c9115d7fb7e2cb5508d14286af506a75950762aa1372d60e41851", - "sha256:8de0399b983f8676a7ccfdd45e5b2caec74a7e3cc576c6b1eecf3b3680deda5e", - "sha256:a053584079b793a54bece4a7d1d1b5c0645bdbee729215cd433703dc2532f72b", - "sha256:a54362f03d4dcfae63be455d0a7d4c1403673498b92c6bfe22157d935b57c7a9", - "sha256:aca4f15427d2df592e0c8f3d38847e25135e4092d7f70f02452c0e90d6a02d6d", - "sha256:b2cbdfba18408389a1371f8c2af1659119e1831e5ed24c240cae9e27b4abc38d", - "sha256:b52e1ec7185512103dd47d41cf34ea78e7a7361ba460187ddd2416b480e0938c", 
- "sha256:c46fb6bfca17bfc49f011eb53416e61472fa96caa0979b4329176bdd38cbbf2a", - "sha256:c56c91bd2923ddb6e7ed28ebb66d15633b03e0df22206f22dfcdde08047e0a48", - "sha256:cf4c8daed18ae2be2f1fc7d613a76ee2a2e28fdf2412d5c128be23144d28283d", - "sha256:d7b7bf346391dffa182fba42506adf3a84f4a718a05e445b37824136047686a1", - "sha256:d9171f025a196f5bcfec7e8e7ffb7c3535f7d60aecd3503f9e250296c7cfc150" - ], - "version": "==1.64.0" + "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040", + "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122", + "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9", + "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f", + "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd", + "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d", + "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33", + "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762", + "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294", + "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650", + "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b", + "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad", + "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1", + "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff", + "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59", + "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4", + "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027", + "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502", + "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae", + "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61", + 
"sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb", + "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa", + "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5", + "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1", + "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9", + "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90", + "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b", + "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179", + "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e", + "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a", + "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489", + "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d", + "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a", + "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2", + "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd", + "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb", + "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61", + "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca", + "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6", + "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602", + "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367", + "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62", + "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d", + "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd", + "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22", + "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309" 
+ ], + "version": "==1.64.1" }, "grpcio-status": { "hashes": [ - "sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f", - "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a" + "sha256:2ec6e0777958831484a517e32b6ffe0a4272242eae81bff2f5c3707fa58b40e3", + "sha256:c50bd14eb6506d8580a6c553bea463d7c08499b2c0e93f6d1864c5e8eabb1066" ], - "version": "==1.62.2" + "version": "==1.64.1" }, "gunicorn": { "hashes": [ @@ -840,6 +855,7 @@ "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==22.0.0" }, "httplib2": { @@ -852,17 +868,17 @@ }, "ibm-cloud-sdk-core": { "hashes": [ - "sha256:bf2b6940761970c14b53fc8fc135af567c58b9709db3214be403a383dd51cabb" + "sha256:9c4d492256252793b72fb1500f92fe26f2e756cee7ab815fe9d987be05e38f43" ], "index": "pypi", - "version": "==3.18.0" + "version": "==3.20.1" }, "ibm-platform-services": { "hashes": [ - "sha256:ac4a09ff587191c8d615fb5629fca06ddca056f88e3403c8e08e51141c492e26" + "sha256:0c8c75d1ef9990e17c1af93fbf9b4a4e1b6beeba293b02c6605800c73bb97f22" ], "index": "pypi", - "version": "==0.48.0" + "version": "==0.54.1" }, "idna": { "hashes": [ @@ -917,6 +933,7 @@ "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==5.2.4" }, "markupsafe": { @@ -1072,19 +1089,19 @@ }, "msal": { "hashes": [ - "sha256:563c2d70de77a2ca9786aab84cb4e133a38a6897e6676774edc23d610bfc9e7b", - "sha256:d72bbfe2d5c2f2555f4bc6205be4450ddfd12976610dd9a16a9ab0f05c68b64d" + "sha256:6b301e63f967481f0cc1a3a3bac0cf322b276855bc1b0955468d9deb3f33d511", + "sha256:8f6725f099752553f9b2fe84125e2a5ebe47b49f92eacca33ebedd3a9ebaae25" ], "markers": "python_version >= '3.7'", - "version": "==1.28.1" + "version": "==1.29.0" }, "msal-extensions": { "hashes": [ - "sha256:01be9711b4c0b1a151450068eeb2c4f0997df3bba085ac299de3a66f585e382f", - 
"sha256:6ab357867062db7b253d0bd2df6d411c7891a0ee7308d54d1e4317c1d1c54252" + "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef", + "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d" ], "markers": "python_version >= '3.7'", - "version": "==1.1.0" + "version": "==1.2.0" }, "msrest": { "hashes": [ @@ -1141,7 +1158,6 @@ "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f" ], - "index": "pypi", "markers": "platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'x86_64'", "version": "==1.26.4" }, @@ -1155,11 +1171,11 @@ }, "oci": { "hashes": [ - "sha256:4bc8f3265e2ca68cf85f25f4da05678d638f1d713e2a37309e86ab76e4687d45", - "sha256:5f173539118751f0a29f418077557d57cc99b2d3195392843be497db243417de" + "sha256:1b6ba5fb87d98f20ef3389d3d25906f71f32f12fff57c0923eecacb0efa49eaa", + "sha256:e014058d05c6512fd316099f6b6de635d081a32907893c5bb80461078499c6f2" ], "index": "pypi", - "version": "==2.127.0" + "version": "==2.129.1" }, "packaging": { "hashes": [ @@ -1198,6 +1214,7 @@ "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==2.1.4" }, "patsy": { @@ -1217,11 +1234,11 @@ }, "portalocker": { "hashes": [ - "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33", - "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e" + "sha256:48944147b2cd42520549bc1bb8fe44e220296e56f7c3d551bc6ecce69d9b0de1", + "sha256:49de8bc0a2f68ca98bf9e219c81a3e6b27097c7bf505a87c5a112ce1aaeb9b81" ], - "markers": "platform_system != 'Windows'", - "version": "==2.8.2" + "markers": "python_version >= '3.8'", + "version": "==2.10.0" }, "prometheus-client": { "hashes": [ @@ -1229,62 +1246,65 @@ "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7" ], "index": "pypi", + "markers": "python_version >= 
'3.8'", "version": "==0.20.0" }, "prompt-toolkit": { "hashes": [ - "sha256:07c60ee4ab7b7e90824b61afa840c8f5aad2d46b3e2e10acc33d8ecc94a49089", - "sha256:a29b89160e494e3ea8622b09fa5897610b437884dcdcd054fdc1308883326c2a" + "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10", + "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360" ], "markers": "python_full_version >= '3.7.0'", - "version": "==3.0.45" + "version": "==3.0.47" }, "proto-plus": { "hashes": [ - "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2", - "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c" + "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445", + "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12" ], - "markers": "python_version >= '3.6'", - "version": "==1.23.0" + "markers": "python_version >= '3.7'", + "version": "==1.24.0" }, "protobuf": { "hashes": [ - "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4", - "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8", - "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c", - "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d", - "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4", - "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa", - "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c", - "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019", - "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9", - "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c", - "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2" + "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505", + "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b", + 
"sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38", + "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863", + "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470", + "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6", + "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce", + "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca", + "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5", + "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e", + "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714" ], "markers": "python_version >= '3.8'", - "version": "==4.25.3" + "version": "==5.27.2" }, "psutil": { "hashes": [ - "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d", - "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73", - "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8", - "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2", - "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e", - "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36", - "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7", - "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c", - "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee", - "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421", - "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf", - "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81", - "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0", - "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631", - "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4", - 
"sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8" - ], - "index": "pypi", - "version": "==5.9.8" + "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35", + "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0", + "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c", + "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1", + "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3", + "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c", + "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd", + "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3", + "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0", + "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2", + "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6", + "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d", + "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c", + "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0", + "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132", + "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14", + "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0" + ], + "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==6.0.0" }, "psycopg2": { "hashes": [ @@ -1303,6 +1323,7 @@ "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==2.9.9" }, "pyarrow": { @@ -1345,6 +1366,7 @@ "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==16.1.0" }, "pyasn1": { @@ -1373,96 
+1395,106 @@ }, "pydantic": { "hashes": [ - "sha256:71b2945998f9c9b7919a45bde9a50397b289937d215ae141c1d0903ba7149fd7", - "sha256:834ab954175f94e6e68258537dc49402c4a5e9d0409b9f1b86b7e934a8372de7" + "sha256:d970ffb9d030b710795878940bd0489842c638e7252fc4a19c3ae2f7da4d6141", + "sha256:ead4f3a1e92386a734ca1411cb25d94147cf8778ed5be6b56749047676d6364e" ], "index": "pypi", - "version": "==2.7.2" + "markers": "python_version >= '3.8'", + "version": "==2.8.0" }, "pydantic-core": { "hashes": [ - "sha256:0bee9bb305a562f8b9271855afb6ce00223f545de3d68560b3c1649c7c5295e9", - "sha256:0ecce4b2360aa3f008da3327d652e74a0e743908eac306198b47e1c58b03dd2b", - "sha256:17954d784bf8abfc0ec2a633108207ebc4fa2df1a0e4c0c3ccbaa9bb01d2c426", - "sha256:19d2e725de0f90d8671f89e420d36c3dd97639b98145e42fcc0e1f6d492a46dc", - "sha256:1f9cd7f5635b719939019be9bda47ecb56e165e51dd26c9a217a433e3d0d59a9", - "sha256:200ad4e3133cb99ed82342a101a5abf3d924722e71cd581cc113fe828f727fbc", - "sha256:24b214b7ee3bd3b865e963dbed0f8bc5375f49449d70e8d407b567af3222aae4", - "sha256:2c44efdd3b6125419c28821590d7ec891c9cb0dff33a7a78d9d5c8b6f66b9702", - "sha256:2c8333f6e934733483c7eddffdb094c143b9463d2af7e6bd85ebcb2d4a1b82c6", - "sha256:2f7ef5f0ebb77ba24c9970da18b771711edc5feaf00c10b18461e0f5f5949231", - "sha256:304378b7bf92206036c8ddd83a2ba7b7d1a5b425acafff637172a3aa72ad7083", - "sha256:370059b7883485c9edb9655355ff46d912f4b03b009d929220d9294c7fd9fd60", - "sha256:37b40c05ced1ba4218b14986fe6f283d22e1ae2ff4c8e28881a70fb81fbfcda7", - "sha256:3d3e42bb54e7e9d72c13ce112e02eb1b3b55681ee948d748842171201a03a98a", - "sha256:3fc1c7f67f34c6c2ef9c213e0f2a351797cda98249d9ca56a70ce4ebcaba45f4", - "sha256:41dbdcb0c7252b58fa931fec47937edb422c9cb22528f41cb8963665c372caf6", - "sha256:432e999088d85c8f36b9a3f769a8e2b57aabd817bbb729a90d1fe7f18f6f1f39", - "sha256:45e4ffbae34f7ae30d0047697e724e534a7ec0a82ef9994b7913a412c21462a0", - "sha256:4afa5f5973e8572b5c0dcb4e2d4fda7890e7cd63329bd5cc3263a25c92ef0026", - 
"sha256:544a9a75622357076efb6b311983ff190fbfb3c12fc3a853122b34d3d358126c", - "sha256:5560dda746c44b48bf82b3d191d74fe8efc5686a9ef18e69bdabccbbb9ad9442", - "sha256:58ff8631dbab6c7c982e6425da8347108449321f61fe427c52ddfadd66642af7", - "sha256:5a64faeedfd8254f05f5cf6fc755023a7e1606af3959cfc1a9285744cc711044", - "sha256:60e4c625e6f7155d7d0dcac151edf5858102bc61bf959d04469ca6ee4e8381bd", - "sha256:616221a6d473c5b9aa83fa8982745441f6a4a62a66436be9445c65f241b86c94", - "sha256:63081a49dddc6124754b32a3774331467bfc3d2bd5ff8f10df36a95602560361", - "sha256:666e45cf071669fde468886654742fa10b0e74cd0fa0430a46ba6056b24fb0af", - "sha256:67bc078025d70ec5aefe6200ef094576c9d86bd36982df1301c758a9fff7d7f4", - "sha256:691018785779766127f531674fa82bb368df5b36b461622b12e176c18e119022", - "sha256:6a36f78674cbddc165abab0df961b5f96b14461d05feec5e1f78da58808b97e7", - "sha256:6afd5c867a74c4d314c557b5ea9520183fadfbd1df4c2d6e09fd0d990ce412cd", - "sha256:6b32c2a1f8032570842257e4c19288eba9a2bba4712af542327de9a1204faff8", - "sha256:6e59fca51ffbdd1638b3856779342ed69bcecb8484c1d4b8bdb237d0eb5a45e2", - "sha256:70cf099197d6b98953468461d753563b28e73cf1eade2ffe069675d2657ed1d5", - "sha256:73038d66614d2e5cde30435b5afdced2b473b4c77d4ca3a8624dd3e41a9c19be", - "sha256:744697428fcdec6be5670460b578161d1ffe34743a5c15656be7ea82b008197c", - "sha256:77319771a026f7c7d29c6ebc623de889e9563b7087911b46fd06c044a12aa5e9", - "sha256:7a20dded653e516a4655f4c98e97ccafb13753987434fe7cf044aa25f5b7d417", - "sha256:7e6382ce89a92bc1d0c0c5edd51e931432202b9080dc921d8d003e616402efd1", - "sha256:7fdd362f6a586e681ff86550b2379e532fee63c52def1c666887956748eaa326", - "sha256:80aea0ffeb1049336043d07799eace1c9602519fb3192916ff525b0287b2b1e4", - "sha256:82f2718430098bcdf60402136c845e4126a189959d103900ebabb6774a5d9fdb", - "sha256:855ec66589c68aa367d989da5c4755bb74ee92ccad4fdb6af942c3612c067e34", - "sha256:9128089da8f4fe73f7a91973895ebf2502539d627891a14034e45fb9e707e26d", - "sha256:929c24e9dea3990bc8bcd27c5f2d3916c0c86f5511d2caa69e0d5290115344a9", 
- "sha256:98ed737567d8f2ecd54f7c8d4f8572ca7c7921ede93a2e52939416170d357812", - "sha256:9a46795b1f3beb167eaee91736d5d17ac3a994bf2215a996aed825a45f897558", - "sha256:9f9e04afebd3ed8c15d67a564ed0a34b54e52136c6d40d14c5547b238390e779", - "sha256:a4e651e47d981c1b701dcc74ab8fec5a60a5b004650416b4abbef13db23bc7be", - "sha256:a62e437d687cc148381bdd5f51e3e81f5b20a735c55f690c5be94e05da2b0d5c", - "sha256:aaee40f25bba38132e655ffa3d1998a6d576ba7cf81deff8bfa189fb43fd2bbe", - "sha256:adf952c3f4100e203cbaf8e0c907c835d3e28f9041474e52b651761dc248a3c0", - "sha256:b367a73a414bbb08507da102dc2cde0fa7afe57d09b3240ce82a16d608a7679c", - "sha256:b8e20e15d18bf7dbb453be78a2d858f946f5cdf06c5072453dace00ab652e2b2", - "sha256:b95a0972fac2b1ff3c94629fc9081b16371dad870959f1408cc33b2f78ad347a", - "sha256:b9ebe8231726c49518b16b237b9fe0d7d361dd221302af511a83d4ada01183ab", - "sha256:ba905d184f62e7ddbb7a5a751d8a5c805463511c7b08d1aca4a3e8c11f2e5048", - "sha256:bd4435b8d83f0c9561a2a9585b1de78f1abb17cb0cef5f39bf6a4b47d19bafe3", - "sha256:bd7df92f28d351bb9f12470f4c533cf03d1b52ec5a6e5c58c65b183055a60106", - "sha256:c0037a92cf0c580ed14e10953cdd26528e8796307bb8bb312dc65f71547df04d", - "sha256:c0d9ff283cd3459fa0bf9b0256a2b6f01ac1ff9ffb034e24457b9035f75587cb", - "sha256:c56eca1686539fa0c9bda992e7bd6a37583f20083c37590413381acfc5f192d6", - "sha256:c6ac9ffccc9d2e69d9fba841441d4259cb668ac180e51b30d3632cd7abca2b9b", - "sha256:c826870b277143e701c9ccf34ebc33ddb4d072612683a044e7cce2d52f6c3fef", - "sha256:cd4a032bb65cc132cae1fe3e52877daecc2097965cd3914e44fbd12b00dae7c5", - "sha256:d33ce258e4e6e6038f2b9e8b8a631d17d017567db43483314993b3ca345dcbbb", - "sha256:d531076bdfb65af593326ffd567e6ab3da145020dafb9187a1d131064a55f97c", - "sha256:dccf3ef1400390ddd1fb55bf0632209d39140552d068ee5ac45553b556780e06", - "sha256:df11fa992e9f576473038510d66dd305bcd51d7dd508c163a8c8fe148454e059", - "sha256:e1a8376fef60790152564b0eab376b3e23dd6e54f29d84aad46f7b264ecca943", - 
"sha256:e201935d282707394f3668380e41ccf25b5794d1b131cdd96b07f615a33ca4b1", - "sha256:e2e253af04ceaebde8eb201eb3f3e3e7e390f2d275a88300d6a1959d710539e2", - "sha256:e862823be114387257dacbfa7d78547165a85d7add33b446ca4f4fae92c7ff5c", - "sha256:eecf63195be644b0396f972c82598cd15693550f0ff236dcf7ab92e2eb6d3522", - "sha256:f0928cde2ae416a2d1ebe6dee324709c6f73e93494d8c7aea92df99aab1fc40f", - "sha256:f9c08cabff68704a1b4667d33f534d544b8a07b8e5d039c37067fceb18789e78", - "sha256:fec02527e1e03257aa25b1a4dcbe697b40a22f1229f5d026503e8b7ff6d2eda7", - "sha256:ff58f379345603d940e461eae474b6bbb6dab66ed9a851ecd3cb3709bf4dcf6a", - "sha256:ffecbb5edb7f5ffae13599aec33b735e9e4c7676ca1633c60f2c606beb17efc5" - ], - "markers": "python_version >= '3.8'", - "version": "==2.18.3" + "sha256:03aceaf6a5adaad3bec2233edc5a7905026553916615888e53154807e404545c", + "sha256:05e83ce2f7eba29e627dd8066aa6c4c0269b2d4f889c0eba157233a353053cea", + "sha256:0b0eefc7633a04c0694340aad91fbfd1986fe1a1e0c63a22793ba40a18fcbdc8", + "sha256:0e75794883d635071cf6b4ed2a5d7a1e50672ab7a051454c76446ef1ebcdcc91", + "sha256:0f6dd3612a3b9f91f2e63924ea18a4476656c6d01843ca20a4c09e00422195af", + "sha256:116b326ac82c8b315e7348390f6d30bcfe6e688a7d3f1de50ff7bcc2042a23c2", + "sha256:16197e6f4fdecb9892ed2436e507e44f0a1aa2cff3b9306d1c879ea2f9200997", + "sha256:1c3c5b7f70dd19a6845292b0775295ea81c61540f68671ae06bfe4421b3222c2", + "sha256:1dacf660d6de692fe351e8c806e7efccf09ee5184865893afbe8e59be4920b4a", + "sha256:1def125d59a87fe451212a72ab9ed34c118ff771e5473fef4f2f95d8ede26d75", + "sha256:1e4f46189d8740561b43655263a41aac75ff0388febcb2c9ec4f1b60a0ec12f3", + "sha256:1f038156b696a1c39d763b2080aeefa87ddb4162c10aa9fabfefffc3dd8180fa", + "sha256:21d9f7e24f63fdc7118e6cc49defaab8c1d27570782f7e5256169d77498cf7c7", + "sha256:22b813baf0dbf612752d8143a2dbf8e33ccb850656b7850e009bad2e101fc377", + "sha256:22e3b1d4b1b3f6082849f9b28427ef147a5b46a6132a3dbaf9ca1baa40c88609", + "sha256:23425eccef8f2c342f78d3a238c824623836c6c874d93c726673dbf7e56c78c0", + 
"sha256:25c46bb2ff6084859bbcfdf4f1a63004b98e88b6d04053e8bf324e115398e9e7", + "sha256:2761f71faed820e25ec62eacba670d1b5c2709bb131a19fcdbfbb09884593e5a", + "sha256:2aec8eeea0b08fd6bc2213d8e86811a07491849fd3d79955b62d83e32fa2ad5f", + "sha256:2d06a7fa437f93782e3f32d739c3ec189f82fca74336c08255f9e20cea1ed378", + "sha256:316fe7c3fec017affd916a0c83d6f1ec697cbbbdf1124769fa73328e7907cc2e", + "sha256:344e352c96e53b4f56b53d24728217c69399b8129c16789f70236083c6ceb2ac", + "sha256:35681445dc85446fb105943d81ae7569aa7e89de80d1ca4ac3229e05c311bdb1", + "sha256:366be8e64e0cb63d87cf79b4e1765c0703dd6313c729b22e7b9e378db6b96877", + "sha256:3a7235b46c1bbe201f09b6f0f5e6c36b16bad3d0532a10493742f91fbdc8035f", + "sha256:3c05eaf6c863781eb834ab41f5963604ab92855822a2062897958089d1335dad", + "sha256:3e147fc6e27b9a487320d78515c5f29798b539179f7777018cedf51b7749e4f4", + "sha256:3f0f3a4a23717280a5ee3ac4fb1f81d6fde604c9ec5100f7f6f987716bb8c137", + "sha256:5084ec9721f82bef5ff7c4d1ee65e1626783abb585f8c0993833490b63fe1792", + "sha256:52527e8f223ba29608d999d65b204676398009725007c9336651c2ec2d93cffc", + "sha256:53b06aea7a48919a254b32107647be9128c066aaa6ee6d5d08222325f25ef175", + "sha256:58e251bb5a5998f7226dc90b0b753eeffa720bd66664eba51927c2a7a2d5f32c", + "sha256:603a843fea76a595c8f661cd4da4d2281dff1e38c4a836a928eac1a2f8fe88e4", + "sha256:616b9c2f882393d422ba11b40e72382fe975e806ad693095e9a3b67c59ea6150", + "sha256:649a764d9b0da29816889424697b2a3746963ad36d3e0968784ceed6e40c6355", + "sha256:658287a29351166510ebbe0a75c373600cc4367a3d9337b964dada8d38bcc0f4", + "sha256:6d0f52684868db7c218437d260e14d37948b094493f2646f22d3dda7229bbe3f", + "sha256:6dc85b9e10cc21d9c1055f15684f76fa4facadddcb6cd63abab702eb93c98943", + "sha256:72432fd6e868c8d0a6849869e004b8bcae233a3c56383954c228316694920b38", + "sha256:73deadd6fd8a23e2f40b412b3ac617a112143c8989a4fe265050fd91ba5c0608", + "sha256:763602504bf640b3ded3bba3f8ed8a1cc2fc6a87b8d55c1c5689f428c49c947e", + "sha256:7701df088d0b05f3460f7ba15aec81ac8b0fb5690367dfd072a6c38cf5b7fdb5", 
+ "sha256:78d584caac52c24240ef9ecd75de64c760bbd0e20dbf6973631815e3ef16ef8b", + "sha256:7a3639011c2e8a9628466f616ed7fb413f30032b891898e10895a0a8b5857d6c", + "sha256:7b6a24d7b5893392f2b8e3b7a0031ae3b14c6c1942a4615f0d8794fdeeefb08b", + "sha256:7d4df13d1c55e84351fab51383520b84f490740a9f1fec905362aa64590b7a5d", + "sha256:7e37b6bb6e90c2b8412b06373c6978d9d81e7199a40e24a6ef480e8acdeaf918", + "sha256:8093473d7b9e908af1cef30025609afc8f5fd2a16ff07f97440fd911421e4432", + "sha256:840200827984f1c4e114008abc2f5ede362d6e11ed0b5931681884dd41852ff1", + "sha256:85770b4b37bb36ef93a6122601795231225641003e0318d23c6233c59b424279", + "sha256:879ae6bb08a063b3e1b7ac8c860096d8fd6b48dd9b2690b7f2738b8c835e744b", + "sha256:87d3df115f4a3c8c5e4d5acf067d399c6466d7e604fc9ee9acbe6f0c88a0c3cf", + "sha256:8b315685832ab9287e6124b5d74fc12dda31e6421d7f6b08525791452844bc2d", + "sha256:8e49524917b8d3c2f42cd0d2df61178e08e50f5f029f9af1f402b3ee64574392", + "sha256:978d4123ad1e605daf1ba5e01d4f235bcf7b6e340ef07e7122e8e9cfe3eb61ab", + "sha256:a0586cddbf4380e24569b8a05f234e7305717cc8323f50114dfb2051fcbce2a3", + "sha256:a272785a226869416c6b3c1b7e450506152d3844207331f02f27173562c917e0", + "sha256:a340d2bdebe819d08f605e9705ed551c3feb97e4fd71822d7147c1e4bdbb9508", + "sha256:a3f243f318bd9523277fa123b3163f4c005a3e8619d4b867064de02f287a564d", + "sha256:a4f0f71653b1c1bad0350bc0b4cc057ab87b438ff18fa6392533811ebd01439c", + "sha256:ab760f17c3e792225cdaef31ca23c0aea45c14ce80d8eff62503f86a5ab76bff", + "sha256:ac76f30d5d3454f4c28826d891fe74d25121a346c69523c9810ebba43f3b1cec", + "sha256:ad1bd2f377f56fec11d5cfd0977c30061cd19f4fa199bf138b200ec0d5e27eeb", + "sha256:b2ba34a099576234671f2e4274e5bc6813b22e28778c216d680eabd0db3f7dad", + "sha256:b2f13c3e955a087c3ec86f97661d9f72a76e221281b2262956af381224cfc243", + "sha256:b34480fd6778ab356abf1e9086a4ced95002a1e195e8d2fd182b0def9d944d11", + "sha256:b4a085bd04af7245e140d1b95619fe8abb445a3d7fdf219b3f80c940853268ef", + 
"sha256:b81ec2efc04fc1dbf400647d4357d64fb25543bae38d2d19787d69360aad21c9", + "sha256:b8c46a8cf53e849eea7090f331ae2202cd0f1ceb090b00f5902c423bd1e11805", + "sha256:bc7e43b4a528ffca8c9151b6a2ca34482c2fdc05e6aa24a84b7f475c896fc51d", + "sha256:c3dc8ec8b87c7ad534c75b8855168a08a7036fdb9deeeed5705ba9410721c84d", + "sha256:c4a9732a5cad764ba37f3aa873dccb41b584f69c347a57323eda0930deec8e10", + "sha256:c867230d715a3dd1d962c8d9bef0d3168994ed663e21bf748b6e3a529a129aab", + "sha256:cafde15a6f7feaec2f570646e2ffc5b73412295d29134a29067e70740ec6ee20", + "sha256:cb1ad5b4d73cde784cf64580166568074f5ccd2548d765e690546cff3d80937d", + "sha256:d08264b4460326cefacc179fc1411304d5af388a79910832835e6f641512358b", + "sha256:d42669d319db366cb567c3b444f43caa7ffb779bf9530692c6f244fc635a41eb", + "sha256:d43e7ab3b65e4dc35a7612cfff7b0fd62dce5bc11a7cd198310b57f39847fd6c", + "sha256:d5b8376a867047bf08910573deb95d3c8dfb976eb014ee24f3b5a61ccc5bee1b", + "sha256:d6f2d8b8da1f03f577243b07bbdd3412eee3d37d1f2fd71d1513cbc76a8c1239", + "sha256:d6f8c49657f3eb7720ed4c9b26624063da14937fc94d1812f1e04a2204db3e17", + "sha256:d70a8ff2d4953afb4cbe6211f17268ad29c0b47e73d3372f40e7775904bc28fc", + "sha256:d82e5ed3a05f2dcb89c6ead2fd0dbff7ac09bc02c1b4028ece2d3a3854d049ce", + "sha256:e9dcd7fb34f7bfb239b5fa420033642fff0ad676b765559c3737b91f664d4fa9", + "sha256:ed741183719a5271f97d93bbcc45ed64619fa38068aaa6e90027d1d17e30dc8d", + "sha256:ee7785938e407418795e4399b2bf5b5f3cf6cf728077a7f26973220d58d885cf", + "sha256:efbb412d55a4ffe73963fed95c09ccb83647ec63b711c4b3752be10a56f0090b", + "sha256:f8ea1d8b7df522e5ced34993c423c3bf3735c53df8b2a15688a2f03a7d678800" + ], + "markers": "python_version >= '3.8'", + "version": "==2.20.0" }, "pyjwt": { "extras": [ @@ -1497,6 +1529,7 @@ "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" ], "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.9.0.post0" }, "pytz": { @@ -1516,20 +1549,21 @@ }, "redis": { "hashes": 
[ - "sha256:7adc2835c7a9b5033b7ad8f8918d09b7344188228809c98df07af226d39dec91", - "sha256:ec31f2ed9675cc54c21ba854cfe0462e6faf1d83c8ce5944709db8a4700b9c61" + "sha256:0e479e24da960c690be5d9b96d21f7b918a98c0cf49af3b6fafaa0753f93a0db", + "sha256:8f611490b93c8109b50adc317b31bfd84fff31def3475b92e7e80bf39f48175b" ], "index": "pypi", - "version": "==5.0.4" + "markers": "python_version >= '3.7'", + "version": "==5.0.7" }, "requests": { "hashes": [ - "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", - "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" + "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289", + "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==2.32.3" + "version": "==2.32.2" }, "requests-oauthlib": { "hashes": [ @@ -1549,50 +1583,51 @@ }, "s3transfer": { "hashes": [ - "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19", - "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d" + "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6", + "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69" ], "markers": "python_version >= '3.8'", - "version": "==0.10.1" + "version": "==0.10.2" }, "scipy": { "hashes": [ - "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d", - "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c", - "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca", - "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9", - "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54", - "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16", - "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2", - "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5", - 
"sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59", - "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326", - "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b", - "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1", - "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d", - "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24", - "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627", - "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c", - "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa", - "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949", - "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989", - "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004", - "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f", - "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884", - "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299", - "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94", - "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f" - ], - "markers": "python_version >= '3.9'", - "version": "==1.13.1" + "sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0", + "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7", + "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d", + "sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0", + "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20", + "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc", + "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0", + "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159", + 
"sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6", + "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1", + "sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e", + "sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f", + "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484", + "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf", + "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74", + "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8", + "sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14", + "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86", + "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359", + "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b", + "sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4", + "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9", + "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb", + "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209", + "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb" + ], + "markers": "python_version >= '3.10'", + "version": "==1.14.0" }, "sentry-sdk": { "hashes": [ - "sha256:139a71a19f5e9eb5d3623942491ce03cf8ebc14ea2e39ba3e6fe79560d8a5b1f", - "sha256:c5aeb095ba226391d337dd42a6f9470d86c9fc236ecc71cfc7cd1942b45010c6" + "sha256:25006c7e68b75aaa5e6b9c6a420ece22e8d7daec4b7a906ffd3a8607b67c037b", + "sha256:ef1b3d54eb715825657cd4bb3cb42bb4dc85087bac14c56b0fd8c21abd968c9a" ], "index": "pypi", - "version": "==2.3.1" + "markers": "python_version >= '3.6'", + "version": "==2.7.1" }, "six": { "hashes": [ @@ -1616,6 +1651,7 @@ "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663" ], "index": "pypi", + "markers": "python_version >= '3.8'", 
"version": "==0.5.0" }, "statsmodels": { @@ -1646,6 +1682,7 @@ "sha256:f870d14a587ea58a3b596aa994c2ed889cc051f9e450e887d2c83656fc6a64bf" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==0.14.2" }, "trino": { @@ -1654,6 +1691,7 @@ "sha256:bd6454ef1c16cc630b4bccb73e975502b007ff9b96ddeb56711f6099ca41e3a2" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==0.328.0" }, "typing-extensions": { @@ -1661,7 +1699,7 @@ "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], - "markers": "python_version >= '3.8'", + "markers": "python_version < '3.13'", "version": "==4.12.2" }, "tzdata": { @@ -1698,12 +1736,11 @@ }, "urllib3": { "hashes": [ - "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3", - "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429" + "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", + "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" ], - "index": "pypi", - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.19" + "markers": "python_version >= '3.8'", + "version": "==2.2.2" }, "vine": { "hashes": [ @@ -1719,6 +1756,7 @@ "sha256:5eb5d78e730e1016e166b14a79a02d1b939cf1a58f2d559ff4f7c6f953284ebf" ], "index": "pypi", + "markers": "python_version >= '3.5'", "version": "==1.0.6" }, "wcwidth": { @@ -1730,11 +1768,12 @@ }, "whitenoise": { "hashes": [ - "sha256:8998f7370973447fac1e8ef6e8ded2c5209a7b1f67c1012866dbcd09681c3251", - "sha256:b1f9db9bf67dc183484d760b99f4080185633136a273a03f6436034a41064146" + "sha256:58c7a6cd811e275a6c91af22e96e87da0b1109e9a53bb7464116ef4c963bf636", + "sha256:a1ae85e01fdc9815d12fa33f17765bc132ed2c54fa76daf9e39e879dd93566f6" ], "index": "pypi", - "version": "==6.6.0" + "markers": "python_version >= '3.8'", + "version": "==6.7.0" } }, "develop": { @@ 
-1756,11 +1795,11 @@ }, "app-common-python": { "hashes": [ - "sha256:ae9e914d808d73be4694f53fcab88dcde19b1c4f0187b4a30909cd40cf12e939", - "sha256:d1a1f3f07ac51382d5986f79b1c2d47a063f0139889422db668431d1b9af0612" + "sha256:9388623371c7f4c04727b0057578d85bf6c7086961f10058370e023b7620eea3", + "sha256:bd3c7fea89eae26eac26d3ca5cddb45459408e0d84fa1b805d0a3c1ad782351b" ], "index": "pypi", - "version": "==0.2.6" + "version": "==0.2.7" }, "argh": { "hashes": [ @@ -1768,6 +1807,7 @@ "sha256:db1c34885804f7d4646c385dc2fb19b45298561322f4c15eae1b133993f9e323" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==0.31.2" }, "asgiref": { @@ -1784,15 +1824,16 @@ "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0" ], "index": "pypi", + "markers": "python_full_version >= '3.8.0'", "version": "==3.2.2" }, "azure-core": { "hashes": [ - "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f", - "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74" + "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472", + "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a" ], - "markers": "python_version >= '3.7'", - "version": "==1.30.1" + "markers": "python_version >= '3.8'", + "version": "==1.30.2" }, "azure-storage-blob": { "hashes": [ @@ -1800,6 +1841,7 @@ "sha256:eeb91256e41d4b5b9bad6a87fd0a8ade07dd58aa52344e2c8d2746e27a017d3b" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==12.20.0" }, "billiard": { @@ -1811,19 +1853,20 @@ }, "boto3": { "hashes": [ - "sha256:4460958d2b0c53bd2195b23ed5d45db2350e514486fe8caeb38b285b30742280", - "sha256:eeb11bca9b19d12baf93436fb8a16b8b824f1f7e8b9bcc722607e862c46b1b08" + "sha256:0b21b84db4619b3711a6f643d465a5a25e81231ee43615c55a20ff6b89c6cc3c", + "sha256:7cb697d67fd138ceebc6f789919ae370c092a50c6b0ccc4ef483027935502eab" ], "index": "pypi", - "version": "==1.34.114" + "markers": "python_version >= '3.8'", + "version": 
"==1.34.137" }, "botocore": { "hashes": [ - "sha256:5705f74fda009656a218ffaf4afd81228359160f2ab806ab8222d07e9da3a73b", - "sha256:606d1e55984d45e41a812badee292755f4db0233eed9cca63ea3bb8f5755507f" + "sha256:a980fa4adec4bfa23fff70a3512622e9412c69c791898a52cafc2458b0be6040", + "sha256:e29c8e9bfda0b20a1997792968e85868bfce42fefad9730f633a81adcff3f2ef" ], "markers": "python_version >= '3.8'", - "version": "==1.34.114" + "version": "==1.34.137" }, "cached-property": { "hashes": [ @@ -1838,6 +1881,7 @@ "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==5.3.3" }, "celery": { @@ -1851,11 +1895,11 @@ }, "certifi": { "hashes": [ - "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", - "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1" + "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516", + "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56" ], "markers": "python_version >= '3.6'", - "version": "==2024.2.2" + "version": "==2024.6.2" }, "cffi": { "hashes": [ @@ -2115,107 +2159,109 @@ }, "coverage": { "hashes": [ - "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523", - "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f", - "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d", - "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb", - "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0", - "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c", - "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98", - "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83", - "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8", - "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7", - 
"sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac", - "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84", - "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb", - "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3", - "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884", - "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614", - "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd", - "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807", - "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd", - "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8", - "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc", - "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db", - "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0", - "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08", - "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232", - "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d", - "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a", - "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1", - "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286", - "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303", - "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341", - "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84", - "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45", - "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc", - "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec", - "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd", 
- "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155", - "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52", - "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d", - "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485", - "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31", - "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d", - "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d", - "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d", - "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85", - "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce", - "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb", - "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974", - "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24", - "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56", - "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9", - "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35" - ], - "index": "pypi", - "version": "==7.5.3" + "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f", + "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d", + "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747", + "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f", + "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d", + "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f", + "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47", + "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e", + "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba", + 
"sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c", + "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b", + "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4", + "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7", + "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555", + "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233", + "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace", + "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805", + "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136", + "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4", + "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d", + "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806", + "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99", + "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8", + "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b", + "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5", + "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da", + "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0", + "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078", + "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f", + "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029", + "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353", + "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638", + "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9", + "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f", + "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7", 
+ "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3", + "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e", + "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016", + "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088", + "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4", + "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882", + "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7", + "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53", + "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d", + "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080", + "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5", + "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d", + "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c", + "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8", + "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633", + "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9", + "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==7.5.4" }, "crc-bonfire": { "hashes": [ - "sha256:248b5700662b16a6c8e7574ee0169c2a73ebd11fb31d85e96fdd1ae50f760c03", - "sha256:4d533bdfe693804924a1da13c2d8a1ac82b5cdd588d8b339d1e334efee6ee787" + "sha256:aeaefcb7525febd27df7d66f5ab858d18abff3c708b1685b78faa1d15c9d388d", + "sha256:e8763976dbc52fa496daae821cb36c959bbd954c6a82b0cd3482d4e27381360c" ], "index": "pypi", - "version": "==5.8.0" + "markers": "python_version >= '3.6'", + "version": "==5.9.0" }, "cryptography": { "hashes": [ - "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55", - 
"sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785", - "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b", - "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886", - "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82", - "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1", - "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda", - "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f", - "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68", - "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60", - "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7", - "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd", - "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582", - "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc", - "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858", - "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b", - "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2", - "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678", - "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13", - "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4", - "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8", - "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604", - "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477", - "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e", - "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a", - "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9", - "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14", 
- "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda", - "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da", - "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562", - "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2", - "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9" + "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad", + "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583", + "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b", + "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c", + "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1", + "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648", + "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949", + "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba", + "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c", + "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9", + "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d", + "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c", + "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e", + "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2", + "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d", + "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7", + "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70", + "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2", + "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7", + "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14", + 
"sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe", + "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e", + "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71", + "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961", + "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7", + "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c", + "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28", + "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842", + "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902", + "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801", + "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a", + "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e" ], "markers": "python_version >= '3.7'", - "version": "==42.0.7" + "version": "==42.0.8" }, "cycler": { "hashes": [ @@ -2227,31 +2273,32 @@ }, "debugpy": { "hashes": [ - "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb", - "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146", - "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8", - "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242", - "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0", - "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741", - "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539", - "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23", - "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3", - "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39", - "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd", - 
"sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9", - "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace", - "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42", - "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0", - "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7", - "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e", - "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234", - "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98", - "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703", - "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42", - "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099" - ], - "index": "pypi", - "version": "==1.8.1" + "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3", + "sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9", + "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859", + "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755", + "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca", + "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad", + "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6", + "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02", + "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d", + "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835", + "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3", + "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2", + "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326", + "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a", + 
"sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00", + "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634", + "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e", + "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031", + "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210", + "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa", + "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1", + "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.8.2" }, "distlib": { "hashes": [ @@ -2266,6 +2313,7 @@ "sha256:a17fcba2aad3fc7d46fdb23215095dbbd64e6174bf4589171e732b18b07e426a" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==4.2.13" }, "django-cprofile-middleware": { @@ -2277,27 +2325,29 @@ }, "faker": { "hashes": [ - "sha256:0158d47e955b6ec22134c0a74ebb7ed34fe600896208bafbf1008db831b17f04", - "sha256:bcbe31eee5ef4bbf87ce36c4eba53c01e2a1d912fde2a4d3528b430d2beb784f" + "sha256:0f60978314973de02c00474c2ae899785a42b2cf4f41b7987e93c132a2b8a4a9", + "sha256:886ee28219be96949cd21ecc96c4c742ee1680e77f687b095202c8def1a08f06" ], "index": "pypi", - "version": "==25.3.0" + "markers": "python_version >= '3.8'", + "version": "==26.0.0" }, "filelock": { "hashes": [ - "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f", - "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a" + "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb", + "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7" ], "markers": "python_version >= '3.8'", - "version": "==3.14.0" + "version": "==3.15.4" }, "flake8": { "hashes": [ - "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132", - 
"sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3" + "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a", + "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5" ], "index": "pypi", - "version": "==7.0.0" + "markers": "python_full_version >= '3.8.1'", + "version": "==7.1.0" }, "flower": { "hashes": [ @@ -2310,75 +2360,80 @@ }, "fonttools": { "hashes": [ - "sha256:00d9abf4b400f98fb895566eb298f60432b4b29048e3dc02807427b09a06604e", - "sha256:05e4291db6af66f466a203d9922e4c1d3e18ef16868f76f10b00e2c3b9814df2", - "sha256:15efb2ba4b8c2d012ee0bb7a850c2e4780c530cc83ec8e843b2a97f8b3a5fd4b", - "sha256:1dc626de4b204d025d029e646bae8fdbf5acd9217158283a567f4b523fda3bae", - "sha256:21921e5855c399d10ddfc373538b425cabcf8b3258720b51450909e108896450", - "sha256:309b617942041073ffa96090d320b99d75648ed16e0c67fb1aa7788e06c834de", - "sha256:346d08ff92e577b2dc5a0c228487667d23fe2da35a8b9a8bba22c2b6ba8be21c", - "sha256:35af630404223273f1d7acd4761f399131c62820366f53eac029337069f5826a", - "sha256:46cc5d06ee05fd239c45d7935aaffd060ee773a88b97e901df50478247472643", - "sha256:4b0b9eb0f55dce9c7278ad4175f1cbaed23b799dce5ecc20e3213da241584140", - "sha256:4b419207e53db1599b3d385afd4bca6692c219d53732890d0814a2593104d0e2", - "sha256:4c3ad89204c2d7f419436f1d6fde681b070c5e20b888beb57ccf92f640628cc9", - "sha256:52f6001814ec5e0c961cabe89642f7e8d7e07892b565057aa526569b9ebb711c", - "sha256:5ecb88318ff249bd2a715e7aec36774ce7ae3441128007ef72a39a60601f4a8f", - "sha256:70d87f2099006304d33438bdaa5101953b7e22e23a93b1c7b7ed0f32ff44b423", - "sha256:73ba38b98c012957940a04d9eb5439b42565ac892bba8cfc32e10d88e73921fe", - "sha256:7467161f1eed557dbcec152d5ee95540200b1935709fa73307da16bc0b7ca361", - "sha256:7dccf4666f716e5e0753f0fa28dad2f4431154c87747bc781c838b8a5dca990e", - "sha256:859399b7adc8ac067be8e5c80ef4bb2faddff97e9b40896a9de75606a43d0469", - "sha256:8873d6edd1dae5c088dd3d61c9fd4dd80c827c486fa224d368233e7f33dc98af", - 
"sha256:890e7a657574610330e42dd1e38d3b9e0a8cb0eff3da080f80995460a256d3dd", - "sha256:89b53386214197bd5b3e3c753895bad691de84726ced3c222a59cde1dd12d57b", - "sha256:8b186cd6b8844f6cf04a7e0a174bc3649d3deddbfc10dc59846a4381f796d348", - "sha256:9180775c9535389a665cae7c5282f8e07754beabf59b66aeba7f6bfeb32a3652", - "sha256:95e8a5975d08d0b624a14eec0f987e204ad81b480e24c5436af99170054434b8", - "sha256:9725687db3c1cef13c0f40b380c3c15bea0113f4d0231b204d58edd5f2a53d90", - "sha256:9a5d1b0475050056d2e3bc378014f2ea2230e8ae434eeac8dfb182aa8efaf642", - "sha256:9ed23a03b7d9f0e29ca0679eafe5152aeccb0580312a3fc36f0662e178b4791b", - "sha256:a4daf2751a98c69d9620717826ed6c5743b662ef0ae7bb33dc6c205425e48eba", - "sha256:a64e72d2c144630e017ac9c1c416ddf8ac43bef9a083bf81fe08c0695f0baa95", - "sha256:a791f002d1b717268235cfae7e4957b7fd132e92e2c5400e521bf191f1b3a9a5", - "sha256:b4cba644e2515d685d4ee3ca2fbb5d53930a0e9ec2cf332ed704dc341b145878", - "sha256:b9a22cf1adaae7b2ba2ed7d8651a4193a4f348744925b4b740e6b38a94599c5b", - "sha256:bb7d206fa5ba6e082ba5d5e1b7107731029fc3a55c71c48de65121710d817986", - "sha256:cf694159528022daa71b1777cb6ec9e0ebbdd29859f3e9c845826cafaef4ca29", - "sha256:d0184aa88865339d96f7f452e8c5b621186ef7638744d78bf9b775d67e206819", - "sha256:d272c7e173c3085308345ccc7fb2ad6ce7f415d777791dd6ce4e8140e354d09c", - "sha256:d2cc7906bc0afdd2689aaf88b910307333b1f936262d1d98f25dbf8a5eb2e829", - "sha256:e03dae26084bb3632b4a77b1cd0419159d2226911aff6dc4c7e3058df68648c6", - "sha256:e176249292eccd89f81d39f514f2b5e8c75dfc9cef8653bdc3021d06697e9eff", - "sha256:ebb183ed8b789cece0bd6363121913fb6da4034af89a2fa5408e42a1592889a8", - "sha256:fb8cd6559f0ae3a8f5e146f80ab2a90ad0325a759be8d48ee82758a0b89fa0aa" - ], - "markers": "python_version >= '3.8'", - "version": "==4.52.4" + "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d", + "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64", + "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2", + 
"sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4", + "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6", + "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b", + "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f", + "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380", + "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e", + "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749", + "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20", + "sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0", + "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4", + "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5", + "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206", + "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9", + "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac", + "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1", + "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce", + "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4", + "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12", + "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca", + "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d", + "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068", + "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796", + "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec", + "sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea", + "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f", + "sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005", 
+ "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2", + "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06", + "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109", + "sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002", + "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9", + "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a", + "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68", + "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6", + "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161", + "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd", + "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d", + "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee", + "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af" + ], + "markers": "python_version >= '3.8'", + "version": "==4.53.0" }, "google-api-core": { + "extras": [ + "grpc" + ], "hashes": [ - "sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251", - "sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10" + "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125", + "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd" ], "markers": "python_version >= '3.7'", - "version": "==2.19.0" + "version": "==2.19.1" }, "google-auth": { "hashes": [ - "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360", - "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415" + "sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23", + "sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871" ], "index": "pypi", - "version": "==2.29.0" + "markers": "python_version >= '3.7'", + "version": "==2.31.0" }, 
"google-cloud-bigquery": { "hashes": [ - "sha256:4b4597f9291b42102c9667d3b4528f801d4c8f24ef2b12dd1ecb881273330955", - "sha256:9fb72884fdbec9c4643cea6b7f21e1ecf3eb61d5305f87493d271dc801647a9e" + "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509", + "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9" ], "index": "pypi", - "version": "==3.23.1" + "markers": "python_version >= '3.7'", + "version": "==3.25.0" }, "google-cloud-core": { "hashes": [ @@ -2390,11 +2445,12 @@ }, "google-cloud-storage": { "hashes": [ - "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852", - "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f" + "sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388", + "sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1" ], "index": "pypi", - "version": "==2.16.0" + "markers": "python_version >= '3.7'", + "version": "==2.17.0" }, "google-crc32c": { "hashes": [ @@ -2472,19 +2528,19 @@ }, "google-resumable-media": { "hashes": [ - "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b", - "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08" + "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c", + "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33" ], "markers": "python_version >= '3.7'", - "version": "==2.7.0" + "version": "==2.7.1" }, "googleapis-common-protos": { "hashes": [ - "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e", - "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632" + "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945", + "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87" ], "markers": "python_version >= '3.7'", - "version": "==1.63.0" + "version": "==1.63.2" }, "gql": { "hashes": [ @@ -2502,61 +2558,61 @@ }, "grpcio": { "hashes": [ - 
"sha256:01615bbcae6875eee8091e6b9414072f4e4b00d8b7e141f89635bdae7cf784e5", - "sha256:02cc9cc3f816d30f7993d0d408043b4a7d6a02346d251694d8ab1f78cc723e7e", - "sha256:0b2dfe6dcace264807d9123d483d4c43274e3f8c39f90ff51de538245d7a4145", - "sha256:0da1d921f8e4bcee307aeef6c7095eb26e617c471f8cb1c454fd389c5c296d1e", - "sha256:0f30596cdcbed3c98024fb4f1d91745146385b3f9fd10c9f2270cbfe2ed7ed91", - "sha256:1ce4cd5a61d4532651079e7aae0fedf9a80e613eed895d5b9743e66b52d15812", - "sha256:1f279ad72dd7d64412e10f2443f9f34872a938c67387863c4cd2fb837f53e7d2", - "sha256:1f5de082d936e0208ce8db9095821361dfa97af8767a6607ae71425ac8ace15c", - "sha256:1f8ea18b928e539046bb5f9c124d717fbf00cc4b2d960ae0b8468562846f5aa1", - "sha256:2186d76a7e383e1466e0ea2b0febc343ffeae13928c63c6ec6826533c2d69590", - "sha256:23b6887bb21d77649d022fa1859e05853fdc2e60682fd86c3db652a555a282e0", - "sha256:257baf07f53a571c215eebe9679c3058a313fd1d1f7c4eede5a8660108c52d9c", - "sha256:2a18090371d138a57714ee9bffd6c9c9cb2e02ce42c681aac093ae1e7189ed21", - "sha256:2e8fabe2cc57a369638ab1ad8e6043721014fdf9a13baa7c0e35995d3a4a7618", - "sha256:3161a8f8bb38077a6470508c1a7301cd54301c53b8a34bb83e3c9764874ecabd", - "sha256:31890b24d47b62cc27da49a462efe3d02f3c120edb0e6c46dcc0025506acf004", - "sha256:3550493ac1d23198d46dc9c9b24b411cef613798dc31160c7138568ec26bc9b4", - "sha256:3b09c3d9de95461214a11d82cc0e6a46a6f4e1f91834b50782f932895215e5db", - "sha256:3d2004e85cf5213995d09408501f82c8534700d2babeb81dfdba2a3bff0bb396", - "sha256:46b8b43ba6a2a8f3103f103f97996cad507bcfd72359af6516363c48793d5a7b", - "sha256:579dd9fb11bc73f0de061cab5f8b2def21480fd99eb3743ed041ad6a1913ee2f", - "sha256:597191370951b477b7a1441e1aaa5cacebeb46a3b0bd240ec3bb2f28298c7553", - "sha256:59c68df3a934a586c3473d15956d23a618b8f05b5e7a3a904d40300e9c69cbf0", - "sha256:5a56797dea8c02e7d3a85dfea879f286175cf4d14fbd9ab3ef2477277b927baa", - "sha256:650a8150a9b288f40d5b7c1d5400cc11724eae50bd1f501a66e1ea949173649b", - "sha256:6d5541eb460d73a07418524fb64dcfe0adfbcd32e2dac0f8f90ce5b9dd6c046c", 
- "sha256:6ec5ed15b4ffe56e2c6bc76af45e6b591c9be0224b3fb090adfb205c9012367d", - "sha256:73f84f9e5985a532e47880b3924867de16fa1aa513fff9b26106220c253c70c5", - "sha256:753cb58683ba0c545306f4e17dabf468d29cb6f6b11832e1e432160bb3f8403c", - "sha256:7c1f5b2298244472bcda49b599be04579f26425af0fd80d3f2eb5fd8bc84d106", - "sha256:7e013428ab472892830287dd082b7d129f4d8afef49227a28223a77337555eaa", - "sha256:7f17572dc9acd5e6dfd3014d10c0b533e9f79cd9517fc10b0225746f4c24b58e", - "sha256:85fda90b81da25993aa47fae66cae747b921f8f6777550895fb62375b776a231", - "sha256:874c741c8a66f0834f653a69e7e64b4e67fcd4a8d40296919b93bab2ccc780ba", - "sha256:8d598b5d5e2c9115d7fb7e2cb5508d14286af506a75950762aa1372d60e41851", - "sha256:8de0399b983f8676a7ccfdd45e5b2caec74a7e3cc576c6b1eecf3b3680deda5e", - "sha256:a053584079b793a54bece4a7d1d1b5c0645bdbee729215cd433703dc2532f72b", - "sha256:a54362f03d4dcfae63be455d0a7d4c1403673498b92c6bfe22157d935b57c7a9", - "sha256:aca4f15427d2df592e0c8f3d38847e25135e4092d7f70f02452c0e90d6a02d6d", - "sha256:b2cbdfba18408389a1371f8c2af1659119e1831e5ed24c240cae9e27b4abc38d", - "sha256:b52e1ec7185512103dd47d41cf34ea78e7a7361ba460187ddd2416b480e0938c", - "sha256:c46fb6bfca17bfc49f011eb53416e61472fa96caa0979b4329176bdd38cbbf2a", - "sha256:c56c91bd2923ddb6e7ed28ebb66d15633b03e0df22206f22dfcdde08047e0a48", - "sha256:cf4c8daed18ae2be2f1fc7d613a76ee2a2e28fdf2412d5c128be23144d28283d", - "sha256:d7b7bf346391dffa182fba42506adf3a84f4a718a05e445b37824136047686a1", - "sha256:d9171f025a196f5bcfec7e8e7ffb7c3535f7d60aecd3503f9e250296c7cfc150" - ], - "version": "==1.64.0" + "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040", + "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122", + "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9", + "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f", + "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd", + 
"sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d", + "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33", + "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762", + "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294", + "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650", + "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b", + "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad", + "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1", + "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff", + "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59", + "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4", + "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027", + "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502", + "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae", + "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61", + "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb", + "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa", + "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5", + "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1", + "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9", + "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90", + "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b", + "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179", + "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e", + "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a", + "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489", 
+ "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d", + "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a", + "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2", + "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd", + "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb", + "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61", + "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca", + "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6", + "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602", + "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367", + "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62", + "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d", + "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd", + "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22", + "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309" + ], + "version": "==1.64.1" }, "grpcio-status": { "hashes": [ - "sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f", - "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a" + "sha256:2ec6e0777958831484a517e32b6ffe0a4272242eae81bff2f5c3707fa58b40e3", + "sha256:c50bd14eb6506d8580a6c553bea463d7c08499b2c0e93f6d1864c5e8eabb1066" ], - "version": "==1.62.2" + "version": "==1.64.1" }, "humanize": { "hashes": [ @@ -2582,14 +2638,6 @@ "markers": "python_version >= '3.5'", "version": "==3.7" }, - "importlib-resources": { - "hashes": [ - "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c", - "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145" - ], - "markers": "python_version < '3.10'", - "version": "==6.4.0" - }, "isodate": { "hashes": [ 
"sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96", @@ -2732,11 +2780,11 @@ }, "koku-nise": { "hashes": [ - "sha256:2cd97ad205ba4fd3e2610e34e3570b9b2f6c0ac0238fab203bb44332ed3f9ce9", - "sha256:75268c8f789ffe47aeb1b2919ffd763ccee0ef5c6d30a468b835502d70a41b3a" + "sha256:159d0cf568869fb436262d8adeada179fdc942382635dc8dad49b2bd23dfc6dc", + "sha256:7b20c2397f86cb3cd3de8e6c28ff6ace209310163a8e6328c30572132d57d7a3" ], "index": "pypi", - "version": "==4.5.2" + "version": "==4.5.6" }, "kombu": { "hashes": [ @@ -2749,11 +2797,11 @@ }, "kubernetes": { "hashes": [ - "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e", - "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459" + "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc", + "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d" ], "markers": "python_version >= '3.6'", - "version": "==29.0.0" + "version": "==30.1.0" }, "markupsafe": { "hashes": [ @@ -2854,6 +2902,7 @@ "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==3.9.0" }, "mccabe": { @@ -2866,11 +2915,12 @@ }, "model-bakery": { "hashes": [ - "sha256:c894ecea99d4b24a790c10f02140f695130cdba13e834296263e542e543b32ed", - "sha256:d44c36ea9891323c5c5eaa62224a2bc4daa9f5c31facdd54715d5d89ab6fbdb4" + "sha256:8f8ab4ba26a206ed848da9b1740b5006b5eeca8a67389efb28dbff37b362e802", + "sha256:fd13a251d20db78b790d80f75350a73af5d199e5151227b5dd35cb76f2f08fe8" ], "index": "pypi", - "version": "==1.18.0" + "markers": "python_version >= '3.8'", + "version": "==1.18.2" }, "multidict": { "hashes": [ @@ -2970,11 +3020,11 @@ }, "nodeenv": { "hashes": [ - "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1", - "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a" + "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", + 
"sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", - "version": "==1.9.0" + "version": "==1.9.1" }, "numpy": { "hashes": [ @@ -3015,7 +3065,6 @@ "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f" ], - "index": "pypi", "markers": "platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'x86_64'", "version": "==1.26.4" }, @@ -3029,11 +3078,11 @@ }, "oci": { "hashes": [ - "sha256:4bc8f3265e2ca68cf85f25f4da05678d638f1d713e2a37309e86ab76e4687d45", - "sha256:5f173539118751f0a29f418077557d57cc99b2d3195392843be497db243417de" + "sha256:1b6ba5fb87d98f20ef3389d3d25906f71f32f12fff57c0923eecacb0efa49eaa", + "sha256:e014058d05c6512fd316099f6b6de635d081a32907893c5bb80461078499c6f2" ], "index": "pypi", - "version": "==2.127.0" + "version": "==2.129.1" }, "ocviapy": { "hashes": [ @@ -3045,11 +3094,11 @@ }, "packaging": { "hashes": [ - "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", - "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" + "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", + "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" ], - "markers": "python_version >= '3.7'", - "version": "==24.0" + "markers": "python_version >= '3.8'", + "version": "==24.1" }, "parsedatetime": { "hashes": [ @@ -3060,78 +3109,89 @@ }, "pillow": { "hashes": [ - "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c", - "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2", - "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb", - "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d", - "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa", - 
"sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3", - "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1", - "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a", - "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd", - "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8", - "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999", - "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599", - "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936", - "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375", - "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d", - "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b", - "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60", - "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572", - "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3", - "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced", - "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f", - "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b", - "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19", - "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f", - "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d", - "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383", - "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795", - "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355", - "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57", - "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09", - "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b", 
- "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462", - "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf", - "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f", - "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a", - "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad", - "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9", - "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d", - "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45", - "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994", - "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d", - "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338", - "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463", - "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451", - "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591", - "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c", - "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd", - "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32", - "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9", - "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf", - "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5", - "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828", - "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3", - "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5", - "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2", - "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b", - 
"sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2", - "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475", - "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3", - "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb", - "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef", - "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015", - "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002", - "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170", - "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84", - "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57", - "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f", - "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27", - "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a" - ], - "markers": "python_version >= '3.8'", - "version": "==10.3.0" + "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885", + "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", + "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df", + "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", + "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c", + "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d", + "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd", + "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", + "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908", + "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", + "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", + "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", + 
"sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b", + "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", + "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a", + "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e", + "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", + "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", + "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b", + "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", + "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e", + "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab", + "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d", + "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", + "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d", + "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", + "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4", + "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3", + "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6", + "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", + "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", + "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b", + "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496", + "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0", + "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319", + "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", + "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856", + "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef", 
+ "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680", + "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b", + "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", + "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e", + "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", + "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", + "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8", + "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", + "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736", + "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da", + "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126", + "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd", + "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5", + "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b", + "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026", + "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b", + "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc", + "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46", + "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2", + "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c", + "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe", + "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984", + "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", + "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", + "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", + 
"sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b", + "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91", + "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3", + "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84", + "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1", + "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5", + "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be", + "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f", + "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", + "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", + "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e", + "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141", + "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", + "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22", + "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27", + "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e", + "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1" + ], + "markers": "python_version >= '3.8'", + "version": "==10.4.0" }, "platformdirs": { "hashes": [ @@ -3155,6 +3215,7 @@ "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5" ], "index": "pypi", + "markers": "python_version >= '3.9'", "version": "==3.7.1" }, "prometheus-client": { @@ -3176,28 +3237,28 @@ }, "proto-plus": { "hashes": [ - "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2", - "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c" + "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445", + "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12" ], - "markers": 
"python_version >= '3.6'", - "version": "==1.23.0" + "markers": "python_version >= '3.7'", + "version": "==1.24.0" }, "protobuf": { "hashes": [ - "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4", - "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8", - "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c", - "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d", - "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4", - "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa", - "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c", - "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019", - "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9", - "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c", - "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2" + "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505", + "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b", + "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38", + "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863", + "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470", + "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6", + "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce", + "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca", + "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5", + "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e", + "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714" ], "markers": "python_version >= '3.8'", - "version": "==4.25.3" + "version": "==5.27.2" }, "py": { "hashes": [ @@ -3225,11 +3286,11 @@ }, "pycodestyle": { 
"hashes": [ - "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f", - "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67" + "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c", + "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4" ], "markers": "python_version >= '3.8'", - "version": "==2.11.1" + "version": "==2.12.0" }, "pycparser": { "hashes": [ @@ -3245,6 +3306,7 @@ "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==6.3.0" }, "pyflakes": { @@ -3277,6 +3339,7 @@ "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" ], "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.9.0.post0" }, "python-dotenv": { @@ -3349,15 +3412,17 @@ "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f" ], "index": "pypi", + "markers": "python_version >= '3.6'", "version": "==6.0.1" }, "requests": { "hashes": [ - "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", - "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" + "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289", + "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c" ], "index": "pypi", - "version": "==2.32.3" + "markers": "python_version >= '3.8'", + "version": "==2.32.2" }, "requests-mock": { "hashes": [ @@ -3365,6 +3430,7 @@ "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401" ], "index": "pypi", + "markers": "python_version >= '3.5'", "version": "==1.12.1" }, "requests-oauthlib": { @@ -3377,11 +3443,12 @@ }, "responses": { "hashes": [ - "sha256:01ae6a02b4f34e39bffceb0fc6786b67a25eae919c6368d05eabc8d9576c2a66", - "sha256:2f0b9c2b6437db4b528619a77e5d565e4ec2a9532162ac1a131a83529db7be1a" + 
"sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb", + "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba" ], "index": "pypi", - "version": "==0.25.0" + "markers": "python_version >= '3.8'", + "version": "==0.25.3" }, "rsa": { "hashes": [ @@ -3393,19 +3460,19 @@ }, "s3transfer": { "hashes": [ - "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19", - "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d" + "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6", + "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69" ], "markers": "python_version >= '3.8'", - "version": "==0.10.1" + "version": "==0.10.2" }, "sh": { "hashes": [ - "sha256:9b2998f313f201c777e2c0061f0b1367497097ef13388595be147e2a00bf7ba1", - "sha256:ced8f2e081a858b66a46ace3703dec243779abbd5a1887ba7e3c34f34da70cd2" + "sha256:029d45198902bfb967391eccfd13a88d92f7cebd200411e93f99ebacc6afbb35", + "sha256:2f2f79a65abd00696cf2e9ad26508cf8abb6dba5745f40255f1c0ded2876926d" ], "markers": "python_version < '4.0' and python_full_version >= '3.8.1'", - "version": "==2.0.6" + "version": "==2.0.7" }, "six": { "hashes": [ @@ -3421,6 +3488,7 @@ "sha256:7bfd00be7ae147eb4a170a471578e1cd3f41f803238958b6b8efcf2c698a6aa9" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==2.2.0" }, "snowballstemmer": { @@ -3436,6 +3504,7 @@ "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==0.5.0" }, "tabulate": { @@ -3452,16 +3521,9 @@ "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==3.0.0" }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < 
'3.11' and python_version >= '3.7'", - "version": "==2.0.1" - }, "tornado": { "hashes": [ "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8", @@ -3476,7 +3538,6 @@ "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d", "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4" ], - "index": "pypi", "markers": "python_version >= '3.8'", "version": "==6.4.1" }, @@ -3486,31 +3547,40 @@ "sha256:d0d28f3fe6d6d7195c27f8b054c3e99d5451952b54abdae673b71609a581f640" ], "index": "pypi", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==3.28.0" }, "typing-extensions": { "hashes": [ - "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8", - "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594" + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" ], - "markers": "python_version >= '3.8'", - "version": "==4.12.0" + "markers": "python_version < '3.13'", + "version": "==4.12.2" }, "urllib3": { "hashes": [ - "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", - "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0" + "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", + "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.18" + "markers": "python_version >= '3.8'", + "version": "==2.2.2" + }, + "vine": { + "hashes": [ + "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", + "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0" + ], + "markers": "python_version >= '3.6'", + "version": "==5.1.0" }, "virtualenv": { "hashes": [ - 
"sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c", - "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b" + "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a", + "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589" ], "markers": "python_version >= '3.7'", - "version": "==20.26.2" + "version": "==20.26.3" }, "wait-for": { "hashes": [ @@ -3556,6 +3626,7 @@ "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5" ], "index": "pypi", + "markers": "python_version >= '3.8'", "version": "==4.0.1" }, "wcwidth": { @@ -3668,14 +3739,6 @@ ], "markers": "python_version >= '3.7'", "version": "==1.9.4" - }, - "zipp": { - "hashes": [ - "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee", - "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec" - ], - "markers": "python_version < '3.10'", - "version": "==3.19.0" } } } diff --git a/README.md b/README.md index ffd97be5ba..ac2062aa7e 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ To submit an issue please visit [https://issues.redhat.com/projects/COST/](). ## Getting Started -This project is developed using Python 3.9. Make sure you have at least this version installed. +This project is developed using Python 3.11. Make sure you have at least this version installed. 
### Prerequisites From bdbbfee2d60afa9a82ab751e7f286401dfae9384 Mon Sep 17 00:00:00 2001 From: David Nakabaale Date: Wed, 3 Jul 2024 10:26:08 -0400 Subject: [PATCH 26/26] [COST-5228] log outside for loop (#5202) * [COST-5228] log outside for loop * additional log clean up * add context to logs in _remove_expired_data func --- koku/masu/processor/_tasks/remove_expired.py | 15 ++-- .../processor/aws/aws_report_db_cleaner.py | 40 +++++------ .../azure/azure_report_db_cleaner.py | 28 ++++---- koku/masu/processor/expired_data_remover.py | 17 ++--- .../processor/gcp/gcp_report_db_cleaner.py | 28 ++++---- .../processor/oci/oci_report_db_cleaner.py | 30 ++++---- .../processor/ocp/ocp_report_db_cleaner.py | 31 +++++---- koku/masu/test/processor/test_tasks.py | 68 +++++++++++++++++-- 8 files changed, 160 insertions(+), 97 deletions(-) diff --git a/koku/masu/processor/_tasks/remove_expired.py b/koku/masu/processor/_tasks/remove_expired.py index b1160ae34e..da52687ac2 100644 --- a/koku/masu/processor/_tasks/remove_expired.py +++ b/koku/masu/processor/_tasks/remove_expired.py @@ -5,6 +5,7 @@ """Remove expired data asynchronous tasks.""" import logging +from api.common import log_json from masu.processor.expired_data_remover import ExpiredDataRemover LOG = logging.getLogger(__name__) @@ -23,17 +24,13 @@ def _remove_expired_data(schema_name, provider, simulate, provider_uuid=None): None """ - log_statement = ( - f"Remove expired data:\n" - f" schema_name: {schema_name}\n" - f" provider: {provider}\n" - f" simulate: {simulate}\n" - ) - LOG.info(log_statement) + + context = {"schema": schema_name, "provider_type": provider, "provider_uuid": provider_uuid, "simulate": simulate} + + LOG.info(log_json(msg="Remove expired data", context=context)) remover = ExpiredDataRemover(schema_name, provider) removed_data = remover.remove(simulate=simulate, provider_uuid=provider_uuid) if removed_data: status_msg = "Expired Data" if simulate else "Removed Data" - result_msg = f"{status_msg}:\n 
{str(removed_data)}" - LOG.info(result_msg) + LOG.info(log_json(msg=status_msg, removed_data=removed_data, context=context)) diff --git a/koku/masu/processor/aws/aws_report_db_cleaner.py b/koku/masu/processor/aws/aws_report_db_cleaner.py index d63808c9ef..2a2ad68e44 100644 --- a/koku/masu/processor/aws/aws_report_db_cleaner.py +++ b/koku/masu/processor/aws/aws_report_db_cleaner.py @@ -58,7 +58,7 @@ def purge_expired_report_data(self, expired_date=None, provider_uuid=None, simul removed_items = [] all_account_ids = set() - all_period_start = set() + all_period_starts = set() with AWSReportDBAccessor(self._schema) as accessor: if (expired_date is None and provider_uuid is None) or ( # noqa: W504 @@ -85,16 +85,16 @@ def purge_expired_report_data(self, expired_date=None, provider_uuid=None, simul } ) all_account_ids.add(bill.payer_account_id) - all_period_start.add(str(bill.billing_period_start)) - - LOG.info( - log_json( - msg="deleting provider billing data for AWS", - schema=self._schema, - provider_uuid=bill.payer_account_id, - start_date=bill.billing_period_start, - ) + all_period_starts.add(str(bill.billing_period_start)) + + LOG.info( + log_json( + msg="deleting provider billing data for AWS", + schema=self._schema, + accounts=all_account_ids, + period_starts=all_period_starts, ) + ) if not simulate: cascade_delete(bill_objects.query.model, bill_objects) @@ -105,7 +105,7 @@ def purge_expired_report_data_by_date(self, expired_date, simulate=False): partition_from = str(date(expired_date.year, expired_date.month, 1)) removed_items = [] all_account_ids = set() - all_period_start = set() + all_period_starts = set() with AWSReportDBAccessor(self._schema) as accessor: all_bill_objects = accessor.get_bill_query_before_date(expired_date).all() @@ -114,16 +114,16 @@ def purge_expired_report_data_by_date(self, expired_date, simulate=False): {"account_payer_id": bill.payer_account_id, "billing_period_start": str(bill.billing_period_start)} ) 
all_account_ids.add(bill.payer_account_id) - all_period_start.add(str(bill.billing_period_start)) - - LOG.info( - log_json( - msg="deleting provider billing data for AWS", - schema=self._schema, - provider_uuid=bill.payer_account_id, - start_date=bill.billing_period_start, - ) + all_period_starts.add(str(bill.billing_period_start)) + + LOG.info( + log_json( + msg="deleting provider billing data for AWS", + schema=self._schema, + accounts=all_account_ids, + period_starts=all_period_starts, ) + ) table_names = [ accessor._table_map["ocp_on_aws_daily_summary"], diff --git a/koku/masu/processor/azure/azure_report_db_cleaner.py b/koku/masu/processor/azure/azure_report_db_cleaner.py index 69db2d87e9..8c940ffe1c 100644 --- a/koku/masu/processor/azure/azure_report_db_cleaner.py +++ b/koku/masu/processor/azure/azure_report_db_cleaner.py @@ -77,14 +77,14 @@ def purge_expired_report_data(self, expired_date=None, provider_uuid=None, simul all_providers.add(bill.provider_id) all_period_starts.add(str(bill.billing_period_start)) - LOG.info( - log_json( - msg="deleting provider billing data", - schema=self._schema, - provider_uuid=bill.provider_id, - start_date=bill.billing_period_start, - ) + LOG.info( + log_json( + msg="deleting provider billing data", + schema=self._schema, + providers=all_providers, + period_starts=all_period_starts, ) + ) if not simulate: cascade_delete(bill_objects.query.model, bill_objects) @@ -117,14 +117,14 @@ def purge_expired_report_data_by_date(self, expired_date, simulate=False): all_providers.add(bill.provider_id) all_period_starts.add(str(bill.billing_period_start)) - LOG.info( - log_json( - msg="deleting provider billing data", - schema=self._schema, - provider_uuid=bill.provider_id, - start_date=bill.billing_period_start, - ) + LOG.info( + log_json( + msg="deleting provider billing data", + schema=self._schema, + providers=all_providers, + period_starts=all_period_starts, ) + ) if not simulate: # Will call trigger to detach, truncate, and drop 
partitions diff --git a/koku/masu/processor/expired_data_remover.py b/koku/masu/processor/expired_data_remover.py index b96d9e66b4..3dfefb69ac 100644 --- a/koku/masu/processor/expired_data_remover.py +++ b/koku/masu/processor/expired_data_remover.py @@ -9,6 +9,7 @@ from django.conf import settings +from api.common import log_json from api.models import Provider from api.utils import DateHelper from masu.config import Config @@ -138,10 +139,11 @@ def remove(self, simulate=False, provider_uuid=None): if not simulate: manifest_accessor.purge_expired_report_manifest_provider_uuid(provider_uuid, expiration_date) LOG.info( - """Removed CostUsageReportManifest for - provider uuid: %s before billing period: %s""", - provider_uuid, - expiration_date, + log_json( + msg="Removed CostUsageReportManifest", + provider_uuid=provider_uuid, + expiration_date=expiration_date, + ) ) else: expiration_date = self._calculate_expiration_date() @@ -151,10 +153,9 @@ def remove(self, simulate=False, provider_uuid=None): if not simulate: manifest_accessor.purge_expired_report_manifest(self._provider, expiration_date) LOG.info( - """Removed CostUsageReportManifest for - provider type: %s before billing period: %s""", - self._provider, - expiration_date, + log_json( + msg="Removed CostUsageReportManifest", provider=self._provider, expiration_date=expiration_date + ) ) return removed_data diff --git a/koku/masu/processor/gcp/gcp_report_db_cleaner.py b/koku/masu/processor/gcp/gcp_report_db_cleaner.py index 5ef5df28bc..6287805d4b 100644 --- a/koku/masu/processor/gcp/gcp_report_db_cleaner.py +++ b/koku/masu/processor/gcp/gcp_report_db_cleaner.py @@ -74,14 +74,14 @@ def purge_expired_report_data(self, expired_date=None, provider_uuid=None, simul all_providers.add(bill.provider_id) all_period_starts.add(str(bill.billing_period_start)) - LOG.info( - log_json( - msg="deleting provider billing data for GCP", - schema=self._schema, - provider_uuid=bill.provider_id, - 
start_date=bill.billing_period_start, - ) + LOG.info( + log_json( + msg="deleting provider billing data for GCP", + schema=self._schema, + providers=all_providers, + period_starts=all_period_starts, ) + ) if not simulate: cascade_delete(bill_objects.query.model, bill_objects) @@ -132,13 +132,13 @@ def purge_expired_report_data_by_date(self, expired_date, simulate=False): all_providers.add(bill.provider_id) all_period_starts.add(str(bill.billing_period_start)) - LOG.info( - log_json( - msg="deleting provider billing data for GCP", - schema=self._schema, - provider_uuid=bill.provider_id, - start_date=bill.billing_period_start, - ) + LOG.info( + log_json( + msg="deleting provider billing data for GCP", + schema=self._schema, + providers=all_providers, + period_starts=all_period_starts, ) + ) return removed_items diff --git a/koku/masu/processor/oci/oci_report_db_cleaner.py b/koku/masu/processor/oci/oci_report_db_cleaner.py index d14ee1a0dc..03d059358f 100644 --- a/koku/masu/processor/oci/oci_report_db_cleaner.py +++ b/koku/masu/processor/oci/oci_report_db_cleaner.py @@ -73,14 +73,15 @@ def purge_expired_report_data(self, expired_date=None, provider_uuid=None, simul ) all_providers.add(bill.provider_id) all_period_starts.add(str(bill.billing_period_start)) - LOG.info( - log_json( - msg="deleting provider billing data", - schema=self._schema, - provider_uuid=bill.provider_id, - start_date=bill.billing_period_start, - ) + + LOG.info( + log_json( + msg="deleting provider billing data", + schema=self._schema, + providers=all_providers, + period_starts=all_period_starts, ) + ) if not simulate: cascade_delete(bill_objects.query.model, bill_objects) @@ -128,13 +129,14 @@ def purge_expired_report_data_by_date(self, expired_date, simulate=False): ) all_providers.add(bill.provider_id) all_period_starts.add(str(bill.billing_period_start)) - LOG.info( - log_json( - msg="deleting provider billing data", - schema=self._schema, - provider_uuid=bill.provider_id, - 
start_date=bill.billing_period_start, - ) + + LOG.info( + log_json( + msg="deleting provider billing data", + schema=self._schema, + providers=all_providers, + period_starts=all_period_starts, ) + ) return removed_items diff --git a/koku/masu/processor/ocp/ocp_report_db_cleaner.py b/koku/masu/processor/ocp/ocp_report_db_cleaner.py index 61d1a3f204..b905247cb6 100644 --- a/koku/masu/processor/ocp/ocp_report_db_cleaner.py +++ b/koku/masu/processor/ocp/ocp_report_db_cleaner.py @@ -73,14 +73,16 @@ def purge_expired_report_data(self, expired_date=None, provider_uuid=None, simul all_cluster_ids.add(usage_period.cluster_id) all_period_starts.add(str(usage_period.report_period_start)) - LOG.info( - log_json( - msg="deleting provider billing data", - schema=self._schema, - provider_uuid=provider_uuid, - start_date=usage_period.report_period_start, - ) + LOG.info( + log_json( + msg="deleting provider billing data", + schema=self._schema, + provider_uuid=provider_uuid, + report_periods=all_report_periods, + cluster_ids=all_cluster_ids, + period_starts=all_period_starts, ) + ) if not simulate: cascade_delete(usage_period_objs.query.model, usage_period_objs) @@ -115,14 +117,15 @@ def purge_expired_report_data_by_date(self, expired_date, simulate=False): all_cluster_ids.add(usage_period.cluster_id) all_period_starts.add(str(usage_period.report_period_start)) - LOG.info( - log_json( - msg="removing all data related to cluster_ids", - cluster_ids=all_cluster_ids, - period_starts=all_period_starts, - schema=self._schema, - ) + LOG.info( + log_json( + msg="removing all data related to cluster_ids", + report_periods=all_report_periods, + cluster_ids=all_cluster_ids, + period_starts=all_period_starts, + schema=self._schema, ) + ) if not simulate: # Will call trigger to detach, truncate, and drop partitions diff --git a/koku/masu/test/processor/test_tasks.py b/koku/masu/test/processor/test_tasks.py index 275acc9706..e54d273140 100644 --- a/koku/masu/test/processor/test_tasks.py +++ 
b/koku/masu/test/processor/test_tasks.py @@ -645,18 +645,78 @@ class TestRemoveExpiredDataTasks(MasuTestCase): """Test cases for Processor Celery tasks.""" @patch.object(ExpiredDataRemover, "remove") - def test_remove_expired_data(self, fake_remover): + def test_remove_expired_data_simulate(self, fake_remover): """Test task.""" expected_results = [{"account_payer_id": "999999999", "billing_period_start": "2018-06-24 15:47:33.052509"}] fake_remover.return_value = expected_results - expected = "INFO:masu.processor._tasks.remove_expired:Expired Data:\n {}" + schema = self.schema + provider = Provider.PROVIDER_AWS + simulate = True + + expected_initial_remove_log = ( + "INFO:masu.processor._tasks.remove_expired:" + "{'message': 'Remove expired data', 'tracing_id': '', " + "'schema': '" + schema + "', " + "'provider_type': '" + provider + "', " + "'provider_uuid': " + str(None) + ", " + "'simulate': " + str(simulate) + "}" + ) + + expected_expired_data_log = ( + "INFO:masu.processor._tasks.remove_expired:" + "{'message': 'Expired Data', 'tracing_id': '', " + "'schema': '" + schema + "', " + "'provider_type': '" + provider + "', " + "'provider_uuid': " + str(None) + ", " + "'simulate': " + str(simulate) + ", " + "'removed_data': " + str(expected_results) + "}" + ) + + # disable logging override set in masu/__init__.py + logging.disable(logging.NOTSET) + with self.assertLogs("masu.processor._tasks.remove_expired") as logger: + remove_expired_data(schema_name=schema, provider=provider, simulate=simulate) + + self.assertIn(expected_initial_remove_log, logger.output) + self.assertIn(expected_expired_data_log, logger.output) + + @patch.object(ExpiredDataRemover, "remove") + def test_remove_expired_data_no_simulate(self, fake_remover): + """Test task.""" + expected_results = [{"account_payer_id": "999999999", "billing_period_start": "2018-06-24 15:47:33.052509"}] + fake_remover.return_value = expected_results + + schema = self.schema + provider = Provider.PROVIDER_AWS + 
simulate = False + + expected_initial_remove_log = ( + "INFO:masu.processor._tasks.remove_expired:" + "{'message': 'Remove expired data', 'tracing_id': '', " + "'schema': '" + schema + "', " + "'provider_type': '" + provider + "', " + "'provider_uuid': " + str(None) + ", " + "'simulate': " + str(simulate) + "}" + ) + + expected_expired_data_log = ( + "INFO:masu.processor._tasks.remove_expired:" + "{'message': 'Expired Data', 'tracing_id': '', " + "'schema': '" + schema + "', " + "'provider_type': '" + provider + "', " + "'provider_uuid': " + str(None) + ", " + "'simulate': " + str(simulate) + ", " + "'removed_data': " + str(expected_results) + "}" + ) # disable logging override set in masu/__init__.py logging.disable(logging.NOTSET) with self.assertLogs("masu.processor._tasks.remove_expired") as logger: - remove_expired_data(schema_name=self.schema, provider=Provider.PROVIDER_AWS, simulate=True) - self.assertIn(expected.format(str(expected_results)), logger.output) + remove_expired_data(schema_name=schema, provider=provider, simulate=simulate) + + self.assertIn(expected_initial_remove_log, logger.output) + self.assertNotIn(expected_expired_data_log, logger.output) class TestUpdateSummaryTablesTask(MasuTestCase):