Merge branch 'improvement/ZENKO-4879' into w/2.7/improvement/ZENKO-4879
Showing 12 changed files with 222 additions and 107 deletions.
@@ -63,3 +63,18 @@ jobs:
             replicas=3
             quorum=3
           github_token: ${{ secrets.GIT_ACCESS_TOKEN }}
+
+      - name: Render and test dr alerts
+        uses: scality/[email protected]
+        with:
+          alert_file_path: monitoring/dr/alerts.yaml
+          test_file_path: monitoring/dr/alerts.test.yaml
+          alert_inputs: |
+            namespace=zenko
+            kafka_connect_src_job=artesca-data-dr-source-base-queue-connector-metrics
+            kafka_connect_sink_job=artesca-data-dr-base-queue-connector-metrics
+            dr_sink_instance=artesca-data-dr
+            rto_alert_threshold=30
+            mongo_jobs=zenko/data-db-mongodb-sharded-shard.*
+            lifecycle_jobs=artesca-data-backbeat-lifecycle-.*-headless
+          github_token: ${{ secrets.GIT_ACCESS_TOKEN }}
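The alert_inputs above are key=value pairs that the render step substitutes into monitoring/dr/alerts.yaml before the unit tests run. A minimal sketch of a rule consuming one of them, assuming Jinja-style placeholders (the placeholder syntax and the expression are illustrative, not taken from the actual alerts file):

      - alert: KafkaConnectOutageSource
        # {{ kafka_connect_src_job }} stands for the value passed via alert_inputs
        # (the placeholder syntax is an assumption about the renderer).
        expr: increase(kafka_connect_task_error_total_record_errors{job="{{ kafka_connect_src_job }}"}[2m]) > 0
        labels:
          severity: critical
        annotations:
          summary: 'Kafka Connect not working'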
@@ -265,26 +265,7 @@ jobs:

       - name: Extract environment
         run: |-
-          sudo curl --fail -L https://github.com/mikefarah/yq/releases/download/${{ env.YQ_VERSION}}/${{ env.YQ_BINARY }} -o /usr/bin/yq
-          sudo chmod +x /usr/bin/yq
-          get_image_from_deps() {
-            local dep_name=$1
-            yq eval ".$dep_name | (.sourceRegistry // \"docker.io\") + \"/\" + .image" deps.yaml
-          }
-          cd solution/
-          cat <<EOF >> $GITHUB_ENV
-          scala_version=$(yq eval '.kafka.tag | split("-").[0]' deps.yaml)
-          kafka_version=$(yq eval '.kafka.tag | split("-").[1]' deps.yaml)
-          KAFKA_IMAGE=$(get_image_from_deps kafka)
-          KAFKA_TAG=$(yq eval '.kafka.tag' deps.yaml)
-          KAFKA_CONNECT_IMAGE=$(get_image_from_deps kafka-connect)
-          KAFKA_CONNECT_TAG=$(yq eval '.kafka-connect.tag' deps.yaml)
-          JMX_JAVAAGENT_IMAGE=$(get_image_from_deps jmx-javaagent)
-          JMX_JAVAAGENT_TAG=$(yq eval '.jmx-javaagent.tag' deps.yaml)
-          MONGODB_CONNECTOR_TAG=$(yq eval '.mongodb-connector.tag' deps.yaml)
-          EOF
+          solution/kafka_build_vars.sh >> $GITHUB_ENV
       - name: Check kafka & kafka-connect versions match
         run: |-
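The inline environment extraction above moves into solution/kafka_build_vars.sh, which is not shown in this commit. A plausible sketch of that script, reconstructed from the removed step (the path, the assumption that yq is already on PATH, and the exact variable set are assumptions):

#!/usr/bin/env bash
# Sketch of solution/kafka_build_vars.sh: print KEY=value pairs suitable for
# appending to $GITHUB_ENV, reconstructed from the removed "Extract environment" step.
set -eu
cd "$(dirname "$0")"

get_image_from_deps() {
    local dep_name=$1
    yq eval ".$dep_name | (.sourceRegistry // \"docker.io\") + \"/\" + .image" deps.yaml
}

cat <<EOF
scala_version=$(yq eval '.kafka.tag | split("-").[0]' deps.yaml)
kafka_version=$(yq eval '.kafka.tag | split("-").[1]' deps.yaml)
KAFKA_IMAGE=$(get_image_from_deps kafka)
KAFKA_TAG=$(yq eval '.kafka.tag' deps.yaml)
KAFKA_CONNECT_IMAGE=$(get_image_from_deps kafka-connect)
KAFKA_CONNECT_TAG=$(yq eval '.kafka-connect.tag' deps.yaml)
JMX_JAVAAGENT_IMAGE=$(get_image_from_deps jmx-javaagent)
JMX_JAVAAGENT_TAG=$(yq eval '.jmx-javaagent.tag' deps.yaml)
MONGODB_CONNECTOR_TAG=$(yq eval '.mongodb-connector.tag' deps.yaml)
EOF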
@@ -298,9 +279,9 @@ jobs:
           build-args: |-
             scala_version=${{ env.scala_version }}
             kafka_version=${{ env.kafka_version }}
-          tags: "${{ env.KAFKA_IMAGE }}:${{ env.KAFKA_TAG }}"
-          cache-from: type=gha,scope=$GITHUB_REF_NAME-kafka
-          cache-to: type=gha,mode=max,scope=$GITHUB_REF_NAME-kafka
+          tags: "${{ env.KAFKA_IMAGE }}:${{ env.KAFKA_TAG }}-${{ env.BUILD_TREE_HASH }}"
+          cache-from: type=gha,scope=kafka-${{ env.KAFKA_TAG }}
+          cache-to: type=gha,mode=max,scope=kafka-${{ env.KAFKA_TAG }}

       - name: Build and push kafka-connect
         uses: docker/build-push-action@v5
@@ -311,11 +292,11 @@ jobs:
             JMX_JAVAAGENT_IMAGE=${{ env.JMX_JAVAAGENT_IMAGE }}
             JMX_JAVAAGENT_TAG=${{ env.JMX_JAVAAGENT_TAG }}
             KAFKA_IMAGE=${{ env.KAFKA_IMAGE }}
-            KAFKA_TAG=${{ env.KAFKA_TAG }}
+            KAFKA_TAG=${{ env.KAFKA_TAG }}-${{ env.BUILD_TREE_HASH }}
             MONGODB_CONNECTOR_TAG=${{ env.MONGODB_CONNECTOR_TAG }}
-          tags: "${{ env.KAFKA_CONNECT_IMAGE }}:${{ env.KAFKA_CONNECT_TAG }}"
-          cache-from: type=gha,scope=$GITHUB_REF_NAME-kafka-connect
-          cache-to: type=gha,mode=max,scope=$GITHUB_REF_NAME-kafka-connect
+          tags: "${{ env.KAFKA_CONNECT_IMAGE }}:${{ env.KAFKA_CONNECT_TAG }}-${{ env.BUILD_TREE_HASH }}"
+          cache-from: type=gha,scope=kafka-connect-${{ env.KAFKA_CONNECT_TAG }}
+          cache-to: type=gha,mode=max,scope=kafka-connect-${{ env.KAFKA_CONNECT_TAG }}

   build-test-image:
     runs-on: ubuntu-20.04
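The new tags append ${{ env.BUILD_TREE_HASH }}, which is not defined in any hunk shown here; presumably it is one of the values emitted by kafka_build_vars.sh. A hedged sketch of one way such a content hash could be derived (the path and mechanism are assumptions, for illustration only):

      - name: Compute build tree hash (illustrative)
        run: |-
          # Tag images by the content of the kafka build context so a rebuild
          # only produces a new tag when the build inputs actually change.
          echo "BUILD_TREE_HASH=$(git rev-parse HEAD:solution/kafka)" >> $GITHUB_ENV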
@@ -738,35 +719,7 @@ jobs:
           source: /tmp/artifacts
         if: always()

-  write-final-failed-status:
-    runs-on: ubuntu-latest
-    needs:
-      - check-dashboard-versions
-      - build-doc
-      - build-iso
-      - build-kafka
-      - build-test-image
-      - end2end-http
-      - end2end-https
-      - end2end-sharded
-      - end2end-pra
-      - ctst-end2end-sharded
-    if: failure()
-    steps:
-      - name: write failure status
-        run: |
-          mkdir -p artifacts
-          echo -n "FAILED" > artifacts/.final_status
-      - name: Upload artifacts
-        uses: scality/action-artifacts@v4
-        with:
-          method: upload
-          url: https://artifacts.scality.net
-          user: ${{ secrets.ARTIFACTS_USER }}
-          password: ${{ secrets.ARTIFACTS_PASSWORD }}
-          source: artifacts
-
-  write-final-success-status:
+  write-final-status:
     runs-on: ubuntu-latest
     needs:
       - check-dashboard-versions
@@ -780,17 +733,11 @@ jobs: | |
- end2end-sharded | ||
- end2end-pra | ||
- ctst-end2end-sharded | ||
if: success() | ||
steps: | ||
- name: write success status | ||
run: | | ||
mkdir -p artifacts | ||
echo -n "SUCCESSFUL" > artifacts/.final_status | ||
- name: Upload artifacts | ||
uses: scality/action-artifacts@v4 | ||
- name: Upload final status | ||
if: always() | ||
uses: scality/actions/[email protected] | ||
with: | ||
method: upload | ||
url: https://artifacts.scality.net | ||
user: ${{ secrets.ARTIFACTS_USER }} | ||
password: ${{ secrets.ARTIFACTS_PASSWORD }} | ||
source: artifacts | ||
ARTIFACTS_USER: ${{ secrets.ARTIFACTS_USER }} | ||
ARTIFACTS_PASSWORD: ${{ secrets.ARTIFACTS_PASSWORD }} | ||
JOBS_RESULTS: ${{ join(needs.*.result) }} |
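The former write-final-failed-status and write-final-success-status jobs collapse into a single write-final-status job that always runs and hands the joined results of its dependencies to the upload action. A minimal sketch of how a final status can be derived from such a joined string (this presumably happens inside scality/actions/action-artifacts; the step below is illustrative, not that action's implementation):

      - name: Derive final status (illustrative)
        env:
          JOBS_RESULTS: ${{ join(needs.*.result) }}
        run: |
          # JOBS_RESULTS looks like "success,success,failure,skipped".
          mkdir -p artifacts
          if [[ "$JOBS_RESULTS" =~ (failure|cancelled) ]]; then
            echo -n "FAILED" > artifacts/.final_status
          else
            echo -n "SUCCESSFUL" > artifacts/.final_status
          fi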
@@ -0,0 +1,88 @@
# These tests are meant to go hand-in-hand with the rendered alert rule.
# Use github.com/scality/action-prom-render-test@python-renderer python module
#
# Render the alerts file with
# gen-alert render alerts.yaml
evaluation_interval: 1m
rule_files:
  - alerts.rendered.yaml

tests:
  - interval: 1m
    input_series:
      - series: up{job="artesca-data-dr-source-base-queue-connector-metrics", namespace="zenko"}
        values: _x7
      - series: up{drSinkInstance="artesca-data-dr", job="artesca-data-dr-base-queue-connector-metrics", namespace="zenko"}
        values: 1x7
      - series: kafka_connect_task_error_total_record_errors{job="artesca-data-dr-source-base-queue-connector-metrics"}
        values: 0 1 2x5
      - series: kafka_connect_task_error_total_record_failures{job="artesca-data-dr-source-base-queue-connector-metrics"}
        values: 0x7
      - series: kafka_connect_task_error_total_record_errors{drSinkInstance="artesca-data-dr", job="artesca-data-dr-base-queue-connector-metrics"}
        values: 0 1 2x5
      - series: kafka_connect_task_error_total_record_failures{drSinkInstance="artesca-data-dr", job="artesca-data-dr-base-queue-connector-metrics"}
        values: 0 1x6
      - series: s3_lifecycle_last_timestamp_ms{ job="artesca-data-backbeat-lifecycle-.*-headless", namespace="zenko"}
        values: 10000000x3
      - series: mongodb_ss_repl_lastWrite_lastWriteDate{drSinkInstance="artesca-data-dr", job="zenko/data-db-mongodb-sharded-shard.*", namespace="zenko"}
        values: 5000000x3
    alert_rule_test:
      - alertname: DrResourcePausedWhileOtherRunning
        eval_time: 1m
        exp_alerts:
          - exp_labels:
              severity: warning
            exp_annotations:
              summary: 'DR Resource Paused While Other Running'
              description: 'One site''s DR resource is paused while the other is not. This could lead to data inconsistency between sites.'
      - alertname: DrResourcePausedForTooLong
        eval_time: 4m59s
        exp_alerts: []
      - alertname: DrResourcePausedForTooLong
        eval_time: 5m
        exp_alerts:
          - exp_labels:
              severity: "warning"
            exp_annotations:
              summary: 'DR resource paused for too long'
              description: 'The DR resource has been paused for more than 5 minutes.'
      - alertname: DrResourcePausedForTooLong
        eval_time: 6m # Grace period check
        exp_alerts:
          - exp_labels:
              severity: "warning"
            exp_annotations:
              summary: 'DR resource paused for too long'
              description: 'The DR resource has been paused for more than 5 minutes.'
      - alertname: KafkaConnectOutageSource
        eval_time: 1m
        exp_alerts: []
      - alertname: KafkaConnectOutageSource
        eval_time: 2m
        exp_alerts:
          - exp_labels:
              severity: critical
            exp_annotations:
              description: >-
                Kafka-connect on source is not working nominally. The rate of errors or failures has exceeded 0. This could lead DR to get out of sync if not addressed promptly.
              summary: 'Kafka Connect not working'
      - alertname: KafkaConnectOutageSink
        eval_time: 1m
        exp_alerts: []
      - alertname: KafkaConnectOutageSink
        eval_time: 2m
        exp_alerts:
          - exp_labels:
              severity: critical
            exp_annotations:
              description: >-
                Kafka-connect on sink is not working nominally. The rate of errors or failures has exceeded 0. This could lead DR to get out of sync if not addressed promptly.
              summary: 'Kafka Connect not working'
      - alertname: WriteTimesLatency
        eval_time: 1m
        exp_alerts:
          - exp_labels:
              severity: critical
            exp_annotations:
              summary: 'Write times latency'
              description: 'The difference in write times between the source and protected sites is more than half of the Recovery Time Objective (12 hours). This could lead to data inconsistency between sites.'
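For orientation, a minimal sketch of the kind of rendered rule the first test case exercises, reconstructed from the input series and the expected alert (the actual monitoring/dr/alerts.yaml may define it differently):

groups:
  - name: dr.rules  # illustrative
    rules:
      - alert: DrResourcePausedWhileOtherRunning
        # Fires when the sink connector is still scraped and up while the source
        # connector exposes no 'up' samples (i.e. its DR resource is paused).
        expr: |
          (count(up{job="artesca-data-dr-base-queue-connector-metrics", namespace="zenko"} == 1) > 0)
          unless
          (count(up{job="artesca-data-dr-source-base-queue-connector-metrics", namespace="zenko"} == 1) > 0)
        labels:
          severity: warning
        annotations:
          summary: 'DR Resource Paused While Other Running'
          description: "One site's DR resource is paused while the other is not. This could lead to data inconsistency between sites."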