add grafonnet support (#641)
ebattat authored Aug 28, 2023
1 parent 1371c27 commit b3cf076
Showing 30 changed files with 16,255 additions and 9,096 deletions.
82 changes: 82 additions & 0 deletions .github/workflows/Nightly_Perf_Env_CI.yml
@@ -217,6 +217,7 @@ jobs:
if: always()
run: |
if [[ "${{ job.status }}" == "failure" || "${{ job.status }}" == "cancelled" ]]; then echo "status=${{ job.status }}" >> $GITHUB_OUTPUT; fi
finalize_nightly:
name: finalize nightly
runs-on: ubuntu-latest
@@ -314,3 +315,84 @@ jobs:
echo "if [[ \"\$(sudo podman images -q quay.io/ebattat/benchmark-runner 2> /dev/null)\" != \"\" ]]; then sudo podman rmi -f \$(sudo podman images -q quay.io/ebattat/benchmark-runner 2> /dev/null); fi" > "$RUNNER_PATH/remove_image.sh"
scp -r "$RUNNER_PATH/remove_image.sh" provision:"/tmp/remove_image.sh"
ssh -t provision "chmod +x /tmp/remove_image.sh;/tmp/./remove_image.sh;rm -f /tmp/remove_image.sh"
update_grafana_dashboard:
# Update the Grafana dashboard with the latest product versions / grafonnet code
name: update_grafana_dashboard
needs: [ initialize_nightly, workload, finalize_nightly ]
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [[ -f requirements.txt ]]; then pip install -r requirements.txt; fi
- name: Update latest product versions in main.libsonnet
env:
MAIN_LIBSONNET_PATH: ${{ secrets.PERF_MAIN_LIBSONNET_PATH }}
ELASTICSEARCH: ${{ secrets.PERF_ELASTICSEARCH }}
ELASTICSEARCH_PORT: ${{ secrets.PERF_ELASTICSEARCH_PORT }}
ELASTICSEARCH_USER: ${{ secrets.PERF_ELASTICSEARCH_USER }}
ELASTICSEARCH_PASSWORD: ${{ secrets.PERF_ELASTICSEARCH_PASSWORD }}
run: |
python "$GITHUB_WORKSPACE"/grafonnet_generator/grafana/update_versions_main_libsonnet.py
- name: Generate grafana dashboard.json using grafonnet container
run: |
sudo podman run --rm --name run_grafonnet -v "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf:/app --privileged quay.io/ebattat/run_grafonnet:latest
- name: Check for changes in dashboard.json
id: json_check_changes
run: |
git diff --quiet "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json || echo "changes=true" >> "$GITHUB_OUTPUT"
- name: Update the Grafana dashboard if any changes are detected in dashboard.json
if: steps.json_check_changes.outputs.changes == 'true'
env:
PERF_GRAFANA_URL: ${{ secrets.PERF_GRAFANA_URL }}
PERF_GRAFANA_API_KEY: ${{ secrets.PERF_GRAFANA_API_KEY }}
GRAFANA_JSON: ${{ secrets.PERF_GRAFANA_JSON }}
run: |
# Back up dashboard.json before the Python code adds the dashboard version
cp -p "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard_backup.json
# Update new product versions and override the Grafana dashboard
python "$GITHUB_WORKSPACE"/benchmark_runner/grafana/update_grafana_dashboard.py
# Restore dashboard.json after the Python code adds the dashboard version
cp -p "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard_backup.json "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json
- name: Commit dashboard.json if any changes are detected
if: steps.json_check_changes.outputs.changes == 'true'
run: |
git checkout main
git config --global user.email "${{ secrets.EMAIL }}"
git config --global user.name "${{ secrets.USER_NAME }}"
git add "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json
git commit -m "Update grafana json file"
# Pull the latest changes from the remote main branch
git pull origin main
# Push the changes to the remote main branch
git push
- name: Check for new product versions in main.libsonnet
id: libsonnet_check_changes
run: |
git diff --quiet "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/jsonnet/main.libsonnet || echo "changes=true" >> "$GITHUB_OUTPUT"
- name: Commit main.libsonnet if any changes are detected
if: steps.libsonnet_check_changes.outputs.changes == 'true'
run: |
git checkout main
git config --global user.email "${{ secrets.EMAIL }}"
git config --global user.name "${{ secrets.USER_NAME }}"
git add "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/jsonnet/main.libsonnet
git commit -m "Update grafana main.libsonnet file"
# Pull the latest changes from the remote main branch
git pull origin main
# Push the changes to the remote main branch
git push
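The "Update latest product versions in main.libsonnet" step above calls grafonnet_generator/grafana/update_versions_main_libsonnet.py, which is not shown in this diff. The sketch below is only an illustration, under assumptions, of what such a version-refresh step could look like: it queries Elasticsearch (via the same ELASTICSEARCH* variables the step exports) for the most recently reported product version and patches it into main.libsonnet before the grafonnet container regenerates dashboard.json. The index name, field name, and the libsonnet key are hypothetical.

```python
# Hypothetical sketch (not the repository's update_versions_main_libsonnet.py):
# fetch the newest reported product version from Elasticsearch and patch it
# into main.libsonnet before grafonnet regenerates dashboard.json.
import os
import re
import requests

ES_URL = f"http://{os.environ['ELASTICSEARCH']}:{os.environ['ELASTICSEARCH_PORT']}"
ES_AUTH = (os.environ['ELASTICSEARCH_USER'], os.environ['ELASTICSEARCH_PASSWORD'])
LIBSONNET = os.environ['MAIN_LIBSONNET_PATH']


def latest_value(index: str, field: str) -> str:
    """Return the newest value of `field` in `index` (index/field names are assumptions)."""
    body = {"size": 1, "sort": [{"timestamp": {"order": "desc"}}], "_source": [field]}
    response = requests.post(f"{ES_URL}/{index}/_search", json=body, auth=ES_AUTH, timeout=30)
    response.raise_for_status()
    return response.json()["hits"]["hits"][0]["_source"][field]


ocp_version = latest_value("perf-ci-results", "ocp_version")  # hypothetical index/field

# Replace a hypothetical `ocp_version: '...'` entry in main.libsonnet.
with open(LIBSONNET) as f:
    content = f.read()
with open(LIBSONNET, "w") as f:
    f.write(re.sub(r"ocp_version:\s*'[^']*'", f"ocp_version: '{ocp_version}'", content))
```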
93 changes: 88 additions & 5 deletions .github/workflows/Perf_Env_Build_Test_CI.yml
@@ -135,6 +135,9 @@ jobs:
IBM_BUCKET: ${{ secrets.IBM_BUCKET }}
IBM_KEY: ${{ secrets.IBM_KEY }}
RUN_ARTIFACTS_URL: ${{ secrets.PERF_RUN_ARTIFACTS_URL }}
GRAFANA_URL: ${{ secrets.PERF_GRAFANA_URL }}
GRAFANA_API_KEY: ${{ secrets.PERF_GRAFANA_API_KEY }}
GRAFANA_JSON: ${{ secrets.PERF_GRAFANA_JSON }}
run: |
# Install Dockerfile content for pytest
# install oc/kubectl
@@ -261,9 +264,90 @@ jobs:
echo '⌛ Wait 30 sec till image will be updated in quay.io'
sleep 30
update_grafana_dashboard:
# Update the Grafana dashboard with the latest product versions / grafonnet code
name: update_grafana_dashboard
needs: [ unittest, integration_test, pypi_upload, pypi_validate ]
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [[ -f requirements.txt ]]; then pip install -r requirements.txt; fi
- name: Update latest product versions in main.libsonnet
env:
MAIN_LIBSONNET_PATH: ${{ secrets.PERF_MAIN_LIBSONNET_PATH }}
ELASTICSEARCH: ${{ secrets.PERF_ELASTICSEARCH }}
ELASTICSEARCH_PORT: ${{ secrets.PERF_ELASTICSEARCH_PORT }}
ELASTICSEARCH_USER: ${{ secrets.PERF_ELASTICSEARCH_USER }}
ELASTICSEARCH_PASSWORD: ${{ secrets.PERF_ELASTICSEARCH_PASSWORD }}
run: |
python "$GITHUB_WORKSPACE"/grafonnet_generator/grafana/update_versions_main_libsonnet.py
- name: Generate grafana dashboard.json using grafonnet container
run: |
sudo podman run --rm --name run_grafonnet -v "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf:/app --privileged quay.io/ebattat/run_grafonnet:latest
- name: Check for changes in dashboard.json
id: json_check_changes
run: |
git diff --quiet "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json || echo "changes=true" >> "$GITHUB_OUTPUT"
- name: Update the Grafana dashboard if any changes are detected in dashboard.json
if: steps.json_check_changes.outputs.changes == 'true'
env:
PERF_GRAFANA_URL: ${{ secrets.PERF_GRAFANA_URL }}
PERF_GRAFANA_API_KEY: ${{ secrets.PERF_GRAFANA_API_KEY }}
GRAFANA_JSON: ${{ secrets.PERF_GRAFANA_JSON }}
run: |
# Back up dashboard.json before the Python code adds the dashboard version
cp -p "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard_backup.json
# Update new product versions and override the Grafana dashboard
python "$GITHUB_WORKSPACE"/benchmark_runner/grafana/update_grafana_dashboard.py
# Restore dashboard.json after the Python code adds the dashboard version
cp -p "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard_backup.json "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json
- name: Commit dashboard.json if any changes are detected
if: steps.json_check_changes.outputs.changes == 'true'
run: |
git checkout main
git config --global user.email "${{ secrets.EMAIL }}"
git config --global user.name "${{ secrets.USER_NAME }}"
git add "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/dashboard.json
git commit -m "Update grafana json file"
# Pull the latest changes from the remote main branch
git pull origin main
# Push the changes to the remote main branch
git push
- name: Check for new product versions in main.libsonnet
id: libsonnet_check_changes
run: |
git diff --quiet "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/jsonnet/main.libsonnet || echo "changes=true" >> "$GITHUB_OUTPUT"
- name: Commit main.libsonnet if any changes are detected
if: steps.libsonnet_check_changes.outputs.changes == 'true'
run: |
git checkout main
git config --global user.email "${{ secrets.EMAIL }}"
git config --global user.name "${{ secrets.USER_NAME }}"
git add "$GITHUB_WORKSPACE"/benchmark_runner/grafana/perf/jsonnet/main.libsonnet
git commit -m "Update grafana main.libsonnet file"
# Pull the latest changes from the remote main branch
git pull origin main
# Push the changes to the remote main branch
git push
bump_version:
name: bump_version
needs: [unittest, integration_test, pypi_upload, pypi_validate, quay_upload]
needs: [unittest, integration_test, pypi_upload, pypi_validate, quay_upload, update_grafana_dashboard]
runs-on: ubuntu-latest
strategy:
matrix:
@@ -283,8 +367,8 @@ jobs:
version=$(python3 setup.py --version)
git checkout main
pip install bumpversion
git config --global user.email ${{ secrets.EMAIL }}
git config --global user.name ${{ secrets.USER_NAME }}
git config --global user.email "${{ secrets.EMAIL }}"
git config --global user.name "${{ secrets.USER_NAME }}"
git config pull.rebase false # merge (the default strategy)
bumpversion patch
# GITHUB_REPOSITORY already taken => GIT_REPOSITORY
Expand All @@ -295,7 +379,7 @@ jobs:
e2e:
name: e2e
needs: [unittest, integration_test, pypi_upload, pypi_validate, quay_upload, bump_version]
needs: [unittest, integration_test, pypi_upload, pypi_validate, quay_upload, update_grafana_dashboard, bump_version]
runs-on: ubuntu-latest
strategy:
# run one job every time
@@ -373,4 +457,3 @@ jobs:
ssh -t provision "podman run --rm -t -e WORKLOAD='${{ matrix.workload }}' -e KUBEADMIN_PASSWORD='$KUBEADMIN_PASSWORD' -e PIN_NODE_BENCHMARK_OPERATOR='$PIN_NODE_BENCHMARK_OPERATOR' -e PIN_NODE1='$PIN_NODE1' -e PIN_NODE2='$PIN_NODE2' -e ELASTICSEARCH='$ELASTICSEARCH' -e ELASTICSEARCH_PORT='$ELASTICSEARCH_PORT' -e ELASTICSEARCH_USER='$ELASTICSEARCH_USER' -e ELASTICSEARCH_PASSWORD='$ELASTICSEARCH_PASSWORD' -e IBM_REGION_NAME='$IBM_REGION_NAME' -e IBM_ENDPOINT_URL='$IBM_ENDPOINT_URL' -e IBM_ACCESS_KEY_ID='$IBM_ACCESS_KEY_ID' -e IBM_SECRET_ACCESS_KEY='$IBM_SECRET_ACCESS_KEY' -e IBM_BUCKET='$IBM_BUCKET' -e IBM_KEY='$IBM_KEY' -e RUN_ARTIFACTS_URL='$RUN_ARTIFACTS_URL' -e BUILD_VERSION='$build_version' -e RUN_TYPE='test_ci' -e TIMEOUT='2000' -e log_level='INFO' -v '$CONTAINER_KUBECONFIG_PATH':'$CONTAINER_KUBECONFIG_PATH' --privileged 'quay.io/ebattat/benchmark-runner:v$build_version'"
ssh -t provision "podman rmi -f 'quay.io/ebattat/benchmark-runner:v$build_version'"
echo '>>>>>>>>>>>>>>>>>>>>>>>>>> End E2E workload: ${{ matrix.workload }} >>>>>>>>>>>>>>>>>>>>>>>>>>>>'
3 changes: 3 additions & 0 deletions .github/workflows/Perf_Env_PR_Test_CI.yml
@@ -148,6 +148,9 @@ jobs:
IBM_BUCKET: ${{ secrets.IBM_BUCKET }}
IBM_KEY: ${{ secrets.IBM_KEY }}
RUN_ARTIFACTS_URL: ${{ secrets.PERF_RUN_ARTIFACTS_URL }}
GRAFANA_URL: ${{ secrets.PERF_GRAFANA_URL }}
GRAFANA_API_KEY: ${{ secrets.PERF_GRAFANA_API_KEY }}
GRAFANA_JSON: ${{ secrets.PERF_GRAFANA_JSON }}
run: |
# Install Dockerfile content for pytest
# install oc/kubectl
5 changes: 5 additions & 0 deletions .gitignore
@@ -200,6 +200,11 @@ fabric.properties
# Editor-based Rest Client
.idea/httpRequests

# Terraform
.terraform
.terraform.lock.hcl
terraform.tfstate

# custom file configuration
/benchmark_runner/main/empty_environment_variables.py
/benchmark_runner/main/empty_test_environment_variables.py
6 changes: 3 additions & 3 deletions HOW_TO.md
@@ -226,10 +226,10 @@ any template .yaml files.
6. Save & test
2. Open grafana dashboard benchmark-runner-report:
1. Open grafana
2. Create(+) -> import -> paste [grafana/func/benchmark-runner-report.json](grafana/func/benchmark-runner-report.json) -> Load
3. Create panel from scratch or duplicate existing on (stressng/uperf)
2. Create(+) -> import -> paste [grafana/func/dashboard.json](benchmark_runner/grafana/func/dashboard.json) -> Load
3. Create a panel from scratch or duplicate an existing one (e.g. uperf)
4. Configure the workload-related metrics
5. Save dashboard -> share -> Export -> view json -> Copy to clipboard -> override existing one [grafana/func/benchmark-runner-report.json](grafana/func/benchmark-runner-report.json)
5. Save dashboard -> Share -> Export -> View JSON -> Copy to clipboard -> overwrite the existing [grafana/func/dashboard.json](benchmark_runner/grafana/func/dashboard.json) (a scripted alternative is sketched below)
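As a scripted alternative to the manual Create(+) -> import flow above, the func dashboard JSON can also be pushed through Grafana's HTTP API. This is only a sketch: the Grafana URL and API key are placeholders for your own instance.

```python
# Sketch: import/overwrite grafana/func/dashboard.json via the Grafana HTTP API
# instead of the manual UI flow. URL and API key below are placeholders.
import json
import requests

GRAFANA_URL = "https://grafana.example.com"  # placeholder
GRAFANA_API_KEY = "<grafana-api-key>"        # placeholder

with open("benchmark_runner/grafana/func/dashboard.json") as f:
    dashboard = json.load(f)

response = requests.post(
    f"{GRAFANA_URL}/api/dashboards/db",
    headers={"Authorization": f"Bearer {GRAFANA_API_KEY}",
             "Content-Type": "application/json"},
    json={"dashboard": dashboard, "overwrite": True},
    timeout=30,
)
response.raise_for_status()
print(response.json())  # contains the imported dashboard uid/url on success
```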

### Data template

7 changes: 3 additions & 4 deletions README.md
@@ -128,10 +128,9 @@ SAVE RUN ARTIFACTS LOCAL:
## Grafana dashboards

There are 2 Grafana dashboard templates:
1. [grafana/func/benchmark-runner-ci-status-report.json](grafana/func/benchmark-runner-ci-status-report.json)
![](media/benchmark-runner-ci-status.png)
2. [grafana/func/benchmark-runner-report.json](grafana/func/benchmark-runner-report.json)
![](media/benchmark-runner-report.png)
1. [FuncCi dashboard](benchmark_runner/grafana/func/dashboard.json)
2. [PerfCi dashboard](benchmark_runner/grafana/perf/dashboard.json)
** The PerfCi dashboard is generated automatically by the [Build GitHub Actions workflow](https://github.com/redhat-performance/benchmark-runner/blob/main/.github/workflows/Perf_Env_Build_Test_CI.yml) from [main.libsonnet](benchmark_runner/grafana/perf/jsonnet/main.libsonnet).

** After importing the JSON into Grafana, you need to configure the Elasticsearch data source (for more details, see [HOW_TO.md](HOW_TO.md)).

Empty file.
106 changes: 106 additions & 0 deletions benchmark_runner/common/grafana/grafana_operations.py
@@ -0,0 +1,106 @@

import requests
import json
import logging

logging.basicConfig(level=logging.INFO)


class GrafanaOperations:
"""
This class is responsible for Grafana operations
"""
def __init__(self, grafana_url: str, grafana_api_key: str, grafana_json_path: str):
self.grafana_url = grafana_url
self.grafana_api_key = grafana_api_key
self.grafana_json_path = grafana_json_path
self.dashboard_data = {}
self.logger = logging.getLogger(__name__)

def fetch_all_dashboards(self):
"""
This method fetches all dashboards
:return:
"""
dashboard_list = []
headers = {
"Authorization": f"Bearer {self.grafana_api_key}",
}

try:
response = requests.get(f"{self.grafana_url}/api/search", headers=headers)

if response.status_code == 200:
dashboards = response.json()
for dashboard in dashboards:
dashboard_list.append(f"Dashboard ID: {dashboard['id']}, Title: {dashboard['title']}")
else:
raise Exception(f"Failed to fetch dashboards. Status code: {response.status_code}. Message: {response.text}")
return dashboard_list
except requests.exceptions.RequestException as e:
raise Exception(f"Error fetching dashboards: {e}")

def increment_dashboard_version(self):
"""
This method aligns the local dashboard version with the latest version stored in Grafana so the override is accepted
:return:
"""
self.dashboard_data["version"] = self.get_latest_dashboard_version()

def read_dashboard_json(self):
"""
This method reads the dashboard JSON file into a dictionary
:return:
"""
with open(self.grafana_json_path, 'r') as f:
self.dashboard_data = json.load(f)

def write_dashboard_json(self):
"""
This method writes the dashboard data back to the JSON file
:return:
"""
with open(self.grafana_json_path, 'w') as json_file:
json.dump(self.dashboard_data, json_file, indent=2)

def get_latest_dashboard_version(self):
"""
This method gets the latest dashboard version from Grafana
:return:
"""
headers = {
"Authorization": f"Bearer {self.grafana_api_key}",
}
try:
response = requests.get(f"{self.grafana_url}/api/dashboards/uid/{self.dashboard_data['uid']}", headers=headers)
response_json = response.json()
return response_json['dashboard']['version']

except requests.exceptions.RequestException as e:
raise Exception(f"Error fetching dashboard version: {e}")

def override_dashboard(self):
"""
This method overrides the dashboard with the new JSON
:return:
"""
headers = {
"Authorization": f"Bearer {self.grafana_api_key}",
"Content-Type": "application/json",
}
try:
response = requests.post(
f"{self.grafana_url}/api/dashboards/db",
headers=headers,
json={"dashboard": self.dashboard_data},
)

if response.status_code == 200:
self.logger.info(f"Dashboard '{self.dashboard_data['title']}' overridden successfully.")
else:
# The 412 status code is used when a newer dashboard already exists.
raise Exception(
f"Failed to override dashboard '{self.dashboard_data['title']}'. Status code: {response.status_code}. Message: {response.text}")

except requests.exceptions.RequestException as e:
raise Exception(f"Error overriding dashboard '{self.dashboard_data['title']}': {e}")
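A minimal usage sketch for GrafanaOperations follows. It is not the repository's update_grafana_dashboard.py (that script is not shown in this diff); it only illustrates the intended call sequence, and the environment-variable fallbacks are placeholders.

```python
# Hypothetical driver showing how GrafanaOperations might be called
# (placeholder values; the real update_grafana_dashboard.py is not part of this diff).
import os

from benchmark_runner.common.grafana.grafana_operations import GrafanaOperations

grafana = GrafanaOperations(
    grafana_url=os.environ.get("GRAFANA_URL", "https://grafana.example.com"),
    grafana_api_key=os.environ.get("GRAFANA_API_KEY", "<grafana-api-key>"),
    grafana_json_path=os.environ.get("GRAFANA_JSON", "benchmark_runner/grafana/perf/dashboard.json"),
)

grafana.read_dashboard_json()          # load dashboard.json into memory
grafana.increment_dashboard_version()  # align the local version with the one Grafana holds
grafana.override_dashboard()           # POST the dashboard back, overriding the existing one
print(grafana.fetch_all_dashboards())  # list dashboards visible to this API key
```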