Skip to content

Commit e3c39d5

Browse files
authored
Merge pull request #95 from vishnuchalla/telco-splunk
Integrating Splunk for Telco KPIs
2 parents 309a5a1 + a1f404f commit e3c39d5

File tree

16 files changed

+983
-504
lines changed

16 files changed

+983
-504
lines changed

README.md

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# OpenShift Performance Dashboard
22

3-
## Elasticsearch configuration
3+
## Backend configuration
44

55
### Requires
66

@@ -15,6 +15,13 @@ indice=
1515
username=
1616
password=
1717

18+
[<product>.splunk]
19+
host=
20+
port=
21+
indice=
22+
username=
23+
password=
24+
1825
[ocp-server]
1926
port=8000
2027

@@ -30,9 +37,9 @@ password=
3037

3138
[TOML](https://toml.io/en/) is used above, but it also accepts YAML.
3239

33-
The elasticsearch configuration should be set up by product, that way each product can configure their own ES server.
40+
The backend configuration should be set up by product and its data store type, so that each product can configure its own backend server.
3441

35-
As an example for `OCP` the configuration looks like this:
42+
As an example, for `OCP` the ES configuration looks like this:
3643

3744
```toml
3845
[ocp.elasticsearch]
@@ -41,6 +48,7 @@ indice=
4148
username=
4249
password=
4350
```
51+
**Note: The below applies only to Elasticsearch at the moment**
4452
If you also have an archived internal instance that keeps track of older data, it can be specified with '.internal' suffix. Example of our `OCP` internal archived instance's configuration.
4553
```toml
4654
[ocp.elasticsearch.internal]
@@ -136,7 +144,7 @@ $ podman run \
136144
ocpp-front
137145
```
138146

139-
## Integrating to the dashboard
147+
## ES Integration to the dashboard
140148

141149
To integrate into our dashboard we provide a default set of fields that teams should adhere to. That set would be the one used to display a high level Homepage for all the teams.
142150

backend/app/api/api.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from app.api.v1.endpoints.jira import jira
88
from app.api.v1.endpoints.quay import quayJobs
99
from app.api.v1.endpoints.quay import quayGraphs
10+
from app.api.v1.endpoints.telco import telcoJobs
1011

1112
router = APIRouter()
1213

@@ -22,5 +23,8 @@
2223
router.include_router(quayJobs.router, tags=['quay'])
2324
router.include_router(quayGraphs.router, tags=['quay'])
2425

26+
# Telco endpoints
27+
router.include_router(telcoJobs.router, tags=['telco'])
28+
2529
# Jira endpoints
2630
router.include_router(jira.router, tags=['jira'])

backend/app/api/v1/commons/example_responses.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -119,12 +119,35 @@ def response_422():
119119
]
120120
}
121121

122+
telco_response_example ={
123+
"startDate": "2023-09-20",
124+
"endDate": "2023-09-20",
125+
"results": [
126+
{
127+
"ciSystem": "Jenkins",
128+
"uuid": "2cc5d4ca895ca5d84cab0fd7923db93b",
129+
"encrypted": "gAAAAABmQALtP0g5UPMsOjQw46tZ-aBz77yl-8QNI4jwLfIEV1POnOlA1ny89cp3Nrik3OzpNwXrWO3K4ZwtOliTfk0SO5NkNZHY8reJhvOVJBGFEw2enyjRaHp9hIaJdE0Vrfuqt_NjiYX-vOZo0Sjc84R76LvxjAC6f_urceGGZICH36IkT2g=",
130+
"releaseStream": "Release Candidate",
131+
"jobStatus": "success",
132+
"buildUrl": "https://ci-jenkins-xxx.com/job/your-tests/532",
133+
"startDate": "2024-05-09 14:10:51+00:00",
134+
"endDate": "2024-05-09 14:43:51+00:00",
135+
"product": "telco",
136+
"version": "4.16",
137+
"testName": "reboot"
138+
},
139+
]
140+
}
141+
122142
def ocp_200_response():
123143
return response_200(ocp_response_example)
124144

125145
def quay_200_response():
126146
return response_200(quay_response_example)
127147

148+
def telco_200_response():
149+
return response_200(telco_response_example)
150+
128151
cpt_response_example ={
129152
"startDate": "2023-11-18",
130153
"endDate": "2023-11-23",
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import zlib
2+
import hashlib
3+
from cryptography.fernet import Fernet
4+
5+
symmetric_encryptor = b'k3tGwuK6O59c0SEMmnIeJUEpTN5kuxibPy8Q8VfYC6A='
6+
7+
def hash_encrypt_json(json_data):
8+
# Serialize the JSON data to a string
9+
json_str = str(json_data)
10+
11+
# Generate an MD5 hash of the JSON string
12+
hash_digest = hashlib.md5(json_str.encode()).hexdigest()
13+
14+
# Compress the JSON string
15+
compressed_data = zlib.compress(json_str.encode())
16+
17+
cipher = Fernet(symmetric_encryptor)
18+
19+
# Encrypt the compressed JSON string
20+
encrypted_data = cipher.encrypt(compressed_data)
21+
22+
return hash_digest, encrypted_data
23+
24+
def decrypt_unhash_json(hash_digest, encrypted_data):
25+
cipher = Fernet(symmetric_encryptor)
26+
27+
# Decrypt the encrypted JSON data
28+
decompressed_data = cipher.decrypt(encrypted_data)
29+
30+
# Decompress the decrypted data
31+
decompressed_json_str = zlib.decompress(decompressed_data).decode()
32+
33+
# Verify hash digest
34+
calculated_hash = hashlib.md5(decompressed_json_str.encode()).hexdigest()
35+
if calculated_hash != hash_digest:
36+
raise ValueError("Hash digest does not match")
37+
38+
# Deserialize the JSON string back to JSON data
39+
json_data = eval(decompressed_json_str)
40+
41+
return json_data
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
from datetime import date
2+
import pandas as pd
3+
from app import config
4+
from app.services.splunk import SplunkService
5+
import app.api.v1.commons.hasher as hasher
6+
from datetime import datetime, timezone
7+
8+
9+
async def getData(start_datetime: date, end_datetime: date, configpath: str):
10+
test_types = ["oslat", "cyclictest", "cpu_util", "deployment", "ptp", "reboot", "rfc-2544"]
11+
cfg = config.get_config()
12+
try:
13+
jenkins_url = cfg.get('telco.config.job_url')
14+
except Exception as e:
15+
print(f"Error reading telco configuration: {e}")
16+
test_type_execution_times = {
17+
"oslat": 3720,
18+
"cyclictest": 3720,
19+
"cpu_util": 6600,
20+
"deployment": 3720,
21+
"ptp": 4200,
22+
"reboot": 1980,
23+
"rfc-2544": 5580,
24+
}
25+
query = {
26+
"earliest_time": "{}T00:00:00".format(start_datetime.strftime('%Y-%m-%d')),
27+
"latest_time": "{}T23:59:59".format(end_datetime.strftime('%Y-%m-%d')),
28+
"output_mode": "json"
29+
}
30+
searchList = ' OR '.join(['test_type="{}"'.format(test_type) for test_type in test_types])
31+
splunk = SplunkService(configpath=configpath)
32+
response = await splunk.query(query=query, searchList=searchList)
33+
mapped_list = []
34+
35+
for each_response in response:
36+
end_timestamp = int(each_response['timestamp'])
37+
test_data = each_response['data']
38+
hash_digest, encrypted_data = hasher.hash_encrypt_json(each_response)
39+
execution_time_seconds = test_type_execution_times.get(test_data['test_type'], 0)
40+
start_timestamp = end_timestamp - execution_time_seconds
41+
start_time_utc = datetime.fromtimestamp(start_timestamp, tz=timezone.utc)
42+
end_time_utc = datetime.fromtimestamp(end_timestamp, tz=timezone.utc)
43+
44+
mapped_list.append({
45+
"uuid": hash_digest,
46+
"encryptedData": encrypted_data.decode('utf-8'),
47+
"ciSystem": "Jenkins",
48+
"testName": test_data['test_type'],
49+
"version": test_data['ocp_version'],
50+
"releaseStream": test_data['ocp_build'],
51+
"startDate": str(start_time_utc),
52+
"endDate": str(end_time_utc),
53+
"buildUrl": jenkins_url + "/" + str(test_data['cluster_artifacts']['ref']['jenkins_build']),
54+
"jobStatus": "success"
55+
})
56+
57+
jobs = pd.json_normalize(mapped_list)
58+
if len(jobs) == 0:
59+
return jobs
60+
61+
return jobs

backend/app/api/v1/commons/utils.py

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,4 +49,23 @@ def clasifyAWSJobs(job):
4949
def getBuild(job):
5050
releaseStream = job["releaseStream"] + "-"
5151
ocpVersion = job["ocpVersion"]
52-
return ocpVersion.replace(releaseStream, "")
52+
return ocpVersion.replace(releaseStream, "")
53+
54+
def getReleaseStream(row):
55+
if row["releaseStream"].__contains__("fast"):
56+
return "Fast"
57+
elif row["releaseStream"].__contains__("stable"):
58+
return "Stable"
59+
elif row["releaseStream"].__contains__("eus"):
60+
return "EUS"
61+
elif row["releaseStream"].__contains__("candidate"):
62+
return "Release Candidate"
63+
elif row["releaseStream"].__contains__("rc"):
64+
return "Release Candidate"
65+
elif row["releaseStream"].__contains__("nightly"):
66+
return "Nightly"
67+
elif row["releaseStream"].__contains__("ci"):
68+
return "ci"
69+
elif row["releaseStream"].__contains__("ec"):
70+
return "Engineering Candidate"
71+
return "Stable"

backend/app/api/v1/endpoints/cpt/cptJobs.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
from .maps.ocp import ocpMapper
77
from .maps.quay import quayMapper
88
from .maps.hce import hceMapper
9+
from .maps.telco import telcoMapper
910
from ...commons.example_responses import cpt_200_response, response_422
1011
from fastapi.param_functions import Query
1112

@@ -14,7 +15,8 @@
1415
products = {
1516
"ocp": ocpMapper,
1617
"quay": quayMapper,
17-
"hce": hceMapper
18+
"hce": hceMapper,
19+
"telco": telcoMapper,
1820
}
1921

2022
@router.get('/api/v1/cpt/jobs',
@@ -43,7 +45,7 @@ async def jobs(start_date: date = Query(None, description="Start date for search
4345
results = pd.DataFrame()
4446
for product in products:
4547
try:
46-
df = await products[product](start_date, end_date, f'{product}.elasticsearch')
48+
df = await products[product](start_date, end_date)
4749
results = pd.concat([results, df.loc[:, ["ciSystem", "uuid", "releaseStream", "jobStatus", "buildUrl", "startDate", "endDate", "product", "version", "testName"]]])
4850
except ConnectionError:
4951
print("Connection Error in mapper for product " + product)

backend/app/api/v1/endpoints/cpt/maps/hce.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,8 @@
1515
# "version"
1616
# "testName"
1717
################################################################
18-
async def hceMapper(start_datetime: date, end_datetime: date, configpath: str):
19-
df = await getData(start_datetime, end_datetime, configpath)
18+
async def hceMapper(start_datetime: date, end_datetime: date):
19+
df = await getData(start_datetime, end_datetime, f'hce.elasticsearch')
2020
df["releaseStream"] = "Nightly"
2121
df["ciSystem"] = "Jenkins"
2222
df["testName"] = df["product"] + ":" + df["test"]
Lines changed: 3 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,34 +1,15 @@
from ....commons.ocp import getData
from ....commons.utils import getReleaseStream
from datetime import date


################################################################
# This will return a DataFrame from OCP required by the CPT endpoint
################################################################
async def ocpMapper(start_datetime: date, end_datetime: date):
    """Fetch OCP jobs and add the product/version/testName columns CPT expects."""
    # FIX: plain string literal — the original f'ocp.elasticsearch' was an
    # f-string with no placeholders (ruff F541).
    df = await getData(start_datetime, end_datetime, 'ocp.elasticsearch')
    df.insert(len(df.columns), "product", "ocp")
    df["releaseStream"] = df.apply(getReleaseStream, axis=1)
    df["version"] = df["shortVersion"]
    df["testName"] = df["benchmark"]
    return df
15-
16-
17-
def getReleaseStream(row):
18-
if row["releaseStream"].__contains__("fast"):
19-
return "Fast"
20-
elif row["releaseStream"].__contains__("stable"):
21-
return "Stable"
22-
elif row["releaseStream"].__contains__("eus"):
23-
return "EUS"
24-
elif row["releaseStream"].__contains__("candidate"):
25-
return "Release Candidate"
26-
elif row["releaseStream"].__contains__("rc"):
27-
return "Release Candidate"
28-
elif row["releaseStream"].__contains__("nightly"):
29-
return "Nightly"
30-
elif row["releaseStream"].__contains__("ci"):
31-
return "ci"
32-
elif row["releaseStream"].__contains__("ec"):
33-
return "Engineering Candidate"
34-
return "Stable"

backend/app/api/v1/endpoints/cpt/maps/quay.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,9 @@
55
#####################################################################
66
# This will return a DataFrame from Quay required by the CPT endpoint
77
#####################################################################
async def quayMapper(start_datetime: date, end_datetime: date):
    """Fetch Quay jobs and add the product/version/testName columns CPT expects."""
    # FIX: plain string literal — the original f'quay.elasticsearch' was an
    # f-string with no placeholders (ruff F541).
    df = await getData(start_datetime, end_datetime, 'quay.elasticsearch')
    df.insert(len(df.columns), "product", "quay")
    df["version"] = df["releaseStream"]
    df["testName"] = df["benchmark"]
    return df

0 commit comments

Comments
 (0)