Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Telco support #75

Draft
wants to merge 4 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions configs/small-scale-cluster-density.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
# This is a template file
datasource:
type: perfscale
tests :
- name : aws-small-scale-cluster-density-v2
index: ospst-perf-scale-ci-*
Expand Down
2 changes: 2 additions & 0 deletions configs/small-scale-node-density-cni.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
# This is a template file
datasource:
type: perfscale
tests :
- name : aws-small-scale-node-density-cni
index: ospst-perf-scale-ci-*
Expand Down
2 changes: 2 additions & 0 deletions configs/trt-payload-cluster-density.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : payload-cluster-density-v2
index: ospst-perf-scale-ci-*
Expand Down
2 changes: 2 additions & 0 deletions examples/label-small-scale-cluster-density.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : small-scale-cluster-density-v2
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/payload-scale-415.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : aws-small-scale-cluster-density-v2
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/payload-scale-416.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : aws-small-scale-cluster-density-v2
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/readout-control-plane-cdv2.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : cluster-density-v2-24nodes
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/readout-control-plane-node-density.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : node-density-heavy-24nodes
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/readout-netperf-tcp.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : k8s-netperf-tcp
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/small-scale-cluster-density.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : aws-small-scale-cluster-density-v2
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/small-scale-node-density-cni.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : aws-small-scale-node-density-cni
index: {{ es_metadata_index }}
Expand Down
14 changes: 14 additions & 0 deletions examples/telco-cpu-util.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Example Orion config exercising the new "telco" datasource type:
# one test that tracks CPU-utilisation metrics for the idle scenario
# on OCP 4.17 (total CPU and the os_daemon share).
datasource:
type: telco
tests :
- name : telco_cpu_util_test
metadata:
test_type: cpu_util
ocp_version: 4.17

# metric_of_interest is a dotted path with key==value selectors, resolved
# inside each result document returned by the telco (Splunk) matcher.
metrics :
- name: total_cpu
metric_of_interest: scenarios.scenario_name==idle.types.type_name==total.max_cpu

- name : os_daemon
metric_of_interest: scenarios.scenario_name==idle.types.type_name==os_daemon.max_cpu
2 changes: 2 additions & 0 deletions examples/trt-external-payload-cluster-density.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : payload-cluster-density-v2
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/trt-external-payload-crd-scale.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : payload-crd-scale
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/trt-external-payload-node-density.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : payload-node-density
index: {{ es_metadata_index }}
Expand Down
2 changes: 2 additions & 0 deletions examples/trt-payload-cluster-density.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
datasource:
type: perfscale
tests :
- name : payload-cluster-density-v2
index: {{ es_metadata_index }}
Expand Down
8 changes: 7 additions & 1 deletion orion.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
"""

# pylint: disable = import-error, line-too-long, no-member
import asyncio
import logging
import sys
import warnings
Expand Down Expand Up @@ -116,14 +117,19 @@ def cli(max_content_width=120): # pylint: disable=unused-argument
@click.option("--node-count", default=False, help="Match any node iterations count")
@click.option("--lookback-size", type=int, default=10000, help="Maximum number of entries to be looked back")
def cmd_analysis(**kwargs):
"""Dummy function for asyncio
"""
asyncio.run(_cmd_analysis_async(**kwargs))

async def _cmd_analysis_async(**kwargs):
"""
Orion runs on command line mode, and helps in detecting regressions
"""
level = logging.DEBUG if kwargs["debug"] else logging.INFO
logger_instance = SingletonLogger(debug=level, name="Orion")
logger_instance.info("🏹 Starting Orion in command-line mode")
kwargs["configMap"] = load_config(kwargs["config"])
output, regression_flag = run(**kwargs)
output, regression_flag = await run(**kwargs)
if output is None:
logger_instance.error("Terminating test")
sys.exit(0)
Expand Down
7 changes: 4 additions & 3 deletions pkg/algorithms/algorithm.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,15 @@ def __init__( # pylint: disable = too-many-arguments
self.metrics_config = metrics_config
self.regression_flag = False


def output_json(self) -> Tuple[str, str, bool]:
"""Method to output json output

Returns:
Tuple[str, str]: returns test_name and json output
"""
_, change_points_by_metric = self._analyze()
dataframe_json = self.dataframe.to_json(orient="records")
dataframe_json = self.dataframe.to_json(orient="records", default_handler=str)
dataframe_json = json.loads(dataframe_json)

for index, entry in enumerate(dataframe_json):
Expand All @@ -57,8 +58,8 @@ def output_json(self) -> Tuple[str, str, bool]:
/ change_point.stats.mean_1
) * 100
if (
percentage_change * self.metrics_config[key]["direction"] > 0
or self.metrics_config[key]["direction"] == 0
percentage_change * self.metrics_config[key].get("direction",0) > 0
or self.metrics_config[key].get("direction",0) == 0
):
dataframe_json[index]["metrics"][key][
"percentage_change"
Expand Down
11 changes: 8 additions & 3 deletions pkg/algorithms/edivisive/edivisive.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,20 @@ class EDivisive(Algorithm):

def _analyze(self):
self.dataframe["timestamp"] = pd.to_datetime(self.dataframe["timestamp"])
self.dataframe["timestamp"] = self.dataframe["timestamp"].astype(int) // 10**9
self.dataframe["timestamp"] = self.dataframe["timestamp"].astype(int)
first_timestamp = self.dataframe["timestamp"].dropna().iloc[0]
if first_timestamp > 1_000_000_000_000:
self.dataframe["timestamp"] = self.dataframe["timestamp"].astype('int64') // 10**9
else:
self.dataframe["timestamp"] = self.dataframe["timestamp"].astype('int64')
series= self.setup_series()
change_points_by_metric = series.analyze().change_points

# filter by direction
for metric, changepoint_list in change_points_by_metric.items():
for i in range(len(changepoint_list)-1, -1, -1):
if ((self.metrics_config[metric]["direction"] == 1 and changepoint_list[i].stats.mean_1 > changepoint_list[i].stats.mean_2) or
(self.metrics_config[metric]["direction"] == -1 and changepoint_list[i].stats.mean_1 < changepoint_list[i].stats.mean_2) ):
if ((self.metrics_config[metric].get("direction",0) == 1 and changepoint_list[i].stats.mean_1 > changepoint_list[i].stats.mean_2) or
(self.metrics_config[metric].get("direction",0) == -1 and changepoint_list[i].stats.mean_1 < changepoint_list[i].stats.mean_2) ):
del changepoint_list[i]
if [val for li in change_points_by_metric.values() for val in li]:
self.regression_flag=True
Expand Down
7 changes: 7 additions & 0 deletions pkg/datasources/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
"""
Datasource module for orion
"""
from .datasource import Datasource
from .datasourceFactory import DatasourceFactory
from .perfscale import PerfscaleDatasource
from .telco import TelcoDatasource
28 changes: 28 additions & 0 deletions pkg/datasources/datasource.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# pylint: disable = R0903, E0211
"""
Generic Datasource implementation
"""
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Dict, Any

class Datasource(ABC):
    """Abstract base class for the backends Orion pulls test results from.

    Concrete implementations (e.g. the perfscale and telco datasources)
    implement :meth:`process_test` to fetch and shape the data for one
    test definition.

    Attributes:
        test: single test definition from the loaded config (name, index,
            metrics, ...).
        match: matcher/client object used to query the backing store
            (e.g. an Elasticsearch or Splunk matcher).
        options: runtime options forwarded from the command line.
        start_timestamp: start of the lookback window for the query
            (semantics are up to the concrete datasource).
    """

    def __init__(self, test: Dict[str, Any],
                 match: Any,
                 options: Dict[str, Any],
                 start_timestamp: datetime):
        self.test = test
        self.match = match
        self.options = options
        self.start_timestamp = start_timestamp

    @abstractmethod
    def process_test(self):
        """Fetch and process results for ``self.test``.

        Must be implemented by every concrete datasource.
        """
51 changes: 51 additions & 0 deletions pkg/datasources/datasourceFactory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
# pylint: disable = R0903, E0211
"""
Generate datasource factory
"""
from datetime import datetime
from typing import Any, Dict
from fmatch.matcher import Matcher
from fmatch.splunk_matcher import SplunkMatcher
from fmatch.logrus import SingletonLogger
from .perfscale import PerfscaleDatasource
from .telco import TelcoDatasource


class DatasourceFactory:
    """Factory that builds the matching Datasource (plus matcher) for a config."""

    def instantiate_datasource(self, datasource: Dict[str, Any],
                               test: Dict[str, Any],
                               options: Dict[str, Any],
                               start_timestamp: datetime):
        """Instantiate the datasource selected by ``datasource["type"]``.

        Args:
            datasource: the ``datasource`` section of the config — carries
                the ``type`` key plus backend connection settings
                (``ES_SERVER`` for perfscale; host/port/credentials for telco).
                NOTE: this is a mapping, not a str (the old annotation was wrong).
            test: single test definition from the config.
            options: runtime options forwarded to the datasource.
            start_timestamp: start of the data window to query.

        Returns:
            Tuple ``(datasource_instance, matcher)``; ``(None, None)`` when
            the type is not recognised.
        """
        logger_instance = SingletonLogger.getLogger("Orion")
        if datasource["type"] == "perfscale":
            match = Matcher(
                index=test["index"],
                level=logger_instance.level,
                ES_URL=datasource["ES_SERVER"],
                verify_certs=False,
            )
            return PerfscaleDatasource(test, match, options, start_timestamp), match
        if datasource["type"] == "telco":
            # Telco results live in Splunk; connection details come straight
            # from the datasource config section (all optional via .get()).
            match = SplunkMatcher(
                host=datasource.get("host"),
                port=datasource.get("port"),
                username=datasource.get("username"),
                password=datasource.get("password"),
                indice=datasource.get("indice"),
            )
            return TelcoDatasource(test, match, options, start_timestamp), match
        return None, None

4 changes: 4 additions & 0 deletions pkg/datasources/perfscale/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
"""
Perfscale datasource init module
"""
from .perfscale import PerfscaleDatasource
Loading
Loading