Merge pull request kruize#1378 from msvinaykumar/timerangeFix
time_range fix
dinogun authored Nov 21, 2024
2 parents 031d770 + d3356b0 commit 888f663
Showing 4 changed files with 107 additions and 94 deletions.
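
The substantive change in this commit is the switch from DATASOURCE_ENDPOINT_WITH_QUERY to DATASOURCE_ENDPOINT_WITH_QUERY_RANGE when the namespace and container metric URLs are built; both fetchNamespaceMetricsBasedOnDataSourceAndProfile and fetchContainerMetricsBasedOnDataSourceAndProfile move onto the range-query form in the diff below. The sketch that follows is only an illustration of what that switch means for a Prometheus-style datasource: the endpoint templates, PromQL query, step value, and sample timestamps are assumptions based on Prometheus's standard HTTP API, not the actual KruizeConstants values, which are outside this diff.

```java
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class QueryRangeSketch {
    // Assumed shapes of the two endpoint templates (standard Prometheus HTTP API);
    // the real DATASOURCE_ENDPOINT_WITH_QUERY and DATASOURCE_ENDPOINT_WITH_QUERY_RANGE
    // format strings are defined in KruizeConstants and are not shown in this diff.
    static final String ENDPOINT_WITH_QUERY = "%s/api/v1/query?query=%s&time=%s";
    static final String ENDPOINT_WITH_QUERY_RANGE = "%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s";

    public static void main(String[] args) {
        String dataSourceUrl = "http://prometheus.example:9090";          // hypothetical datasource URL
        String promQL = URLEncoder.encode(
                "sum(kube_pod_status_phase{namespace=\"default\"})",      // hypothetical query
                StandardCharsets.UTF_8);
        long intervalStartEpoch = 1732093200L;                            // interval_start_time_epoc (example)
        long intervalEndEpoch = 1732179600L;                              // interval_end_time_epoc (example)
        int stepSeconds = 900;                                            // measurement duration (example)

        // Instant query: a single sample, evaluated at one timestamp only.
        String instantUrl = String.format(ENDPOINT_WITH_QUERY, dataSourceUrl, promQL, intervalEndEpoch);

        // Range query: one sample per step across the whole [start, end] window,
        // which is what a time_range based metric fetch needs.
        String rangeUrl = String.format(ENDPOINT_WITH_QUERY_RANGE,
                dataSourceUrl, promQL, intervalStartEpoch, intervalEndEpoch, stepSeconds);

        System.out.println(instantUrl);
        System.out.println(rangeUrl);
    }
}
```
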
@@ -35,7 +35,10 @@
import com.autotune.utils.KruizeConstants;
import com.autotune.utils.MetricsConfig;
import com.autotune.utils.Utils;
import com.google.gson.*;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import io.micrometer.core.instrument.Timer;
import org.json.JSONObject;
import org.slf4j.Logger;
@@ -100,6 +103,26 @@ private static int getNumPods(Map<Timestamp, IntervalResults> filteredResultsMap
private static void getPromQls(Map<AnalyzerConstants.MetricName, String> promQls) {
}

/**
* Calculates the number of pods for a namespace based on the provided results map.
*
* @param filteredResultsMap A map with timestamps as keys and the corresponding interval metric results as values.
* @return int maximum number of pods observed across all timestamps in the filtered results map.
*/
private static int getNumPodsForNamespace(Map<Timestamp, IntervalResults> filteredResultsMap) {
LOGGER.debug("Size of Filter Map: {}", filteredResultsMap.size());
Double max_pods_cpu = filteredResultsMap.values()
.stream()
.map(e -> {
Optional<MetricResults> numPodsResults = Optional.ofNullable(e.getMetricResultsMap().get(AnalyzerConstants.MetricName.namespaceTotalPods));
double numPods = numPodsResults.map(m -> m.getAggregationInfoResult().getAvg()).orElse(0.0);
return numPods;
})
.max(Double::compareTo).get();

return (int) Math.ceil(max_pods_cpu);
}

private void init() {
// Add new models
recommendationModels = new ArrayList<>();
@@ -236,7 +259,7 @@ public String validate_local() { //TODO Instead of relying on the 'lo
* @param calCount The count of incoming requests.
* @return The KruizeObject containing the prepared recommendations.
*/
public KruizeObject prepareRecommendations(int calCount) throws FetchMetricsError{
public KruizeObject prepareRecommendations(int calCount) throws FetchMetricsError {
Map<String, KruizeObject> mainKruizeExperimentMAP = new ConcurrentHashMap<>();
Map<String, Terms> terms = new HashMap<>();
ValidationOutputData validationOutputData;
@@ -311,6 +334,7 @@ public KruizeObject prepareRecommendations(int calCount) throws FetchMetricsErro

/**
* Generates recommendations for the specified KruizeObject
*
* @param kruizeObject The KruizeObject containing experiment data
*/
public void generateRecommendations(KruizeObject kruizeObject) {
@@ -322,7 +346,7 @@ public void generateRecommendations(KruizeObject kruizeObject) {
NamespaceData namespaceData = k8sObject.getNamespaceData();
LOGGER.info("Generating recommendations for namespace: {}", namespaceName);
generateRecommendationsBasedOnNamespace(namespaceData, kruizeObject);
} else if (kruizeObject.isContainerExperiment()){
} else if (kruizeObject.isContainerExperiment()) {
for (String containerName : k8sObject.getContainerDataMap().keySet()) {
ContainerData containerData = k8sObject.getContainerDataMap().get(containerName);

@@ -740,25 +764,6 @@ private MappedRecommendationForModel generateRecommendationBasedOnModel(Timestam
return mappedRecommendationForModel;
}

/**
* Calculates the number of pods for a namespace based on the provided results map.
* @param filteredResultsMap A map with timestamps as keys and the corresponding interval metric results as values.
* @return int maximum number of pods observed across all timestamps in the filtered results map.
*/
private static int getNumPodsForNamespace(Map<Timestamp, IntervalResults> filteredResultsMap) {
LOGGER.debug("Size of Filter Map: {}", filteredResultsMap.size());
Double max_pods_cpu = filteredResultsMap.values()
.stream()
.map(e -> {
Optional<MetricResults> numPodsResults = Optional.ofNullable(e.getMetricResultsMap().get(AnalyzerConstants.MetricName.namespaceTotalPods));
double numPods = numPodsResults.map(m -> m.getAggregationInfoResult().getAvg()).orElse(0.0);
return numPods;
})
.max(Double::compareTo).get();

return (int) Math.ceil(max_pods_cpu);
}

private void generateRecommendationsBasedOnNamespace(NamespaceData namespaceData, KruizeObject kruizeObject) {
Timestamp monitoringEndTime = namespaceData.getResults().keySet().stream().max(Timestamp::compareTo).get();
NamespaceRecommendations namespaceRecommendations = namespaceData.getNamespaceRecommendations();
@@ -806,8 +811,8 @@ private void generateRecommendationsBasedOnNamespace(NamespaceData namespaceData
}

private HashMap<AnalyzerConstants.ResourceSetting, HashMap<AnalyzerConstants.RecommendationItem, RecommendationConfigItem>> getCurrentNamespaceConfigData(NamespaceData namespaceData,
Timestamp monitoringEndTime,
MappedRecommendationForTimestamp timestampRecommendation) {
Timestamp monitoringEndTime,
MappedRecommendationForTimestamp timestampRecommendation) {

HashMap<AnalyzerConstants.ResourceSetting, HashMap<AnalyzerConstants.RecommendationItem, RecommendationConfigItem>> currentNamespaceConfig = new HashMap<>();

@@ -1096,13 +1101,13 @@ private MappedRecommendationForModel generateNamespaceRecommendationBasedOnModel
* DO NOT EDIT THIS METHOD UNLESS THERE ARE ANY CHANGES TO BE ADDED IN VALIDATION OR POPULATION MECHANISM
* EDITING THIS METHOD MIGHT LEAD TO UNEXPECTED OUTCOMES IN RECOMMENDATIONS, PLEASE PROCEED WITH CAUTION
*
* @param termEntry The entry containing a term key and its associated {@link Terms} object.
* @param recommendationModel The model used to map recommendations.
* @param notifications A list to which recommendation notifications will be added.
* @param internalMapToPopulate The internal map to populate with recommendation configuration items.
* @param numPods The number of pods to consider for the recommendation.
* @param cpuThreshold The CPU usage threshold for the recommendation.
* @param memoryThreshold The memory usage threshold for the recommendation.
* @param termEntry The entry containing a term key and its associated {@link Terms} object.
* @param recommendationModel The model used to map recommendations.
* @param notifications A list to which recommendation notifications will be added.
* @param internalMapToPopulate The internal map to populate with recommendation configuration items.
* @param numPods The number of pods to consider for the recommendation.
* @param cpuThreshold The CPU usage threshold for the recommendation.
* @param memoryThreshold The memory usage threshold for the recommendation.
* @param recommendationAcceleratorRequestMap The Map which has Accelerator recommendations
* @return {@code true} if the internal map was successfully populated; {@code false} otherwise.
*/
@@ -1800,10 +1805,10 @@ private String getResults(Map<String, KruizeObject> mainKruizeExperimentMAP, Kru
/**
* Fetches metrics based on the specified datasource using queries from the metricProfile for the given time interval.
*
* @param kruizeObject KruizeObject
* @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ
* @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ.
* @param dataSourceInfo DataSource object
* @param kruizeObject KruizeObject
* @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ
* @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ.
* @param dataSourceInfo DataSource object
* @throws Exception
*/
public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo) throws Exception, FetchMetricsError {
@@ -1839,12 +1844,13 @@ public void fetchMetricsBasedOnProfileAndDatasource(KruizeObject kruizeObject, T

/**
* Fetches namespace metrics based on the specified datasource using queries from the metricProfile for the given time interval.
* @param kruizeObject KruizeObject
* @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ
* @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ.
* @param dataSourceInfo DataSource object
* @param metricProfile performance profile to be used
* @param maxDateQuery max date query for namespace
*
* @param kruizeObject KruizeObject
* @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ
* @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ.
* @param dataSourceInfo DataSource object
* @param metricProfile performance profile to be used
* @param maxDateQuery max date query for namespace
* @throws Exception
*/
private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject, Timestamp interval_end_time, Timestamp interval_start_time, DataSourceInfo dataSourceInfo, PerformanceProfile metricProfile, String maxDateQuery) throws Exception, FetchMetricsError {
@@ -1941,7 +1947,7 @@ private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz
LOGGER.info(promQL);
String namespaceMetricsUrl;
try {
namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY,
namespaceMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY_RANGE,
dataSourceInfo.getUrl(),
URLEncoder.encode(promQL, CHARACTER_ENCODING),
interval_start_time_epoc,
@@ -1995,12 +2001,12 @@ private void fetchNamespaceMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz
/**
* Fetches Container metrics based on the specified datasource using queries from the metricProfile for the given time interval.
*
* @param kruizeObject KruizeObject
* @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ
* @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ.
* @param dataSourceInfo DataSource object
* @param metricProfile performance profile to be used
* @param maxDateQuery max date query for containers
* @param kruizeObject KruizeObject
* @param interval_end_time The end time of the interval in the format yyyy-MM-ddTHH:mm:sssZ
* @param interval_start_time The start time of the interval in the format yyyy-MM-ddTHH:mm:sssZ.
* @param dataSourceInfo DataSource object
* @param metricProfile performance profile to be used
* @param maxDateQuery max date query for containers
* @throws Exception
*/
private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruizeObject,
@@ -2052,7 +2058,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz


LOGGER.debug("maxDateQuery: {}", maxDateQuery);
queryToEncode = maxDateQuery
queryToEncode = maxDateQuery
.replace(AnalyzerConstants.NAMESPACE_VARIABLE, namespace)
.replace(AnalyzerConstants.CONTAINER_VARIABLE, containerName)
.replace(AnalyzerConstants.WORKLOAD_VARIABLE, workload)
@@ -2125,7 +2131,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz
continue;

HashMap<String, AggregationFunctions> aggregationFunctions = metricEntry.getAggregationFunctionsMap();
for (Map.Entry<String, AggregationFunctions> aggregationFunctionsEntry: aggregationFunctions.entrySet()) {
for (Map.Entry<String, AggregationFunctions> aggregationFunctionsEntry : aggregationFunctions.entrySet()) {
// Determine promQL query on metric type
String promQL = aggregationFunctionsEntry.getValue().getQuery();

@@ -2157,7 +2163,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz
LOGGER.debug(promQL);
String podMetricsUrl;
try {
podMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY,
podMetricsUrl = String.format(KruizeConstants.DataSourceConstants.DATASOURCE_ENDPOINT_WITH_QUERY_RANGE,
dataSourceInfo.getUrl(),
URLEncoder.encode(promQL, CHARACTER_ENCODING),
interval_start_time_epoc,
@@ -2174,7 +2180,7 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz
continue;

// Process fetched metrics
if (isAcceleratorMetric){
if (isAcceleratorMetric) {
for (JsonElement result : resultArray) {
JsonObject resultObject = result.getAsJsonObject();
JsonObject metricObject = resultObject.getAsJsonObject(KruizeConstants.JSONKeys.METRIC);
@@ -2316,13 +2322,14 @@ private void fetchContainerMetricsBasedOnDataSourceAndProfile(KruizeObject kruiz

/**
* Fetches max date query for namespace and containers from performance profile
* @param metricProfile performance profile to be used
*
* @param metricProfile performance profile to be used
*/
private String getMaxDateQuery(PerformanceProfile metricProfile, String metricName) {
List<Metric> metrics = metricProfile.getSloInfo().getFunctionVariables();
for (Metric metric: metrics) {
for (Metric metric : metrics) {
String name = metric.getName();
if(name.equals(metricName)) {
if (name.equals(metricName)) {
return metric.getAggregationFunctionsMap().get("max").getQuery();
}
}
@@ -2376,6 +2383,7 @@ private void prepareIntervalResults(Map<Timestamp, IntervalResults> dataResultsM

/**
* Filters out maxDateQuery and includes metrics based on the experiment type and kubernetes_object
*
* @param metricProfile Metric profile to be used
* @param maxDateQuery maxDateQuery metric to be filtered out
* @param experimentType experiment type
@@ -2384,17 +2392,17 @@ public List<Metric> filterMetricsBasedOnExpTypeAndK8sObject(PerformanceProfile m
String namespace = KruizeConstants.JSONKeys.NAMESPACE;
String container = KruizeConstants.JSONKeys.CONTAINER;
return metricProfile.getSloInfo().getFunctionVariables().stream()
.filter(Metric -> {
String name = Metric.getName();
String kubernetes_object = Metric.getKubernetesObject();

// Include metrics based on experiment_type, kubernetes_object and exclude maxDate metric
return !name.equals(maxDateQuery) && (
(experimentType.equals(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT) && kubernetes_object.equals(namespace)) ||
(experimentType.equals(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT) && kubernetes_object.equals(container))
);
})
.toList();
.filter(Metric -> {
String name = Metric.getName();
String kubernetes_object = Metric.getKubernetesObject();

// Include metrics based on experiment_type, kubernetes_object and exclude maxDate metric
return !name.equals(maxDateQuery) && (
(experimentType.equals(AnalyzerConstants.ExperimentTypes.NAMESPACE_EXPERIMENT) && kubernetes_object.equals(namespace)) ||
(experimentType.equals(AnalyzerConstants.ExperimentTypes.CONTAINER_EXPERIMENT) && kubernetes_object.equals(container))
);
})
.toList();
}
}
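
As an aside on the relocated getNumPodsForNamespace helper shown in the diff above: its logic reduces to taking the average pod count reported at each timestamp, keeping the maximum, and rounding up. The stand-alone sketch below reproduces that calculation on a plain map of epoch seconds to average pod counts; the class name, map shape, and sample values are hypothetical, since the real method reads its inputs from IntervalResults and MetricResults.

```java
import java.util.Map;

public class NumPodsSketch {
    // Mirrors getNumPodsForNamespace: max of the per-timestamp average pod counts, rounded up.
    static int numPodsForNamespace(Map<Long, Double> avgPodsPerTimestamp) {
        double maxPods = avgPodsPerTimestamp.values().stream()
                .mapToDouble(Double::doubleValue)
                .max()
                .orElse(0.0); // the real method calls .get(), assuming at least one interval is present
        return (int) Math.ceil(maxPods);
    }

    public static void main(String[] args) {
        Map<Long, Double> results = Map.of(
                1732093200L, 2.4,   // avg pods observed in the first interval
                1732094100L, 3.0,   // second interval
                1732095000L, 2.7);  // third interval
        System.out.println(numPodsForNamespace(results)); // prints 3
    }
}
```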
