
Commit

Merge pull request kruize#1487 from msvinaykumar/bulk_new_format_2
2. Bulk new format - Metadata Inclusion into Bulk New Format
dinogun authored Feb 14, 2025
2 parents 8ddda2a + f65c7b7 commit a49b7e5
Showing 13 changed files with 228 additions and 215 deletions.
29 changes: 15 additions & 14 deletions src/main/java/com/autotune/analyzer/workerimpl/BulkJobManager.java
@@ -160,6 +160,7 @@ public void run() {
if (null == metadataInfo) {
setFinalJobStatus(COMPLETED, String.valueOf(HttpURLConnection.HTTP_OK), NOTHING_INFO, datasource);
} else {
+ jobData.setMetadata(metadataInfo);
Map<String, CreateExperimentAPIObject> createExperimentAPIObjectMap = getExperimentMap(labelString, jobData, metadataInfo, datasource); //Todo Store this map in buffer and use it if BulkAPI pods restarts and support experiment_type
jobData.getSummary().setTotal_experiments(createExperimentAPIObjectMap.size());
jobData.getSummary().setProcessed_experiments(0);
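Note: with this change the fetched datasource metadata travels with the bulk job itself (jobData.setMetadata(metadataInfo)) instead of being used only transiently. A minimal, self-contained sketch of such a job-status holder follows; only setMetadata(), getSummary(), setTotal_experiments() and setProcessed_experiments() mirror calls visible in this hunk, while the class name, field names and the generic metadata type are assumptions for illustration, not the actual Kruize BulkJobStatus class.

    // Minimal sketch of a bulk-job status holder; illustrative only, not the
    // real Kruize class. The metadata type is left generic on purpose.
    public class BulkJobStatusSketch<M> {

        public static class Summary {
            private int total_experiments;
            private int processed_experiments;

            public void setTotal_experiments(int total) { this.total_experiments = total; }
            public void setProcessed_experiments(int processed) { this.processed_experiments = processed; }
            public int getTotal_experiments() { return total_experiments; }
            public int getProcessed_experiments() { return processed_experiments; }
        }

        private final Summary summary = new Summary();
        private M metadata; // datasource metadata now carried with the job (this PR)

        public Summary getSummary() { return summary; }
        public void setMetadata(M metadata) { this.metadata = metadata; }
        public M getMetadata() { return metadata; }
    }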
@@ -337,16 +338,16 @@ Map<String, CreateExperimentAPIObject> getExperimentMap(String labelString, Bulk
Timer.Sample timerGetExpMap = Timer.start(MetricsConfig.meterRegistry());
try {
Map<String, CreateExperimentAPIObject> createExperimentAPIObjectMap = new HashMap<>();
- Collection<DataSource> dataSourceCollection = metadataInfo.getDataSourceHashMap().values();
+ Collection<DataSource> dataSourceCollection = metadataInfo.getDatasources().values();
for (DataSource ds : dataSourceCollection) {
- HashMap<String, DataSourceCluster> clusterHashMap = ds.getDataSourceClusterHashMap();
+ HashMap<String, DataSourceCluster> clusterHashMap = ds.getClusters();
for (DataSourceCluster dsc : clusterHashMap.values()) {
- HashMap<String, DataSourceNamespace> namespaceHashMap = dsc.getDataSourceNamespaceHashMap();
+ HashMap<String, DataSourceNamespace> namespaceHashMap = dsc.getNamespaces();
for (DataSourceNamespace namespace : namespaceHashMap.values()) {
- HashMap<String, DataSourceWorkload> dataSourceWorkloadHashMap = namespace.getDataSourceWorkloadHashMap();
+ HashMap<String, DataSourceWorkload> dataSourceWorkloadHashMap = namespace.getWorkloads();
if (dataSourceWorkloadHashMap != null) {
for (DataSourceWorkload dsw : dataSourceWorkloadHashMap.values()) {
- HashMap<String, DataSourceContainer> dataSourceContainerHashMap = dsw.getDataSourceContainerHashMap();
+ HashMap<String, DataSourceContainer> dataSourceContainerHashMap = dsw.getContainers();
if (dataSourceContainerHashMap != null) {
for (DataSourceContainer dc : dataSourceContainerHashMap.values()) {
// Experiment name - dynamically constructed
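Note: taken together, the renamed accessors walk the metadata hierarchy datasource -> cluster -> namespace -> workload -> container. A condensed sketch of that traversal follows; it assumes a DataSourceMetadataInfo root type with the getDatasources() accessor used above living in the same dataSourceMetadata package, while every other getter appears verbatim in this diff.

    import com.autotune.common.data.dataSourceMetadata.*;

    // Condensed view of the loop structure in getExperimentMap() after this PR.
    public class MetadataTraversalSketch {
        public static void walk(DataSourceMetadataInfo metadataInfo) {
            for (DataSource ds : metadataInfo.getDatasources().values()) {
                for (DataSourceCluster dsc : ds.getClusters().values()) {
                    for (DataSourceNamespace ns : dsc.getNamespaces().values()) {
                        if (ns.getWorkloads() == null) continue;       // namespace may have no workloads
                        for (DataSourceWorkload dsw : ns.getWorkloads().values()) {
                            if (dsw.getContainers() == null) continue; // workload may have no containers
                            for (DataSourceContainer dc : dsw.getContainers().values()) {
                                // one experiment is created per container
                                System.out.printf("%s/%s/%s/%s/%s%n",
                                        ds.getDataSourceName(),
                                        dsc.getDataSourceClusterName(),
                                        ns.getNamespace(),
                                        dsw.getWorkloadName(),
                                        dc.getContainerName());
                            }
                        }
                    }
                }
            }
        }
    }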
@@ -457,12 +458,12 @@ private CreateExperimentAPIObject prepareCreateExperimentJSONInput(DataSourceCon
createExperimentAPIObject.setPerformanceProfile(CREATE_EXPERIMENT_CONFIG_BEAN.getPerformanceProfile());
List<KubernetesAPIObject> kubernetesAPIObjectList = new ArrayList<>();
KubernetesAPIObject kubernetesAPIObject = new KubernetesAPIObject();
- ContainerAPIObject cao = new ContainerAPIObject(dc.getDataSourceContainerName(),
- dc.getDataSourceContainerImageName(), null, null);
+ ContainerAPIObject cao = new ContainerAPIObject(dc.getContainerName(),
+ dc.getContainerImageName(), null, null);
kubernetesAPIObject.setContainerAPIObjects(Arrays.asList(cao));
- kubernetesAPIObject.setName(dsw.getDataSourceWorkloadName());
- kubernetesAPIObject.setType(dsw.getDataSourceWorkloadType());
- kubernetesAPIObject.setNamespace(namespace.getDataSourceNamespaceName());
+ kubernetesAPIObject.setName(dsw.getWorkloadName());
+ kubernetesAPIObject.setType(dsw.getWorkloadType());
+ kubernetesAPIObject.setNamespace(namespace.getNamespace());
kubernetesAPIObjectList.add(kubernetesAPIObject);
createExperimentAPIObject.setKubernetesObjects(kubernetesAPIObjectList);
RecommendationSettings rs = new RecommendationSettings();
@@ -493,10 +494,10 @@ public String frameExperimentName(String labelString, DataSourceCluster dataSour

String datasource = this.bulkInput.getDatasource();
String clusterName = dataSourceCluster.getDataSourceClusterName();
- String namespace = dataSourceNamespace.getDataSourceNamespaceName();
- String workloadName = dataSourceWorkload.getDataSourceWorkloadName();
- String workloadType = dataSourceWorkload.getDataSourceWorkloadType();
- String containerName = dataSourceContainer.getDataSourceContainerName();
+ String namespace = dataSourceNamespace.getNamespace();
+ String workloadName = dataSourceWorkload.getWorkloadName();
+ String workloadType = dataSourceWorkload.getWorkloadType();
+ String containerName = dataSourceContainer.getContainerName();

String experimentName = KruizeDeploymentInfo.experiment_name_format
.replace("%datasource%", datasource)
src/main/java/com/autotune/common/data/dataSourceMetadata/DataSource.java
@@ -1,6 +1,7 @@
package com.autotune.common.data.dataSourceMetadata;

import com.autotune.utils.KruizeConstants;
+ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -14,38 +15,38 @@
public class DataSource {
private static final Logger LOGGER = LoggerFactory.getLogger(DataSource.class);
@SerializedName(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.DATASOURCE_NAME)
+ @JsonProperty(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.DATASOURCE_NAME)
private String dataSourceName;

/**
* Key: Cluster name
* Value: Associated DataSourceCluster object
*/
@SerializedName(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.CLUSTERS)
- private HashMap<String, DataSourceCluster> clusterHashMap;
+ private HashMap<String, DataSourceCluster> clusters;

- public DataSource(String dataSourceName, HashMap<String,DataSourceCluster> clusterHashMap) {
+ public DataSource(String dataSourceName, HashMap<String, DataSourceCluster> clusters) {
this.dataSourceName = dataSourceName;
- this.clusterHashMap = clusterHashMap;
+ this.clusters = clusters;
}

public String getDataSourceName() {
return dataSourceName;
}

- public HashMap<String, DataSourceCluster> getDataSourceClusterHashMap() {
- return clusterHashMap;
+ public HashMap<String, DataSourceCluster> getClusters() {
+ return clusters;
}

- public void setDataSourceClusterHashMap(HashMap<String, DataSourceCluster> clusterHashMap) {
+ public void setClusters(HashMap<String, DataSourceCluster> clusterHashMap) {
if (null == clusterHashMap) {
LOGGER.debug(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.SET_CLUSTER_MAP_ERROR + "{}", dataSourceName);
}
- this.clusterHashMap = clusterHashMap;
+ this.clusters = clusterHashMap;
}

public DataSourceCluster getDataSourceClusterObject(String clusterName) {
- if (null != clusterHashMap && clusterHashMap.containsKey(clusterName)) {
- return clusterHashMap.get(clusterName);
+ if (null != clusters && clusters.containsKey(clusterName)) {
+ return clusters.get(clusterName);
}
return null;
}
@@ -54,7 +55,7 @@ public DataSourceCluster getDataSourceClusterObject(String clusterName) {
public String toString() {
return "DataSource{" +
"datasource_name='" + dataSourceName + '\'' +
- ", clusters=" + clusterHashMap +
+ ", clusters=" + clusters +
'}';
}
}
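Note: each serialized field in these metadata classes now carries both Gson's @SerializedName and Jackson's @JsonProperty with the same key from KruizeConstants, so both libraries read and write identical JSON. A standalone sketch of the pattern follows; the literal "datasource_name" key is inferred from the toString() output above, whereas the real class references the KruizeConstants constant.

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.google.gson.Gson;
    import com.google.gson.annotations.SerializedName;

    // Dual-annotation pattern: one JSON key supplied to both Gson and Jackson.
    public class DualAnnotationSketch {
        static class Sample {
            @SerializedName("datasource_name")
            @JsonProperty("datasource_name")
            private String dataSourceName;

            Sample(String dataSourceName) { this.dataSourceName = dataSourceName; }
        }

        public static void main(String[] args) throws Exception {
            Sample s = new Sample("prometheus-1");
            System.out.println(new Gson().toJson(s));                      // {"datasource_name":"prometheus-1"}
            System.out.println(new ObjectMapper().writeValueAsString(s));  // {"datasource_name":"prometheus-1"}
        }
    }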
src/main/java/com/autotune/common/data/dataSourceMetadata/DataSourceCluster.java
@@ -1,6 +1,7 @@
package com.autotune.common.data.dataSourceMetadata;

import com.autotune.utils.KruizeConstants;
+ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -14,38 +15,38 @@
public class DataSourceCluster {
private static final Logger LOGGER = LoggerFactory.getLogger(DataSourceCluster.class);
@SerializedName(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.CLUSTER_NAME)
+ @JsonProperty(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.CLUSTER_NAME)
private String clusterName;

/**
* Key: Namespace
* Value: Associated DataSourceNamespace object
*/
@SerializedName(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.NAMESPACES)
- private HashMap<String, DataSourceNamespace> namespaceHashMap;
+ private HashMap<String, DataSourceNamespace> namespaces;

- public DataSourceCluster(String clusterName, HashMap<String, DataSourceNamespace> namespaceHashMap) {
+ public DataSourceCluster(String clusterName, HashMap<String, DataSourceNamespace> namespaces) {
this.clusterName = clusterName;
- this.namespaceHashMap = namespaceHashMap;
+ this.namespaces = namespaces;
}

public String getDataSourceClusterName() {
return clusterName;
}

- public HashMap<String, DataSourceNamespace> getDataSourceNamespaceHashMap() {
- return namespaceHashMap;
+ public HashMap<String, DataSourceNamespace> getNamespaces() {
+ return namespaces;
}

- public void setDataSourceNamespaceHashMap(HashMap<String, DataSourceNamespace> namespaceHashMap) {
+ public void setNamespaces(HashMap<String, DataSourceNamespace> namespaceHashMap) {
if (null == namespaceHashMap) {
LOGGER.debug(KruizeConstants.DataSourceConstants.DataSourceMetadataErrorMsgs.SET_NAMESPACE_MAP_ERROR + "{}", clusterName);
}
- this.namespaceHashMap = namespaceHashMap;
+ this.namespaces = namespaceHashMap;
}

public DataSourceNamespace getDataSourceNamespaceObject(String namespace) {
- if (null != namespaceHashMap && namespaceHashMap.containsKey(namespace)) {
- return namespaceHashMap.get(namespace);
+ if (null != namespaces && namespaces.containsKey(namespace)) {
+ return namespaces.get(namespace);
}
return null;
}
@@ -54,7 +55,7 @@ public DataSourceNamespace getDataSourceNamespaceObject(String namespace) {
public String toString() {
return "DataSourceCluster{" +
"cluster_name='" + clusterName + '\'' +
- ", namespaces=" + namespaceHashMap +
+ ", namespaces=" + namespaces +
'}';
}
}
src/main/java/com/autotune/common/data/dataSourceMetadata/DataSourceContainer.java
@@ -1,24 +1,32 @@
package com.autotune.common.data.dataSourceMetadata;

import com.autotune.utils.KruizeConstants;
+ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;

/**
* DataSourceContainer object represents the container metadata for a workload
*/
public class DataSourceContainer {
@SerializedName(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.CONTAINER_NAME)
+ @JsonProperty(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.CONTAINER_NAME)
private String containerName;
@SerializedName(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.CONTAINER_IMAGE_NAME)
+ @JsonProperty(KruizeConstants.DataSourceConstants.DataSourceMetadataInfoJSONKeys.CONTAINER_IMAGE_NAME)
private String containerImageName;

public DataSourceContainer(String containerName, String containerImageName) {
this.containerName = containerName;
this.containerImageName = containerImageName;
}

- public String getDataSourceContainerName() { return containerName;}
- public String getDataSourceContainerImageName() { return containerImageName;}
+ public String getContainerName() {
+ return containerName;
+ }
+
+ public String getContainerImageName() {
+ return containerImageName;
+ }

@Override
public String toString() {
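Note: without @JsonProperty, Jackson would derive the property names from the bean accessors (containerName, containerImageName) and disagree with the snake_case keys Gson emits via @SerializedName; the added annotations keep the two serializers aligned. A hedged round-trip sketch follows: the JSON keys "container_name" and "container_image_name" are inferred from the constant names in this diff rather than confirmed values, and the standalone Container class merely stands in for DataSourceContainer.

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Round-trip sketch for a container entry using the renamed getters.
    public class ContainerJsonSketch {
        static class Container {
            @JsonProperty("container_name")        // key inferred from CONTAINER_NAME
            private String containerName;
            @JsonProperty("container_image_name")  // key inferred from CONTAINER_IMAGE_NAME
            private String containerImageName;

            public String getContainerName() { return containerName; }
            public String getContainerImageName() { return containerImageName; }
        }

        public static void main(String[] args) throws Exception {
            String json = "{\"container_name\":\"app\",\"container_image_name\":\"quay.io/example/app:latest\"}";
            Container c = new ObjectMapper().readValue(json, Container.class);
            System.out.println(c.getContainerName() + " -> " + c.getContainerImageName());
        }
    }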
