Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Updating dataproc container cost to be multiplied by number of cores #648

Merged
merged 9 commits into from
Nov 3, 2023
4 changes: 3 additions & 1 deletion user_tools/src/spark_rapids_pytools/cloud_api/dataproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -545,5 +545,7 @@ def _calculate_group_cost(self, cluster_inst: ClusterGetAccessor, node_type: Spa
def _get_cost_per_cluster(self, cluster: ClusterGetAccessor):
    """Estimate the total hourly cost of the given Dataproc cluster.

    Sums the VM cost of the master and worker node groups plus the Dataproc
    container fee, which is billed per vCPU across all nodes in the cluster.

    :param cluster: accessor exposing node counts and per-node core counts
        for each Spark node group.
    :return: master-group cost + worker-group cost + per-core container fee.
    """
    master_cost = self._calculate_group_cost(cluster, SparkNodeType.MASTER)
    workers_cost = self._calculate_group_cost(cluster, SparkNodeType.WORKER)
    # Dataproc charges the container fee per vCPU, so scale the unit
    # container price by the total core count of the whole cluster.
    master_cores = cluster.get_nodes_cnt(SparkNodeType.MASTER) * cluster.get_node_core_count(SparkNodeType.MASTER)
    worker_cores = cluster.get_nodes_cnt(SparkNodeType.WORKER) * cluster.get_node_core_count(SparkNodeType.WORKER)
    dataproc_cost = self.price_provider.get_container_cost() * (master_cores + worker_cores)
    return master_cost + workers_cost + dataproc_cost
5 changes: 2 additions & 3 deletions user_tools/src/spark_rapids_pytools/cloud_api/dataproc_gke.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,6 @@ class DataprocGkeSavingsEstimator(DataprocSavingsEstimator):
"""

def _get_cost_per_cluster(self, cluster: ClusterGetAccessor):
    """Estimate the total hourly cost of a Dataproc-on-GKE cluster.

    Starts from the parent Dataproc estimate (node-group VM costs plus the
    per-core Dataproc container fee) and adds the GKE container surcharge.

    :param cluster: accessor exposing node counts and per-node core counts
        for each Spark node group.
    :return: Dataproc cluster cost plus the GKE container cost.
    """
    # The parent estimator requires the cluster accessor to compute node
    # and per-core container costs; forward it explicitly.
    dataproc_cost = super()._get_cost_per_cluster(cluster)
    dataproc_gke_cost = self.price_provider.get_container_cost()
    return dataproc_cost + dataproc_gke_cost
6 changes: 5 additions & 1 deletion user_tools/src/spark_rapids_pytools/cloud_api/onprem.py
Original file line number Diff line number Diff line change
Expand Up @@ -312,6 +312,10 @@ def _get_cost_per_cluster(self, cluster: ClusterGetAccessor):
if self.price_provider.name.casefold() == 'dataproc':
master_cost = self.__calculate_dataproc_group_cost(cluster, SparkNodeType.MASTER)
workers_cost = self.__calculate_dataproc_group_cost(cluster, SparkNodeType.WORKER)
dataproc_cost = self.price_provider.get_container_cost()
master_cores = (cluster.get_nodes_cnt(SparkNodeType.MASTER)
* cluster.get_node_core_count(SparkNodeType.MASTER))
worker_cores = (cluster.get_nodes_cnt(SparkNodeType.WORKER)
* cluster.get_node_core_count(SparkNodeType.WORKER))
dataproc_cost = self.price_provider.get_container_cost() * (master_cores + worker_cores)
total_cost = master_cost + workers_cost + dataproc_cost
return total_cost
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ class DataprocGkePriceProvider(DataprocPriceProvider):
name = 'DataprocGke'

def get_container_cost(self) -> float:
    # Total container cost for Dataproc-on-GKE: the base Dataproc container
    # fee (delegated to the parent provider) plus the GKE-specific
    # container fee computed by this class's private helper.
    dataproc_cost = super().get_container_cost()
    gke_container_cost = self.__get_gke_container_cost()
    return dataproc_cost + gke_container_cost
parthosa marked this conversation as resolved.
Show resolved Hide resolved

Expand Down