From 768dda5ecbce457b9949900d9bcad1fefb69bea2 Mon Sep 17 00:00:00 2001
From: Sameer Sharma <30409342+CaptainSame@users.noreply.github.com>
Date: Mon, 5 Feb 2024 16:35:29 +0000
Subject: [PATCH] MLCOMPUTE-1155 | fix cost factor default while reading spark
 costs from srv configs (#138)

Co-authored-by: Sameer Sharma
---
 service_configuration_lib/spark_config.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/service_configuration_lib/spark_config.py b/service_configuration_lib/spark_config.py
index 5a27769..9a03b2c 100644
--- a/service_configuration_lib/spark_config.py
+++ b/service_configuration_lib/spark_config.py
@@ -980,7 +980,7 @@ def compute_approx_hourly_cost_dollars(
             spark_conf.get('spark.dynamicAllocation.minExecutors', min_cores),
         ))
 
-        cost_factor = self.spark_costs.get(paasta_cluster, dict())[paasta_pool]
+        cost_factor = self.spark_costs.get(paasta_cluster, dict()).get(paasta_pool, 0)
 
         min_dollars = round(min_cores * cost_factor, 5)
         max_dollars = round(max_cores * cost_factor, 5)
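
For context, a minimal sketch (not part of the patch) of why guarding only the
outer lookup was not enough. The variable names mirror spark_config.py; the
cost table and pool/cluster names below are invented for illustration.

    # Hypothetical cost table: cluster -> pool -> hourly cost factor.
    spark_costs = {'cluster-a': {'batch': 0.042}}

    paasta_cluster, paasta_pool = 'cluster-a', 'gpu-pool'

    # Before the fix: .get() only guarded the cluster lookup, so a known
    # cluster with an unrecognized pool still raised KeyError.
    try:
        cost_factor = spark_costs.get(paasta_cluster, dict())[paasta_pool]
    except KeyError:
        print('old code crashes: pool not in cost table')

    # After the fix: both lookups are guarded, so an unknown pool (or an
    # unknown cluster) falls back to a cost factor of 0.
    cost_factor = spark_costs.get(paasta_cluster, dict()).get(paasta_pool, 0)
    assert cost_factor == 0

    min_cores = 4
    min_dollars = round(min_cores * cost_factor, 5)  # 0.0 instead of a crash

The trade-off is that a missing srv-configs entry now silently reports a cost
of $0 rather than failing, which suits a best-effort estimate like
compute_approx_hourly_cost_dollars.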