From 46aac3af6c2e721ad2bb9d70008b17c3a0c11a14 Mon Sep 17 00:00:00 2001
From: Jeff Ohrstrom
Date: Fri, 14 Apr 2023 13:13:36 -0400
Subject: [PATCH] Max mem workers (#29)

Apply a maximum of 60GB to executor memory.
---
 template/before.sh.erb | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/template/before.sh.erb b/template/before.sh.erb
index cf33ac9..a8ea0a1 100755
--- a/template/before.sh.erb
+++ b/template/before.sh.erb
@@ -31,6 +31,11 @@
     end
   end
 
+  def exec_memory
+    calculated = avail_mem/context.num_workers.to_i
+    [60, calculated].min
+  end
+
   spark_config = {
     "spark.ui.reverseProxy" => "true",
     #"spark.ui.reverseProxyUrl" => "https://ondemand.osc.edu/node/${SPARK_MASTER_HOST}/${SPARK_MASTER_WEBUI_PORT}",
@@ -46,7 +51,7 @@
   def pyspark_submit_args
     args = []
 
-    args.concat(["--executor-memory #{avail_mem/context.num_workers.to_i}G"]) unless extra_spark_config.has_key?('spark.executor.memory')
+    args.concat(["--executor-memory #{exec_memory}G"]) unless extra_spark_config.has_key?('spark.executor.memory')
     args.concat(['--conf spark.driver.maxResultSize=0']) unless extra_spark_config.has_key?("spark.driver.maxResultSize")
     if extra_spark_config.has_key?('spark.driver.memory')
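For illustration, a minimal standalone sketch of the capping logic this patch introduces; avail_mem and num_workers below are hypothetical example values, not the values the real before.sh.erb derives from the job context.

  # Sketch only: avail_mem and num_workers are made-up example inputs,
  # standing in for what the template computes from the submitted job.
  avail_mem   = 180   # GB available on the node (hypothetical)
  num_workers = 2     # requested Spark workers (hypothetical)

  calculated  = avail_mem / num_workers   # naive per-worker share: 90 GB
  exec_memory = [60, calculated].min      # capped at 60 GB, mirroring the patch

  puts "--executor-memory #{exec_memory}G"   # => --executor-memory 60G

With fewer workers on a large node the naive share can exceed what a single executor should claim, so taking the minimum against 60 keeps the generated --executor-memory flag bounded.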