From bf18cc9b925fdd5d2074ae5ff1f7488dde006ee7 Mon Sep 17 00:00:00 2001
From: "Oguz Ulgen (Meta Employee)"
Date: Mon, 23 Dec 2024 09:39:09 -0800
Subject: [PATCH] Rename cache limit to recompile limit in configs (#143709)

Summary:
This PR renames every cache_limit to recompile_limit via sed.

Old config options are maintained via Config(alias='xyz').

X-link: https://github.com/pytorch/pytorch/pull/143709
Approved by: https://github.com/jansel

Reviewed By: aorenste

Differential Revision: D67580275

Pulled By: oulgen

fbshipit-source-id: 5f44f01a2d43ee280a4872b112aff7eaf7b71700
---
 userbenchmark/dynamo/dynamobench/_dynamo/utils.py   | 12 ++++++------
 userbenchmark/dynamo/dynamobench/torchao_backend.py |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/userbenchmark/dynamo/dynamobench/_dynamo/utils.py b/userbenchmark/dynamo/dynamobench/_dynamo/utils.py
index 742e1893d..1f8bca919 100644
--- a/userbenchmark/dynamo/dynamobench/_dynamo/utils.py
+++ b/userbenchmark/dynamo/dynamobench/_dynamo/utils.py
@@ -2401,16 +2401,16 @@ def format_func_info(code):
 
 @contextlib.contextmanager
 def disable_cache_limit():
-    prior = config.cache_size_limit
-    config.cache_size_limit = sys.maxsize
-    prior_acc_limit = config.accumulated_cache_size_limit
-    config.accumulated_cache_size_limit = sys.maxsize
+    prior = config.recompile_limit
+    config.recompile_limit = sys.maxsize
+    prior_acc_limit = config.accumulated_recompile_limit
+    config.accumulated_recompile_limit = sys.maxsize
 
     try:
         yield
     finally:
-        config.cache_size_limit = prior
-        config.accumulated_cache_size_limit = prior_acc_limit
+        config.recompile_limit = prior
+        config.accumulated_recompile_limit = prior_acc_limit
 
 
 # map from transformed code back to original user code
diff --git a/userbenchmark/dynamo/dynamobench/torchao_backend.py b/userbenchmark/dynamo/dynamobench/torchao_backend.py
index 385485378..17876005a 100644
--- a/userbenchmark/dynamo/dynamobench/torchao_backend.py
+++ b/userbenchmark/dynamo/dynamobench/torchao_backend.py
@@ -8,7 +8,7 @@
 def setup_baseline():
     recommended_inductor_config_setter()
     torch._dynamo.config.automatic_dynamic_shapes = False
-    torch._dynamo.config.cache_size_limit = 10000
+    torch._dynamo.config.recompile_limit = 10000
 
 
 def torchao_optimize_ctx(quantization: str):
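
Usage note (not part of the patch): below is a minimal sketch of how the renamed knobs are exercised. The new attribute names (recompile_limit, accumulated_recompile_limit) and the disable_cache_limit() context manager are taken from the diff above; the claim that the old cache_size_limit spelling keeps working rests solely on the commit message's Config(alias='xyz') note and is an assumption here, not something this patch demonstrates.

    import torch

    # New names introduced by this rename (see the torchao_backend.py hunk):
    torch._dynamo.config.recompile_limit = 10000
    torch._dynamo.config.accumulated_recompile_limit = 10000

    # Old spelling, assumed to keep working as an alias of recompile_limit
    # per the commit message's Config(alias='xyz') note:
    torch._dynamo.config.cache_size_limit = 10000

    # disable_cache_limit() (patched above) lifts both limits to
    # sys.maxsize for the duration of the block, then restores the prior
    # values; this import path is the benchmark copy this PR touches.
    from userbenchmark.dynamo.dynamobench._dynamo.utils import disable_cache_limit

    with disable_cache_limit():
        pass  # compile models here without tripping the recompile limit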