From c93f3ea0ea9bb667fbd6241399a6046cac737d0f Mon Sep 17 00:00:00 2001
From: Felipe Mello
Date: Thu, 12 Sep 2024 17:13:31 -0700
Subject: [PATCH] add compile

---
 recipes/full_finetune_distributed.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/recipes/full_finetune_distributed.py b/recipes/full_finetune_distributed.py
index 5f5e3bef90..2112cd886d 100644
--- a/recipes/full_finetune_distributed.py
+++ b/recipes/full_finetune_distributed.py
@@ -362,6 +362,9 @@ def _setup_model(
         with training.set_default_dtype(self._dtype), torch.device("meta"):
             model = config.instantiate(cfg_model)
 
+        if self._compile:
+            training.compile_model(model, verbose=self._is_rank_zero)
+
         # We currently have two versions of activation checkpointing in this recipe
         # for testing and BC purposes. ``enable_activation_checkpointing`` controls
         # the older version of AC and this behavior is unchanged