activation_checkpointing_functions.py

# Copyright (c) Meta Platforms, Inc. and affiliates.
# This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.

from functools import partial

from torch.distributed.algorithms._checkpoint.checkpoint_wrapper import (
    checkpoint_wrapper,
    CheckpointImpl,
    apply_activation_checkpointing,
)
from transformers.models.llama.modeling_llama import LlamaDecoderLayer

# Wrap submodules with non-reentrant checkpointing, the variant recommended
# for use with FSDP.
non_reentrant_wrapper = partial(
    checkpoint_wrapper,
    checkpoint_impl=CheckpointImpl.NO_REENTRANT,
)

# Checkpoint only the transformer decoder layers.
check_fn = lambda submodule: isinstance(submodule, LlamaDecoderLayer)


def apply_fsdp_checkpointing(model):
    """Apply activation checkpointing to the model.

    Returns None; the model is updated in place.
    """
    print("--> applying fsdp activation checkpointing...")
    apply_activation_checkpointing(
        model, checkpoint_wrapper_fn=non_reentrant_wrapper, check_fn=check_fn
    )
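
For context, a minimal usage sketch follows. It assumes a distributed process group has already been initialized (e.g. via torchrun) and uses a hypothetical checkpoint path; in the surrounding training flow, apply_fsdp_checkpointing is typically called on the model after it has been wrapped in FSDP, so this is a sketch under those assumptions rather than the project's exact setup.

# Minimal usage sketch (assumes a torchrun-style launcher; the model path
# below is hypothetical).
import torch.distributed as dist
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
from transformers import LlamaForCausalLM

dist.init_process_group("nccl")

model = LlamaForCausalLM.from_pretrained("path/to/llama-2-7b")  # hypothetical path
model = FSDP(model)  # shard parameters across ranks first

# Each LlamaDecoderLayer now discards its activations in the forward pass
# and recomputes them during backward, trading compute for memory.
apply_fsdp_checkpointing(model)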