activation_checkpointing_functions.py

# Copyright (c) Meta Platforms, Inc. and affiliates.
# This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
import torch
import os
import torch.distributed as dist
from torch.distributed.algorithms._checkpoint.checkpoint_wrapper import (
    checkpoint_wrapper,
    CheckpointImpl,
    apply_activation_checkpointing,
)
from transformers.models.t5.modeling_t5 import T5Block
from transformers.models.llama.modeling_llama import LlamaDecoderLayer
from functools import partial

# Non-reentrant checkpointing is the variant PyTorch recommends for use
# with FSDP; activations are recomputed during the backward pass.
non_reentrant_wrapper = partial(
    checkpoint_wrapper,
    checkpoint_impl=CheckpointImpl.NO_REENTRANT,
)

# Only wrap whole decoder layers; checkpointing smaller submodules is not
# worth the recomputation overhead.
check_fn = lambda submodule: isinstance(submodule, LlamaDecoderLayer)


def apply_fsdp_checkpointing(model):
    """Apply activation checkpointing to the model.

    Returns None; the model is updated in place.
    """
    print("--> applying fsdp activation checkpointing...")

    apply_activation_checkpointing(
        model, checkpoint_wrapper_fn=non_reentrant_wrapper, check_fn=check_fn
    )
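

# --- Usage sketch ---
# A minimal, hedged example of calling apply_fsdp_checkpointing. The
# checkpoint path, backend choice, and FSDP arguments below are
# illustrative assumptions, not part of this module.
if __name__ == "__main__":
    from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
    from transformers import LlamaForCausalLM

    # Assumes launch via torchrun, which sets RANK/WORLD_SIZE in the env.
    dist.init_process_group("nccl")
    torch.cuda.set_device(dist.get_rank() % torch.cuda.device_count())

    model = LlamaForCausalLM.from_pretrained("path/to/llama-2")  # hypothetical path
    model = FSDP(model, device_id=torch.cuda.current_device())

    # Apply checkpointing after FSDP wrapping, so each LlamaDecoderLayer
    # matched by check_fn is checkpointed inside its FSDP unit.
    apply_fsdp_checkpointing(model)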