lchu 1 year ago
parent
commit
0c51b47262
1 changed file with 2 additions and 1 deletion
  1. +2 −1
      llama_finetuning.py

+ 2 - 1
llama_finetuning.py

@@ -86,7 +86,8 @@ def main(**kwargs):
         world_size = int(os.environ["WORLD_SIZE"])
 
     if torch.distributed.is_initialized():
-        torch.cuda.set_device(rank)
+        torch.cuda.set_device(local_rank)
+        clear_gpu_cache(local_rank)
         setup_environ_flags(rank)
 
     # Calculate gradient accumulation steps