
remove relative imports

Matthias Reso 1 year ago
parent commit ce9501f22c
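
Every hunk below applies the same mechanical change: intra-package relative imports are rewritten as fully qualified `llama_recipes.*` imports. A minimal sketch of the before/after behaviour, assuming the package is importable (for example via an editable install from the repository root):

```python
# Before: the relative form resolves only when the module is imported as part
# of its parent package; executing the file directly fails with
# "ImportError: attempted relative import with no known parent package".
# from .configs import train_config

# After: the absolute form resolves from anywhere on sys.path, so the same
# module works both when imported and when executed as a script.
from llama_recipes.configs import train_config

print(train_config)
```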

+ 3 - 3
src/llama_recipes/configs/__init__.py

@@ -1,6 +1,6 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates.
 # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
 
-from .peft import lora_config, llama_adapter_config, prefix_config
-from .fsdp import fsdp_config
-from .training import train_config
+from llama_recipes.configs.peft import lora_config, llama_adapter_config, prefix_config
+from llama_recipes.configs.fsdp import fsdp_config
+from llama_recipes.configs.training import train_config

+ 3 - 3
src/llama_recipes/datasets/__init__.py

@@ -1,6 +1,6 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates.
 # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
 
-from .grammar_dataset import get_dataset as get_grammar_dataset
-from .alpaca_dataset import InstructionDataset as get_alpaca_dataset
-from .samsum_dataset import get_preprocessed_samsum as get_samsum_dataset
+from llama_recipes.datasets.grammar_dataset.grammar_dataset import get_dataset as get_grammar_dataset
+from llama_recipes.datasets.alpaca_dataset import InstructionDataset as get_alpaca_dataset
+from llama_recipes.datasets.samsum_dataset import get_preprocessed_samsum as get_samsum_dataset

+ 0 - 1
src/llama_recipes/datasets/grammar_dataset/__init__.py

@@ -1,4 +1,3 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates.
 # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
 
-from .grammar_dataset import get_dataset
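
With this re-export gone, `get_dataset` is no longer exposed on the `grammar_dataset` sub-package itself, which is why the `datasets/__init__.py` hunk above imports it from the full module path. A short sketch of the difference, assuming the package is installed:

```python
# Works after this commit: import straight from the module inside the sub-package.
from llama_recipes.datasets.grammar_dataset.grammar_dataset import get_dataset

# Relied on the re-export deleted above, so the shorter form no longer resolves:
# from llama_recipes.datasets.grammar_dataset import get_dataset
```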

+ 1 - 1
src/llama_recipes/datasets/grammar_dataset/grammar_dataset.py

@@ -10,7 +10,7 @@ from pathlib import Path
 
 from torch.utils.data import Dataset
 
-from ..utils import ConcatDataset
+from llama_recipes.datasets.utils import ConcatDataset
 
 
 class grammar(Dataset):

+ 1 - 1
src/llama_recipes/datasets/samsum_dataset.py

@@ -5,7 +5,7 @@
 
 import datasets
 
-from .utils import Concatenator
+from llama_recipes.datasets.utils import Concatenator
 
 def get_preprocessed_samsum(dataset_config, tokenizer, split):
     dataset = datasets.load_dataset("samsum", split=split)

+ 6 - 6
src/llama_recipes/finetuning.py

@@ -22,18 +22,18 @@ from transformers import (
 )
 from transformers.models.llama.modeling_llama import LlamaDecoderLayer
 
-from .configs import fsdp_config, train_config
-from .policies import AnyPrecisionAdamW, apply_fsdp_checkpointing
+from llama_recipes.configs import fsdp_config, train_config
+from llama_recipes.policies import AnyPrecisionAdamW, apply_fsdp_checkpointing
 
-from .utils import fsdp_auto_wrap_policy
-from .utils.config_utils import (
+from llama_recipes.utils import fsdp_auto_wrap_policy
+from llama_recipes.utils.config_utils import (
     update_config,
     generate_peft_config,
     generate_dataset_config,
 )
-from .utils.dataset_utils import get_preprocessed_dataset
+from llama_recipes.utils.dataset_utils import get_preprocessed_dataset
 
-from .utils.train_utils import (
+from llama_recipes.utils.train_utils import (
     train,
     freeze_transformer_layers,
     setup,

+ 1 - 1
src/llama_recipes/inference/__main__.py

@@ -3,7 +3,7 @@
 
 import fire
 
-from .inference import main
+from llama_recipes.inference.inference import main
 
 if __name__ == "__main__":
     fire.Fire(main)
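
Note: with the absolute import, this entry point behaves the same whether it is invoked as `python -m llama_recipes.inference` or run as a plain file, provided `llama_recipes` is importable (for example via an editable install); the previous relative form resolved only under the `-m` invocation.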

+ 3 - 3
src/llama_recipes/inference/chat_completion.py

@@ -10,9 +10,9 @@ import sys
 import torch
 from transformers import LlamaTokenizer
 
-from .chat_utils import read_dialogs_from_file, format_tokens
-from .model_utils import load_model, load_peft_model
-from .safety_utils import get_safety_checker
+from llama_recipes.inference.chat_utils import read_dialogs_from_file, format_tokens
+from llama_recipes.inference.model_utils import load_model, load_peft_model
+from llama_recipes.inference.safety_utils import get_safety_checker
 
 
 def main(

+ 1 - 1
src/llama_recipes/inference/checkpoint_converter_fsdp_hf.py

@@ -10,7 +10,7 @@ import yaml
 
 from transformers import LlamaTokenizer
 
-from .model_utils import  load_llama_from_config
+from llama_recipes.inference.model_utils import  load_llama_from_config
 
 # Get the current file's directory
 current_directory = os.path.dirname(os.path.abspath(__file__))

+ 2 - 2
src/llama_recipes/inference/code_llama/code_completion_example.py

@@ -11,8 +11,8 @@ import time
 import torch
 from transformers import AutoTokenizer
 
-from ..safety_utils import get_safety_checker
-from ..model_utils import load_model, load_peft_model
+from llama_recipes.inference.safety_utils import get_safety_checker
+from llama_recipes.inference.model_utils import load_model, load_peft_model
 
 
 def main(

+ 2 - 2
src/llama_recipes/inference/code_llama/code_infilling_example.py

@@ -11,8 +11,8 @@ import time
 
 from transformers import AutoTokenizer
 
-from ..safety_utils import get_safety_checker
-from ..model_utils import load_model, load_peft_model
+from llama_recipes.inference.safety_utils import get_safety_checker
+from llama_recipes.inference.model_utils import load_model, load_peft_model
 
 def main(
     model_name,

+ 2 - 2
src/llama_recipes/inference/inference.py

@@ -11,8 +11,8 @@ import time
 import torch
 from transformers import LlamaTokenizer
 
-from .safety_utils import get_safety_checker
-from .model_utils import load_model, load_peft_model
+from llama_recipes.inference.safety_utils import get_safety_checker
+from llama_recipes.inference.model_utils import load_model, load_peft_model
 
 
 def main(

+ 1 - 1
src/llama_recipes/model_checkpointing/__init__.py

@@ -1,7 +1,7 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates.
 # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
 
-from .checkpoint_handler import (
+from llama_recipes.model_checkpointing.checkpoint_handler import (
     load_model_checkpoint,
     save_model_checkpoint,
     load_optimizer_checkpoint,

+ 4 - 4
src/llama_recipes/policies/__init__.py

@@ -1,7 +1,7 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates.
 # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
 
-from .mixed_precision import *
-from .wrapping import *
-from .activation_checkpointing_functions import apply_fsdp_checkpointing
-from .anyprecision_optimizer import AnyPrecisionAdamW
+from llama_recipes.policies.mixed_precision import *
+from llama_recipes.policies.wrapping import *
+from llama_recipes.policies.activation_checkpointing_functions import apply_fsdp_checkpointing
+from llama_recipes.policies.anyprecision_optimizer import AnyPrecisionAdamW

+ 4 - 4
src/llama_recipes/utils/__init__.py

@@ -1,7 +1,7 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates.
 # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
 
-from .memory_utils import MemoryTrace
-from .dataset_utils import *
-from .fsdp_utils import fsdp_auto_wrap_policy
-from .train_utils import *
+from llama_recipes.utils.memory_utils import MemoryTrace
+from llama_recipes.utils.dataset_utils import *
+from llama_recipes.utils.fsdp_utils import fsdp_auto_wrap_policy
+from llama_recipes.utils.train_utils import *

+ 2 - 2
src/llama_recipes/utils/config_utils.py

@@ -10,8 +10,8 @@ from peft import (
     PrefixTuningConfig,
 )
 
-from ..configs import datasets, lora_config, llama_adapter_config, prefix_config, train_config
-from .dataset_utils import DATASET_PREPROC
+from llama_recipes.configs import datasets, lora_config, llama_adapter_config, prefix_config, train_config
+from llama_recipes.utils.dataset_utils import DATASET_PREPROC
 
 
 def update_config(config, **kwargs):

+ 1 - 1
src/llama_recipes/utils/dataset_utils.py

@@ -5,7 +5,7 @@ from functools import partial
 
 import torch
 
-from ..datasets import (
+from llama_recipes.datasets import (
     get_grammar_dataset,
     get_alpaca_dataset,
     get_samsum_dataset,

+ 3 - 3
src/llama_recipes/utils/train_utils.py

@@ -17,9 +17,9 @@ from tqdm import tqdm
 from transformers import LlamaTokenizer
 
 
-from .memory_utils import MemoryTrace
-from ..model_checkpointing import save_model_checkpoint, save_model_and_optimizer_sharded, save_optimizer_checkpoint
-from ..policies import fpSixteen,bfSixteen_mixed, get_llama_wrapper
+from llama_recipes.model_checkpointing import save_model_checkpoint, save_model_and_optimizer_sharded, save_optimizer_checkpoint
+from llama_recipes.policies import fpSixteen,bfSixteen_mixed, get_llama_wrapper
+from llama_recipes.utils.memory_utils import MemoryTrace
 
 
 def set_tokenizer_params(tokenizer: LlamaTokenizer):
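
Once the package is importable, the absolute module paths introduced above and the package-level re-exports resolve to the same objects. A minimal smoke test, assuming an editable install such as `pip install -e .` from the repository root:

```python
# fsdp_auto_wrap_policy is re-exported by llama_recipes/utils/__init__.py
# (see the hunk above), so both import styles should yield the same function.
from llama_recipes.utils import fsdp_auto_wrap_policy
from llama_recipes.utils.fsdp_utils import fsdp_auto_wrap_policy as direct

assert fsdp_auto_wrap_policy is direct
print("absolute import paths resolve correctly")
```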