@@ -1,7 +1,9 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates.
 # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
 
+import importlib
 from functools import partial
+from pathlib import Path
 
 import torch
 
@@ -12,10 +14,46 @@ from llama_recipes.datasets import (
 )
 
 
+def load_module_from_py_file(py_file: str) -> object:
+    """
+    This method loads a module from a py file which is not in the Python path
+    """
+    module_name = Path(py_file).name
+    loader = importlib.machinery.SourceFileLoader(module_name, py_file)
+    spec = importlib.util.spec_from_loader(module_name, loader)
+    module = importlib.util.module_from_spec(spec)
+
+    loader.exec_module(module)
+
+    return module
+
+
+def get_custom_dataset(dataset_config, tokenizer, split: str):
+    if ":" in dataset_config.file:
+        module_path, func_name = dataset_config.file.split(":")
+    else:
+        module_path, func_name = dataset_config.file, "get_custom_dataset"
+
+    if not module_path.endswith(".py"):
+        raise ValueError(f"Dataset file {module_path} is not a .py file.")
+
+    module_path = Path(module_path)
+    if not module_path.is_file():
+        raise FileNotFoundError(f"Dataset py file {module_path.as_posix()} does not exist or is not a file.")
+
+    module = load_module_from_py_file(module_path.as_posix())
+    try:
+        return getattr(module, func_name)(dataset_config, tokenizer, split)
+    except AttributeError as e:
+        print(f"It seems like the given method name ({func_name}) is not present in the dataset .py file ({module_path.as_posix()}).")
+        raise e
+
+
 DATASET_PREPROC = {
     "alpaca_dataset": partial(get_alpaca_dataset, max_words=224),
     "grammar_dataset": get_grammar_dataset,
     "samsum_dataset": get_samsum_dataset,
+    "custom_dataset": get_custom_dataset,
 }
 
 
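For context, the get_custom_dataset() added above loads a user-supplied .py file and calls a function with the signature (dataset_config, tokenizer, split); when dataset_config.file carries no ":func" suffix, the function name defaults to get_custom_dataset. A minimal sketch of such a user module follows; the file name my_dataset.py, the inline samples, and the returned field layout are illustrative assumptions, not part of this change.

# my_dataset.py -- hypothetical module that the loader above could consume.
# Pointing dataset_config.file at "my_dataset.py" uses the default function name;
# "my_dataset.py:build_dataset" would select a differently named entry point.

def get_custom_dataset(dataset_config, tokenizer, split: str):
    # Illustrative in-memory samples; a real module would typically read files
    # referenced by dataset_config or download a dataset.
    texts = {
        "train": ["Hello world.", "Fine-tuning text goes here."],
        "validation": ["Held-out example."],
    }.get(split, [])

    # Tokenize each sample; the exact fields the downstream training loop
    # expects (e.g. input_ids / labels) are an assumption in this sketch.
    return [tokenizer(text) for text in texts]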