# pyproject.toml — PEP 621 metadata for llama-recipes, built with hatchling.
# Runtime dependencies are declared dynamic and read from requirements.txt
# via the hatch-requirements-txt metadata hook (see [tool.hatch.metadata...]).

[build-system]
requires = ["hatchling", "hatch-requirements-txt"]
build-backend = "hatchling.build"

[project]
name = "llama-recipes"
version = "0.0.1"
authors = [
    { name = "Hamid Shojanazeri", email = "hamidnazeri@meta.com" },
    { name = "Matthias Reso", email = "mreso@meta.com" },
    { name = "Geeta Chauhan", email = "gchauhan@meta.com" },
]
description = "Llama-recipes is a companion project to the Llama 2 model. Its goal is to provide examples to quickly get started with fine-tuning for domain adaptation and how to run inference for the fine-tuned models."
readme = "README.md"
requires-python = ">=3.8"
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: Other/Proprietary License",
    "Operating System :: OS Independent",
]
# `dependencies` is supplied at build time by hatch-requirements-txt.
dynamic = ["dependencies"]

[project.optional-dependencies]
vllm = ["vllm"]
tests = ["pytest-mock"]
auditnlg = ["auditnlg"]

[project.urls]
"Homepage" = "https://github.com/facebookresearch/llama-recipes/"
"Bug Tracker" = "https://github.com/facebookresearch/llama-recipes/issues"

[tool.hatch.build]
exclude = [
    "dist/*",
]

[tool.hatch.build.targets.wheel]
packages = ["src/llama_recipes"]

# Source of the dynamic `dependencies` list declared under [project].
[tool.hatch.metadata.hooks.requirements_txt]
files = ["requirements.txt"]

[tool.pytest.ini_options]
markers = [
    "skip_missing_tokenizer: skip tests when we can not access meta-llama/Llama-2-7b-hf on huggingface hub (Log in with `huggingface-cli login` to unskip).",
]