pyproject.toml

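# Build backend configuration (PEP 517): setuptools + wheel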
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

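# Project metadata and runtime dependencies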
[project]
name = "llms-from-scratch"
version = "1.0.17"
description = "Implement a ChatGPT-like LLM in PyTorch from scratch, step by step"
readme = "README.md"
requires-python = ">=3.10,<3.13"
dependencies = [
    "torch>=2.3.0",
    "jupyterlab>=4.0",
    "tiktoken>=0.5.1",
    "matplotlib>=3.7.1",
    "tensorflow>=2.18.0",
    "tqdm>=4.66.1",
    "numpy>=1.26,<2.1",
    "pandas>=2.2.1",
    "pip>=25.0.1",
    "pytest>=8.3.5",
]

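# Resolve `llms-from-scratch` from the local uv workspace rather than PyPI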
[tool.uv.sources]
llms-from-scratch = { workspace = true }

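# Development-only dependencies (PEP 735 dependency groups)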
[dependency-groups]
dev = [
    "build>=1.2.2.post1",
    "llms-from-scratch",
    "twine>=6.1.0",
    "tokenizers>=0.21.1",
    "safetensors>=0.6.2",
]

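# Ruff linter configuration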
[tool.ruff]
line-length = 140

[tool.ruff.lint]
exclude = [".venv"]
# Ignored rules (W504 removed)
ignore = [
    "C406", "E226", "E402", "E702", "E703",
    "E722", "E731", "E741"
]

# `llms_from_scratch` PyPI package
[tool.setuptools]
package-dir = {"" = "pkg"}

[tool.setuptools.packages.find]
where = ["pkg"]
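
The two setuptools tables above point the package root at the pkg/ directory, so the code published to PyPI as llms-from-scratch lives under pkg/ rather than at the repository root. A minimal usage sketch, assuming pkg/ contains a top-level `llms_from_scratch` package (the import name implied by the comment above):

# Assumption: pkg/ holds a top-level `llms_from_scratch` package, so after
# `pip install llms-from-scratch` it is importable under that name.
import llms_from_scratch
print(llms_from_scratch.__file__)  # location of the installed package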