feat: initial project setup with LLM architecture and HF integration

- Add LLM library with GPT model implementation
- Add hf-proxy for HuggingFace integration
- Add experiments for training and generation
- Add comprehensive documentation and examples
- Configure uv workspace with proper dependencies
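
The workspace layout implied by these changes and by [tool.uv.workspace] in the diff below (the experiments/ and docs/ paths are assumptions based on the bullet points above, not confirmed by this diff):

    llm-arch-research/
    ├── pyproject.toml      # root workspace manifest (shown below)
    ├── llm/                # GPT model implementation (workspace member)
    ├── hf-proxy/           # HuggingFace integration layer (workspace member)
    ├── experiments/        # training and generation scripts (assumed path)
    └── docs/               # documentation and examples (assumed path)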
Author: Sergey Penkovsky
Date:   2025-10-04 22:40:21 +03:00
Commit: ec07546ea8
54 changed files with 9337 additions and 0 deletions

pyproject.toml (new file, 35 lines added)

@@ -0,0 +1,35 @@
[project]
name = "llm-arch-research"
version = "0.1.0"
description = "Research workspace for LLM architectures"
authors = [
{ name = "Sergey Penkovsky", email = "sergey.penkovsky@gmail.com" }
]
requires-python = ">=3.10"
dependencies = [
"accelerate>=0.26.0",
"hf-proxy",
"llm",
"tqdm>=4,<5",
]

[project.optional-dependencies]
dev = [
"pytest>=8.0.0",
"black>=24.0.0",
"ruff>=0.3.0",
"mypy>=1.8.0",
"jupyter>=1.0.0",
]
test = [
"pytest>=8.0.0",
"pytest-cov>=4.1.0",
]

[tool.uv.sources]
llm = { workspace = true, editable = true }
hf-proxy = { workspace = true, editable = true }

[tool.uv.workspace]
members = ["llm", "hf-proxy"]
exclude = []
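
Under this configuration, uv resolves the llm and hf-proxy dependencies from the local workspace members rather than from PyPI, installing them in editable mode so local changes take effect without a reinstall. Each member directory carries its own pyproject.toml; a minimal sketch of what llm/pyproject.toml could look like follows (the build backend and the torch dependency are illustrative assumptions, not taken from this commit):

    # llm/pyproject.toml -- hypothetical sketch, not the manifest committed here
    [project]
    name = "llm"                      # must match the name used in [tool.uv.sources]
    version = "0.1.0"
    description = "GPT model implementation"
    requires-python = ">=3.10"
    dependencies = [
        "torch>=2.0",                 # assumed dependency for the GPT implementation
    ]

    [build-system]
    requires = ["hatchling"]          # assumed build backend
    build-backend = "hatchling.build"

With this layout, running `uv sync` at the workspace root builds a single environment containing the root dependencies plus both workspace members as editable installs.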