Added MultiHeadSelfAttention.
This commit is contained in:
@@ -3,7 +3,23 @@ name = "llmfs"
|
||||
version = "0.1.0"
|
||||
description = "LLM from scratch"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.13"
|
||||
requires-python = ">=3.13,<3.15"
|
||||
dependencies = [
|
||||
"tiktoken>=0.12.0",
|
||||
"tiktoken>=0.12.0",
|
||||
"torch>=2.11.0",
|
||||
"torchvision>=0.26.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"mypy>=1.20.2",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
torch = { index = "torch" }
|
||||
torchvision = { index = "torch" }
|
||||
|
||||
[[tool.uv.index]]
|
||||
name = "torch"
|
||||
url = "https://download.pytorch.org/whl/cpu"
|
||||
explicit = true
|
||||
|
||||
Reference in New Issue
Block a user