Files
text-generation-webui/pyproject.toml
Naxdy ef28a673ed custom: add flake
fix uv

add package
2026-03-16 21:36:34 +01:00

65 lines
1.9 KiB
TOML

[project]
name = "text-generation-webui"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
# The wheels pinned under [tool.uv.sources] are cp313 builds, so 3.13+ is required.
requires-python = ">=3.13"
# PEP 508 requirement strings, one per line, sorted case-insensitively
# (pyproject.toml convention). Unversioned entries (gradio, gradio-client,
# llama-cpp-binaries, exllamav3, flash-attn) are pinned to exact wheel URLs
# in [tool.uv.sources] instead of carrying a version here.
dependencies = [
    "accelerate==1.12.*",
    "audioop-lts<1.0; python_version >= '3.13'",
    "bitsandbytes==0.49.*",
    "datasets",
    "diffusers==0.36.*",
    "einops",
    "exllamav3",
    "fastapi==0.112.4",
    "flash-attn",
    "flash-linear-attention==0.4.*",
    "flask-cloudflared==0.0.15",
    "gradio",
    "gradio-client",
    "html2text==2025.4.15",
    "huggingface-hub==1.5.*",
    "jinja2==3.1.6",
    "llama-cpp-binaries",
    "markdown",
    "numpy==2.2.*",
    "pandas",
    "peft==0.18.*",
    "Pillow>=9.5.0",
    "pydantic==2.11.0",
    "pymupdf==1.27.1",
    "python-docx==1.1.2",
    "pyyaml",
    "requests",
    "rich",
    "safetensors==0.7.*",
    "scipy",
    "sentencepiece",
    "sse-starlette==1.6.5",
    "tensorboard",
    "tiktoken",
    "torchao==0.15.*",
    "tqdm",
    "transformers==5.3.*",
    "wandb",
]
[build-system]
# Standard PEP 517/518 setuptools backend; no build-time plugins needed.
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
# Same package discovery as `packages = { find = { where = ["."] } }`,
# written with a table header instead of nested inline tables.
[tool.setuptools.packages.find]
where = ["."]
[tool.setuptools.package-data]
# package-data entries are glob patterns relative to each package directory;
# a bare "css" would only match a file literally named `css`, so the asset
# directories were never included in the distribution. Directory globs fix
# that.
# NOTE(review): assumes the assets live directly in css/ and js/ with no
# sub-directories — add "css/**/*"-style patterns if they nest.
"*" = ["css/*", "js/*"]
[tool.uv.sources]
# Maps the unversioned names in [project].dependencies to exact wheel URLs.
# All wheels are py3/cp313 builds, consistent with requires-python = ">=3.13".
# Custom oobabooga gradio 4.37.2 fork plus its matching client wheel.
gradio = { url = "https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.9/gradio-4.37.2+custom.9-py3-none-any.whl" }
gradio-client = { url = "https://github.com/oobabooga/gradio/releases/download/4.37.2-custom.9/gradio_client-1.0.2+custom.9-py3-none-any.whl" }
# NOTE(review): this wheel is a +cu124 build while exllamav3/flash-attn below
# are +cu128 torch2.9.0 builds — confirm the mixed CUDA versions are intended.
llama-cpp-binaries = { url = "https://github.com/oobabooga/llama-cpp-binaries/releases/download/v0.87.0/llama_cpp_binaries-0.87.0+cu124-py3-none-linux_x86_64.whl" }
exllamav3 = { url = "https://github.com/turboderp-org/exllamav3/releases/download/v0.0.23/exllamav3-0.0.23+cu128.torch2.9.0-cp313-cp313-linux_x86_64.whl" }
flash-attn = { url = "https://github.com/kingbri1/flash-attention/releases/download/v2.8.3/flash_attn-2.8.3+cu128torch2.9.0cxx11abiFALSE-cp313-cp313-linux_x86_64.whl" }