Update pyproject.toml

This commit is contained in:
Daniel Han-Chen 2024-01-19 03:41:00 +11:00
parent 396c7245dd
commit 9e2dec16fb

View file

@@ -32,7 +32,7 @@ include-package-data = false
exclude = ["images*"]
[project.optional-dependencies]
-huggingface_dev = [
+huggingfacedev = [
"transformers @ git+https://github.com/huggingface/transformers",
"datasets",
"sentencepiece",
@@ -62,12 +62,12 @@ cu121only = [
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.22.post7-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.22.post7-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
-cu118only_torch211 = [
+cu118onlytorch211 = [
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23%2Bcu118-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23%2Bcu118-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu118/xformers-0.0.23%2Bcu118-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
]
-cu121only_torch211 = [
+cu121onlytorch211 = [
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23-cp39-cp39-manylinux2014_x86_64.whl ; python_version=='3.9'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23-cp310-cp310-manylinux2014_x86_64.whl ; python_version=='3.10'",
"xformers @ https://download.pytorch.org/whl/cu121/xformers-0.0.23-cp311-cp311-manylinux2014_x86_64.whl ; python_version=='3.11'",
@@ -85,12 +85,12 @@ cu121 = [
cu118_torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
-    "unsloth[cu118only_torch211]",
+    "unsloth[cu118onlytorch211]",
]
cu121_torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
-    "unsloth[cu121only_torch211]",
+    "unsloth[cu121onlytorch211]",
]
kaggle = [
"unsloth[huggingface]",
@@ -108,12 +108,12 @@ colab_ampere = [
"flash-attn",
]
colab_dev = [
-    "unsloth[huggingface_dev]",
+    "unsloth[huggingfacedev]",
"bitsandbytes",
"unsloth[cu121only]",
]
colab_ampere_dev = [
-    "unsloth[huggingface_dev]",
+    "unsloth[huggingfacedev]",
"bitsandbytes",
"unsloth[cu121only]",
"packaging",
@@ -139,7 +139,7 @@ cu121_ampere = [
cu118_ampere_torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
-    "unsloth[cu118only_torch211]",
+    "unsloth[cu118onlytorch211]",
"packaging",
"ninja",
"flash-attn",
@@ -147,7 +147,7 @@ cu118_ampere_torch211 = [
cu121_ampere_torch211 = [
"unsloth[huggingface]",
"bitsandbytes",
-    "unsloth[cu121only_torch211]",
+    "unsloth[cu121onlytorch211]",
"packaging",
"ninja",
"flash-attn",