llama.cpp fixes

This commit is contained in:
Daniel Han 2025-12-23 05:50:26 -08:00
parent cbfa7a20f9
commit 1ff6fc85f0
2 changed files with 3 additions and 3 deletions

View file

@@ -60,7 +60,7 @@ huggingfacenotorch = [
]
huggingface = [
"unsloth[huggingfacenotorch]",
-    "unsloth_zoo>=2025.12.6",
+    "unsloth_zoo>=2025.12.7",
"torchvision",
"unsloth[triton]",
]
@@ -523,7 +523,7 @@ colab-ampere-torch220 = [
"flash-attn>=2.6.3 ; ('linux' in sys_platform)",
]
colab-new = [
-    "unsloth_zoo>=2025.12.6",
+    "unsloth_zoo>=2025.12.7",
"packaging",
"tyro",
"transformers>=4.51.3,!=4.52.0,!=4.52.1,!=4.52.2,!=4.52.3,!=4.53.0,!=4.54.0,!=4.55.0,!=4.55.1,!=4.57.0,<=4.57.3",

View file

@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2025.12.8"
+__version__ = "2025.12.9"
__all__ = [
"SUPPORTS_BFLOAT16",