fix: change lora_dropout from int to float for type consistency (#2949)

Fixes "Argument of type 'float' cannot be assigned to parameter 'lora_dropout' of type 'int'" error by ensuring lora_dropout is consistently a float (0.0) rather than int (0) across vision.py, llama.py, and unsloth-cli.py
Authored by Muzammil Khan on 2025-07-14 14:12:07 +05:30, committed by GitHub
parent 665a8e4b1d
commit 1eaa52ae55
3 changed files with 3 additions and 3 deletions
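
Why the old default tripped type checkers: with no annotation, a checker such as Pyright infers a parameter's type from its default value, so a default of 0 makes lora_dropout an int and any float argument is then rejected. A minimal, self-contained sketch of the failure and the fix (function names here are illustrative, not Unsloth's actual API):

    # Pyright infers the parameter type from the unannotated default value.
    def add_lora_bad(lora_alpha=16, lora_dropout=0):
        # Inferred as lora_dropout: int, so a call like add_lora_bad(lora_dropout=0.05)
        # is flagged: "Argument of type 'float' cannot be assigned to parameter
        # 'lora_dropout' of type 'int'".
        return lora_alpha, lora_dropout

    def add_lora_good(lora_alpha=16, lora_dropout=0.0):
        # Inferred as lora_dropout: float; both 0.0 and 0.05 type-check.
        return lora_alpha, lora_dropout

    add_lora_good(lora_dropout=0.05)  # accepted by the type checker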

unsloth-cli.py

@@ -182,7 +182,7 @@ if __name__ == "__main__":
lora_group = parser.add_argument_group("🧠 LoRA Options", "These options are used to configure the LoRA model.")
lora_group.add_argument('--r', type=int, default=16, help="Rank for Lora model, default is 16. (common values: 8, 16, 32, 64, 128)")
lora_group.add_argument('--lora_alpha', type=int, default=16, help="LoRA alpha parameter, default is 16. (common values: 8, 16, 32, 64, 128)")
-lora_group.add_argument('--lora_dropout', type=float, default=0, help="LoRA dropout rate, default is 0.0 which is optimized.")
+lora_group.add_argument('--lora_dropout', type=float, default=0.0, help="LoRA dropout rate, default is 0.0 which is optimized.")
lora_group.add_argument('--bias', type=str, default="none", help="Bias setting for LoRA")
lora_group.add_argument('--use_gradient_checkpointing', type=str, default="unsloth", help="Use gradient checkpointing")
lora_group.add_argument('--random_state', type=int, default=3407, help="Random state for reproducibility, default is 3407.")
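
For the CLI flag, note that argparse applies the type= conversion only to string values (command-line tokens or string defaults); a non-string default such as 0 is used as-is, so the old default stayed an int whenever --lora_dropout was not passed. A standalone sketch (not the project's CLI) showing the difference:

    import argparse

    parser = argparse.ArgumentParser()
    # type=float converts command-line strings, but a non-string default is left untouched.
    parser.add_argument('--lora_dropout', type=float, default=0)

    print(type(parser.parse_args([]).lora_dropout))                        # <class 'int'>, default used as-is
    print(type(parser.parse_args(['--lora_dropout', '0']).lora_dropout))   # <class 'float'>, string converted

Changing the default to 0.0 keeps the parsed attribute a float in both cases.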

llama.py

@@ -2220,7 +2220,7 @@ class FastLlamaModel:
target_modules = ["q_proj", "k_proj", "v_proj", "o_proj",
"gate_proj", "up_proj", "down_proj"],
lora_alpha = 16,
-lora_dropout = 0,
+lora_dropout = 0.0,
bias = "none",
layers_to_transform = None,
layers_pattern = None,

vision.py

@@ -554,7 +554,7 @@ class FastBaseModel:
r = 16,
target_modules = None,
lora_alpha = 16,
-lora_dropout = 0,
+lora_dropout = 0.0,
bias = "none",
finetune_vision_layers = True,
finetune_language_layers = True,