path: root/finetuning.py
author     pks <pks@pks.rocks>  2025-11-30 22:43:09 +0100
committer  pks <pks@pks.rocks>  2025-11-30 22:43:09 +0100
commit     2706ad84c1a94e3144a4f6761447db5b2e6ed3a3 (patch)
tree       edd051c0463916a0ee5c5bc02fec2dc17b03beb1 /finetuning.py
parent     6b8cbec22fd55b0006953b7cad8599ae9011e79b (diff)
WIP
Diffstat (limited to 'finetuning.py')
-rwxr-xr-x  finetuning.py | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/finetuning.py b/finetuning.py
index cd246df..044cda5 100755
--- a/finetuning.py
+++ b/finetuning.py
@@ -80,7 +80,7 @@ def main():
parser.add_argument("--max-length", default=512, type=int)
args = parser.parse_args()
- if args.bnb4bit:
+ if args.bnb_4bit:
bnb_config = BitsAndBytesConfig(
load_in_4bit=True,
bnb_4bit_use_double_quant=True,
@@ -102,10 +102,10 @@ def main():
     if args.lora_small:
         target_modules = ["q_proj", "o_proj", "k_proj", "v_proj", "gate_proj", "up_proj", "down_proj"]
-        modules_to_save = []
+        modules_to_save = None
     else:
         target_modules="all-linear",
-        modules_to_save=["lm_head", "embed_tokens"],
+        modules_to_save=["lm_head", "embed_tokens"]
     peft_config = LoraConfig(
         lora_alpha=args.lora_alpha,
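
For context, a minimal sketch of how the corrected options plausibly combine in finetuning.py. This is an assumption, not the file's actual code: only fragments of the script are visible in this diff, so the model id, the LoRA hyperparameter values (r, lora_alpha, lora_dropout), and the bnb_4bit_quant_type / bnb_4bit_compute_dtype settings below are placeholders. The substance of the change: with --lora-small, modules_to_save=None (the LoraConfig default) attaches adapters only to the listed projection layers, while the full setup targets all linear layers and additionally trains lm_head and embed_tokens; the args.bnb_4bit attribute (formerly bnb4bit) toggles 4-bit quantized loading.

    # Sketch only: how the corrected flags might feed BitsAndBytesConfig and
    # LoraConfig downstream. Model id and hyperparameter values are assumptions.
    import torch
    from transformers import AutoModelForCausalLM, BitsAndBytesConfig
    from peft import LoraConfig, get_peft_model

    bnb_config = BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_use_double_quant=True,
        bnb_4bit_quant_type="nf4",              # assumed, not shown in the diff
        bnb_4bit_compute_dtype=torch.bfloat16,  # assumed, not shown in the diff
    )

    lora_small = True  # stands in for args.lora_small
    if lora_small:
        # Adapters on the projection layers only; no modules trained in full precision.
        target_modules = ["q_proj", "o_proj", "k_proj", "v_proj", "gate_proj", "up_proj", "down_proj"]
        modules_to_save = None  # was [], None is the LoraConfig default
    else:
        # Adapters on every linear layer, plus fully trained head and embeddings.
        target_modules = "all-linear"
        modules_to_save = ["lm_head", "embed_tokens"]

    peft_config = LoraConfig(
        r=16,               # placeholder; the real script reads these from argparse
        lora_alpha=32,      # placeholder
        lora_dropout=0.05,  # placeholder
        target_modules=target_modules,
        modules_to_save=modules_to_save,
        task_type="CAUSAL_LM",
    )

    model = AutoModelForCausalLM.from_pretrained(
        "meta-llama/Llama-3.2-1B",  # placeholder model id
        quantization_config=bnb_config,
        device_map="auto",
    )
    model = get_peft_model(model, peft_config)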