From e82be8356620659497d5aaccac8d76435a8391c4 Mon Sep 17 00:00:00 2001
From: pks
Date: Sun, 30 Nov 2025 22:46:45 +0100
Subject: WIP

---
 finetuning.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/finetuning.py b/finetuning.py
index 044cda5..379f9b0 100755
--- a/finetuning.py
+++ b/finetuning.py
@@ -104,8 +104,8 @@ def main():
         target_modules = ["q_proj", "o_proj", "k_proj", "v_proj", "gate_proj", "up_proj", "down_proj"]
         modules_to_save = None
     else:
-        target_modules="all-linear",
-        modules_to_save=["lm_head", "embed_tokens"]
+        target_modules = "all-linear"
+        modules_to_save = ["lm_head", "embed_tokens"]
 
     peft_config = LoraConfig(
         lora_alpha=args.lora_alpha,
@@ -114,7 +114,7 @@ def main():
         task_type="CAUSAL_LM",
         bias="none",
         target_modules=target_modules,
-        modules_to_save=modules_to_save,
+        #modules_to_save=modules_to_save,
     )
 
     dataset = load_dataset("asdf2k/caption_translation")
--
cgit v1.2.3
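
Note on the first hunk: the trailing comma in the removed line `target_modules="all-linear",` makes the value a one-element tuple `("all-linear",)` rather than the string `"all-linear"`, so PEFT would most likely treat it as a literal module name instead of the "all-linear" shortcut and fail to match any layer. The sketch below shows how the patched branch feeds into LoraConfig; it is a minimal reconstruction, and the function name, the `explicit_targets` flag, and the default `lora_alpha`/`r` values are placeholders, since the diff does not show the rest of main().

from peft import LoraConfig

def build_peft_config(explicit_targets: bool, lora_alpha: float = 16, r: int = 8) -> LoraConfig:
    """Sketch of the patched branch; only target_modules/modules_to_save come from the diff."""
    if explicit_targets:
        # Target the attention and MLP projections explicitly.
        target_modules = ["q_proj", "o_proj", "k_proj", "v_proj",
                          "gate_proj", "up_proj", "down_proj"]
        modules_to_save = None
    else:
        # No trailing comma: "all-linear" must stay a plain string so PEFT
        # expands it to all linear layers; ("all-linear",) would be read as
        # a literal module name and match nothing.
        target_modules = "all-linear"
        modules_to_save = ["lm_head", "embed_tokens"]  # unused while the second hunk is in place

    return LoraConfig(
        lora_alpha=lora_alpha,
        r=r,
        task_type="CAUSAL_LM",
        bias="none",
        target_modules=target_modules,
        # modules_to_save is commented out in this WIP commit, so lm_head and
        # embed_tokens are not saved alongside the adapter weights.
        # modules_to_save=modules_to_save,
    )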