From 8c77b1f424f0b00bf76fe959e66e1858fd0672b1 Mon Sep 17 00:00:00 2001
From: pks
Date: Sun, 30 Nov 2025 22:40:03 +0100
Subject: WIP

---
 finetuning.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

(limited to 'finetuning.py')

diff --git a/finetuning.py b/finetuning.py
index 490855e..ed7e954 100755
--- a/finetuning.py
+++ b/finetuning.py
@@ -99,14 +99,21 @@ def main():
         low_cpu_mem_usage=True,
     )
 
+    if args.lora_small:
+        target_modules = ["q_proj", "o_proj", "k_proj", "v_proj", "gate_proj", "up_proj", "down_proj"]
+        modules_to_save = []
+    else:
+        target_modules = "all-linear"
+        modules_to_save = ["lm_head", "embed_tokens"]
+
     peft_config = LoraConfig(
         lora_alpha=args.lora_alpha,
         lora_dropout=args.lora_dropout,
         r=args.lora_r,
         task_type="CAUSAL_LM",
         bias="none",
-        target_modules="all-linear",
-        modules_to_save=["lm_head", "embed_tokens"],
+        target_modules=target_modules,
+        modules_to_save=modules_to_save,
     )
 
     dataset = load_dataset("asdf2k/caption_translation")
--
cgit v1.2.3