From 150c67061b5fed5363f2d765f946fbf18a1d0787 Mon Sep 17 00:00:00 2001
From: Deepankar Sharma <74599435+ideepankarsharma2003@users.noreply.github.com>
Date: Wed, 16 Aug 2023 15:14:52 +0530
Subject: [PATCH] Update finetune_pp_peft.py

Corrected transformers.LlamaForCausalLM
---
 finetune_pp_peft.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/finetune_pp_peft.py b/finetune_pp_peft.py
index d530d4a..2788481 100644
--- a/finetune_pp_peft.py
+++ b/finetune_pp_peft.py
@@ -99,7 +99,7 @@ def main():
         device_map[f"model.layers.{layer_i}.post_attention_layernorm.weight"] = device_id
         device_map[f"model.layers.{layer_i}.self_attn.rotary_emb.inv_freq"] = device_id
 
-    model = transformers.LLaMAForCausalLM.from_pretrained(
+    model = transformers.LlamaForCausalLM.from_pretrained(
         args.model_path,
         load_in_8bit=True,
         device_map=device_map,
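
For reference, a minimal sketch of the corrected call, assuming a transformers release that ships the upstream Llama port (which names the class LlamaForCausalLM rather than the earlier LLaMAForCausalLM) and bitsandbytes installed for 8-bit loading. The checkpoint path and device_map="auto" below are placeholders, not values from the original script, which builds its own per-layer device_map:

    import transformers

    # Load a Llama checkpoint in 8-bit with the corrected class name.
    model = transformers.LlamaForCausalLM.from_pretrained(
        "path/to/llama-checkpoint",   # placeholder; the script uses args.model_path
        load_in_8bit=True,            # 8-bit weights via bitsandbytes
        device_map="auto",            # stand-in for the hand-built device_map dict
    )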