
Commit

apply peft as well when using a pre-finetuned model
AngledLuffa committed Jan 3, 2025
1 parent b15e266 commit 56ebbf0
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions stanza/models/constituency/trainer.py
@@ -273,18 +273,18 @@ def build_trainer(args, train_transitions, train_constituents, tags, words, rare
         args = temp_args
 
     peft_name = None
-    if args['use_peft']:
-        peft_name = "constituency"
-        bert_model, bert_tokenizer = load_bert(args['bert_model'])
-        bert_model = build_peft_wrapper(bert_model, temp_args, tlogger, adapter_name=peft_name)
-    elif args['bert_finetune'] or args['stage1_bert_finetune']:
+    if args['use_peft'] or args['bert_finetune'] or args['stage1_bert_finetune']:
         bert_model, bert_tokenizer = load_bert(args['bert_model'])
     else:
         bert_model, bert_tokenizer = load_bert(args['bert_model'], foundation_cache)
     # TODO: perhaps we could force_bert_saved here, then have the weights saved in the model file?
     if args['bert_weights']:
         bert_dict = torch.load(args['bert_weights'], map_location=torch.device("cpu"))
         bert_dict = {x[6:]: bert_dict[x] for x in bert_dict if x.startswith("model.")}
         bert_model.load_state_dict(bert_dict)
+    if args['use_peft']:
+        peft_name = "constituency"
+        bert_model = build_peft_wrapper(bert_model, temp_args, tlogger, adapter_name=peft_name)
     model = LSTMModel(pt,
                       forward_charlm,
                       backward_charlm,
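The point of the reorder is that the PEFT wrapper must be applied after any pre-finetuned weights are restored: a PEFT-wrapped model nests the base model and exposes renamed parameter keys, so calling load_state_dict with a plain checkpoint after wrapping would not match, and wrapping first would build the adapter around weights that are about to be overwritten. Below is a minimal sketch of the same load-then-wrap ordering using the Hugging Face transformers and peft libraries directly; it is not stanza's build_peft_wrapper, and the function name, checkpoint key format, and LoRA hyperparameters are illustrative assumptions.

import torch
from transformers import AutoModel
from peft import LoraConfig, get_peft_model

def load_bert_for_training(model_name, weights_path=None, use_peft=False):
    # Load the base transformer from Hugging Face.
    bert_model = AutoModel.from_pretrained(model_name)

    # 1) Restore pre-finetuned weights first, while the parameter names
    #    still match the plain (unwrapped) model.
    if weights_path:
        bert_dict = torch.load(weights_path, map_location=torch.device("cpu"))
        # Assumed checkpoint layout: weights saved under a "model." prefix.
        bert_dict = {k[len("model."):]: v for k, v in bert_dict.items()
                     if k.startswith("model.")}
        bert_model.load_state_dict(bert_dict)

    # 2) Only now wrap the (possibly finetuned) model with a PEFT adapter,
    #    so the adapter trains on top of the restored weights.
    if use_peft:
        peft_config = LoraConfig(r=8, lora_alpha=16)  # hypothetical values
        bert_model = get_peft_model(bert_model, peft_config,
                                    adapter_name="constituency")

    return bert_model

Done in the opposite order, load_state_dict would run against the wrapped model, whose parameter keys sit under the PEFT wrapper's prefix and no longer line up with the checkpoint, which is exactly the situation the commit message describes for a pre-finetuned model.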
