support configuration for save_total_limit
SeanLee97 committed Oct 19, 2024
1 parent ca9e956 commit 9e111a5
Showing 2 changed files with 3 additions and 1 deletion.
angle_emb/angle.py: 1 addition, 1 deletion

@@ -1431,7 +1431,7 @@ def fit(self,
             evaluation_strategy: str = 'steps',
             save_steps: int = 100,
             save_strategy: str = 'steps',
-            save_total_limit: int = 10,
+            save_total_limit: int = 1,
             gradient_accumulation_steps: int = 1,
             fp16: Optional[bool] = None,
             argument_kwargs: Optional[Dict] = None,
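For context, a minimal sketch of how the changed default behaves when calling fit() directly. The model name, output directory, and the pre-built train_ds below are illustrative assumptions, not taken from this commit; only the parameters shown in the hunk above come from the actual signature.

    from angle_emb import AnglE

    # Hypothetical backbone checkpoint; any model supported by AnglE would do.
    angle = AnglE.from_pretrained('WhereIsAI/UAE-Large-V1')

    # train_ds is assumed to be a dataset already prepared for AnglE training.
    angle.fit(
        train_ds=train_ds,
        output_dir='ckpts/demo-run',  # hypothetical output directory
        save_steps=100,
        save_strategy='steps',
        # The new default is 1 (keep only the latest checkpoint);
        # pass a larger value to retain more checkpoints on disk.
        save_total_limit=3,
    )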
angle_emb/angle_trainer.py: 2 additions, 0 deletions

@@ -93,6 +93,7 @@
 parser.add_argument('--max_steps', type=int, default=-1,
                     help='Specify max steps, default -1 (Automatically calculated from epochs)')
 parser.add_argument('--save_steps', type=int, default=100, help='Specify save_steps, default 1000')
+parser.add_argument('--save_total_limit', type=int, default=1, help='Specify save_total_limit, default 1')
 parser.add_argument('--save_strategy', type=str, default='steps', choices=['steps', 'epoch', 'no'],
                     help='Specify save_strategy, default steps')
 parser.add_argument('--eval_steps', type=int, default=1000, help='Specify eval_steps, default 1000')

@@ -294,6 +295,7 @@ def main():
         learning_rate=args.learning_rate,
         save_steps=args.save_steps,
         save_strategy=args.save_strategy,
+        save_total_limit=args.save_total_limit,
         eval_steps=args.eval_steps,
         evaluation_strategy=args.evaluation_strategy,
         warmup_steps=args.warmup_steps,
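The new flag can then be passed on the command line. The invocation below is a sketch: the module path (the package may also expose an angle-trainer entry point) and the omitted model/data arguments depend on your setup; only --save_steps, --save_strategy, --save_total_limit, and --eval_steps are taken from the diff above.

    # Model and training-data arguments are omitted here; supply them for a real run.
    python -m angle_emb.angle_trainer \
        --save_steps 100 \
        --save_strategy steps \
        --save_total_limit 2 \
        --eval_steps 1000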
