From e1d2c70aa05a31aa84b05a0920079181c2e0b8c7 Mon Sep 17 00:00:00 2001
From: John Bauer
Date: Thu, 2 Nov 2023 01:31:38 -0700
Subject: [PATCH] Don't save the optimizer as part of the best F1 model

---
 stanza/models/coref/model.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/stanza/models/coref/model.py b/stanza/models/coref/model.py
index 47d884b23c..2e1eac6364 100644
--- a/stanza/models/coref/model.py
+++ b/stanza/models/coref/model.py
@@ -301,13 +301,14 @@ def run(self, # pylint: disable=too-many-locals

         return res

-    def save_weights(self, save_path=None):
+    def save_weights(self, save_path=None, save_optimizers=True):
         """ Saves trainable models as state dicts. """
         to_save: List[Tuple[str, Any]] = \
             [(key, value) for key, value in self.trainable.items()
              if self.config.bert_finetune or key != "bert"]
-        to_save.extend(self.optimizers.items())
-        to_save.extend(self.schedulers.items())
+        if save_optimizers:
+            to_save.extend(self.optimizers.items())
+            to_save.extend(self.schedulers.items())

         time = datetime.strftime(datetime.now(), "%Y.%m.%d_%H.%M")
         if save_path is None:
@@ -380,7 +381,7 @@ def train(self):
                     # TODO: choose a different default save dir
                     save_path = os.path.join(self.config.data_dir,
                                              f"{self.config.section}.pt")
-                    self.save_weights(save_path)
+                    self.save_weights(save_path, save_optimizers=False)
                     # TODO: make save_each an option here
                     self.save_weights()
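
For context, a minimal usage sketch of the two call sites after this patch; the `model` instance name below is illustrative and not taken from the patch:

    # Hypothetical usage of the patched save_weights API.
    # Best-F1 checkpoint: skip optimizer/scheduler state, so the saved file
    # holds only the model weights and is smaller, but is not meant for
    # resuming training.
    model.save_weights(save_path, save_optimizers=False)

    # Regular per-epoch checkpoint: the default save_optimizers=True keeps
    # the optimizer and scheduler state dicts so training can resume from it.
    model.save_weights()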