Commit

black eww
i.beskrovnyy committed Jan 30, 2024
1 parent d0af06e commit deada38
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 3 additions & 3 deletions src/core/train.py
@@ -341,7 +341,7 @@ def train_dim(args):
             )
 
         # Early stopping -----------------------------------------------
-        if earl_stop:
+        if early_stop:
             logger.info(
                 "--> Early stopping. best_r_p {:0.2f} best_rmse {:0.2f}".format(
                     early_stop.best_r_p, early_stop.best_rmse
@@ -350,7 +350,7 @@ def train_dim(args):
             return
 
     # Training done --------------------------------------------------------
-    logger.info("--> Training done. best_r_p {:0.2f} best_rmse {:0.2f}".format(
-        early_stop.best_r_p, early_stop.best_rmse)
+    logger.info(
+        "--> Training done. best_r_p {:0.2f} best_rmse {:0.2f}".format(early_stop.best_r_p, early_stop.best_rmse)
     )
     return
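
Note on the hunk above: early_stop tracks the best Pearson correlation (best_r_p) and the corresponding RMSE (best_rmse) across validation epochs, and is truthy once stopping is triggered. A minimal sketch of such a helper follows; the class name, the patience mechanism, the "higher r_p is better" criterion, and the __bool__ pattern are assumptions, only the two attribute names appear in the diff.

# Hypothetical early-stopping helper; only best_r_p and best_rmse are taken
# from the diff, everything else (name, patience, criterion) is assumed.
class EarlyStopping:
    def __init__(self, patience=10):
        self.patience = patience
        self.counter = 0
        self.stop = False
        self.best_r_p = float("-inf")   # best Pearson's r seen so far
        self.best_rmse = float("inf")   # RMSE at the epoch with the best r_p

    def update(self, r_p, rmse):
        # Reset the patience counter on improvement, otherwise count towards it.
        if r_p > self.best_r_p:
            self.best_r_p = r_p
            self.best_rmse = rmse
            self.counter = 0
        else:
            self.counter += 1
            self.stop = self.counter >= self.patience

    def __bool__(self):
        # Lets the training loop write `if early_stop:` as in the hunk above.
        return self.stop
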
2 changes: 1 addition & 1 deletion src/utils/train_utils.py
@@ -109,7 +109,7 @@ def get_loss(self, yb, yb_hat, idx):
         b = b[idx, :]
 
         yb_hat_map = (
-            b[:, 0] + b[:, 1] * yb_hat[:, 0] + b[:, 2] * yb_hat[:, 0] ** 2 + b[:, 3] * yb_hat[:, 0] ** 3
+            b[:, 0] + b[:, 1] * yb_hat[:, 0] + b[:, 2] * yb_hat[:, 0] ** 2 + b[:, 3] * yb_hat[:, 0] ** 3
         ).view(-1, 1)
 
         loss_bias = self._nan_mse(yb_hat_map, yb)
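
Note on the hunk above: the changed line is a per-sample bias mapping, in which the raw prediction yb_hat[:, 0] is passed through a third-order polynomial whose coefficients are the rows of b selected by idx, and the mapped prediction is scored with a NaN-aware MSE (self._nan_mse). A standalone sketch under assumed shapes follows; the function names, the (N, 4) / (N, 1) shapes, and the exact NaN masking are assumptions, the polynomial itself is taken from the diff.

import torch

def nan_mse(y_hat, y):
    # MSE that ignores entries where the target is NaN
    # (assumed behaviour of self._nan_mse in the diff).
    mask = ~torch.isnan(y)
    return torch.mean((y_hat[mask] - y[mask]) ** 2)

def bias_mapped_loss(b, yb_hat, yb):
    # b:      (N, 4) per-sample polynomial coefficients
    # yb_hat: (N, 1) raw predictions; yb: (N, 1) targets (may contain NaN)
    y = yb_hat[:, 0]
    yb_hat_map = (b[:, 0] + b[:, 1] * y + b[:, 2] * y ** 2 + b[:, 3] * y ** 3).view(-1, 1)
    return nan_mse(yb_hat_map, yb)
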
