Commit

fix lint error
mikecovlee committed Jan 19, 2024
1 parent 901c4ab commit 278ce89
Showing 2 changed files with 3 additions and 4 deletions.
1 change: 0 additions & 1 deletion mlora/tasks.py
@@ -280,7 +280,6 @@ def evaluate(model: LLMModel,
              tokenizer: Tokenizer,
              configs: List[EvaluateConfig],
              max_seq_len: int = 512):
-    device = torch.device(model.device_)
     max_iterations = 0
     for config in configs:
         config.init_task()
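
The deleted line in mlora/tasks.py assigned a torch.device that the function body never read, which is the kind of unused-variable finding a linter such as flake8 reports as F841. A minimal reproduction of the pattern, using a hypothetical stub function rather than the real evaluate():

import torch


def evaluate_stub(model_device="cpu"):
    # F841: local variable 'device' is assigned to but never used
    device = torch.device(model_device)
    return 0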
6 changes: 3 additions & 3 deletions mlora/train.py
@@ -1,4 +1,4 @@
-from mlora.modelargs import MultiLoraBatchData, LoraConfig, MixConfig
+from mlora.modelargs import LoraConfig, MixConfig
 from mlora.dispatcher import Dispatcher
 from mlora.mix_lora import router_loss_factory
 from mlora.tasks import train_task_factory
@@ -75,8 +75,8 @@ def prepare(self, train_paramas: List[torch.Tensor]):
 
     def step_lr_scheduler(self, total_epoch, len_dataset):
         if self.lr_scheduler_ is None:
-            total_steps = (len_dataset // self.batch_size_)*total_epoch if len_dataset % self.batch_size_ == 0 else (
-                len_dataset // self.batch_size_ + 1)*total_epoch
+            total_steps = (len_dataset // self.batch_size_) * total_epoch if len_dataset % self.batch_size_ == 0 else (
+                len_dataset // self.batch_size_ + 1) * total_epoch
             warmup_steps = self.warmup_steps_ * \
                 total_steps if isinstance(
                     self.warmup_steps_, float) else self.warmup_steps_
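
The second hunk only adds whitespace around the * operator; the step arithmetic is unchanged: the total step count is the per-epoch step count (a ceiling division of dataset length by batch size) times the epoch count, and a float warmup value is treated as a fraction of those steps while an int is taken as an absolute count. A minimal sketch of that arithmetic, with hypothetical standalone names rather than the trainer attributes used in mlora/train.py:

import math


def compute_scheduler_steps(len_dataset, batch_size, total_epoch, warmup):
    # Illustrative sketch, not mLoRA code: steps per epoch is
    # ceil(len_dataset / batch_size); the diff spells the ceiling out with a
    # modulo check instead of math.ceil.
    total_steps = math.ceil(len_dataset / batch_size) * total_epoch
    # A float warmup is read as a fraction of total_steps, an int as an
    # absolute step count; the int() cast is an assumption of this sketch.
    warmup_steps = int(warmup * total_steps) if isinstance(warmup, float) else warmup
    return total_steps, warmup_steps


# Example: 1000 samples, batch size 16, 3 epochs, warmup = 0.1
# -> total_steps = 63 * 3 = 189, warmup_steps = int(0.1 * 189) = 18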
