
remove legacy options
mikecovlee committed Jan 5, 2024
Commit def320e (parent: 92da048)
Showing 1 changed file with 2 additions and 4 deletions.
mlora/MixLoRA.py (2 additions, 4 deletions)
@@ -53,8 +53,7 @@ def __init__(self, in_features: int, config: MixConfig) -> None:
         self.adapter_name_: str = config.adapter_name_
         self.gate_ = torch.nn.Linear(
             in_features, config.num_experts_, bias=False, device=config.device_)
-        self.act_ = ACT2FN["silu" if config.act_fn_ ==
-                           "default" else config.act_fn_]
+        self.act_ = ACT2FN[config.act_fn_]
         self.experts_ = config.num_experts_
 
         self.topk_ = config.top_k_
@@ -177,8 +176,7 @@ def __init__(self, in_features: int, config: MixConfig) -> None:
         self.adapter_name_: str = config.adapter_name_
         self.gate_ = torch.nn.Linear(
             in_features, config.num_experts_, bias=False, device=config.device_)
-        self.act_ = ACT2FN["gelu_new" if config.act_fn_ ==
-                           "default" else config.act_fn_]
+        self.act_ = ACT2FN[config.act_fn_]
         self.experts_: int = config.num_experts_
 
         self.expert_capacity_: int = config.expert_capacity_
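The change itself is small: both expert classes previously rewrote the config value "default" to a hard-coded activation ("silu" in the first class, "gelu_new" in the second) before looking it up in ACT2FN; after this commit the configured name is used verbatim, so configurations must name an activation explicitly. Below is a minimal sketch of the post-commit lookup, assuming ACT2FN is the Hugging Face Transformers activation table (the import is not shown in this diff) and using a hypothetical stand-in for MixConfig:

import torch
from transformers.activations import ACT2FN  # maps activation names to modules


class MixConfig:
    # Hypothetical stand-in for mlora's MixConfig; only act_fn_ is modeled here.
    def __init__(self, act_fn_: str) -> None:
        self.act_fn_ = act_fn_


# Post-commit behavior: the configured name is the lookup key as-is,
# so a config that still says "default" now raises a KeyError.
config = MixConfig(act_fn_="silu")
act = ACT2FN[config.act_fn_]           # an nn.SiLU() instance
print(act(torch.tensor([1.0, -1.0])))  # ~tensor([0.7311, -0.2689])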
