From 1f7b9fbb1e5bb90a04960d229d917665e6061793 Mon Sep 17 00:00:00 2001
From: arjun
Date: Fri, 17 Jan 2025 12:11:43 -0800
Subject: [PATCH 1/5] use _ConvNd for conv1d lora

---
 src/peft/tuners/lora/layer.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/src/peft/tuners/lora/layer.py b/src/peft/tuners/lora/layer.py
index 20bef8ed10..fdd271f09e 100644
--- a/src/peft/tuners/lora/layer.py
+++ b/src/peft/tuners/lora/layer.py
@@ -67,6 +67,8 @@ def __init__(self, base_layer: nn.Module, ephemeral_gpu_offload: bool = False, *
         base_layer = self.get_base_layer()
         if isinstance(base_layer, nn.Linear):
             in_features, out_features = base_layer.in_features, base_layer.out_features
+        elif isinstance(base_layer, nn.Conv1d):
+            in_features, out_features = base_layer.in_channels, base_layer.out_channels
         elif isinstance(base_layer, nn.Conv2d):
             in_features, out_features = base_layer.in_channels, base_layer.out_channels
         elif isinstance(base_layer, nn.Conv3d):
@@ -1296,6 +1298,13 @@ def __init__(self, *args, **kwargs):
     def _get_dora_layer_class(self):
         return DoraConv2dLayer
 
+class Conv1d(_ConvNd):
+    # Lora implemented in a conv3d layer
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if not self._kernel_dim == 3:
+            raise ValueError(f"Conv1d layer kernel must have 3 dimensions, not {self._kernel_dim}")
+        self.conv_fn = F.conv1d
 class Conv3d(_ConvNd):
     # Lora implemented in a conv3d layer
     def __init__(self, *args, **kwargs):
@@ -1679,6 +1688,9 @@ def dispatch_default(
     elif isinstance(target_base_layer, torch.nn.Conv3d):
         kwargs.update(lora_config.loftq_config)
         new_module = Conv3d(target, adapter_name, **kwargs)
+    elif isinstance(target_base_layer, nn.Conv1d):
+        kwargs.update(lora_config.loftq_config)
+        new_module = Conv1d(target, adapter_name, **kwargs)
     elif isinstance(target_base_layer, torch.nn.MultiheadAttention):
         kwargs.update(lora_config.loftq_config)
         new_module = MultiheadAttention(target, adapter_name, **kwargs)

From bae612f54f06d568bc9f42fc72015dd7bfb43320 Mon Sep 17 00:00:00 2001
From: Arjun
Date: Wed, 22 Jan 2025 14:06:17 -0800
Subject: [PATCH 2/5] Comment

Co-authored-by: Benjamin Bossan
---
 src/peft/tuners/lora/layer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/peft/tuners/lora/layer.py b/src/peft/tuners/lora/layer.py
index fdd271f09e..31ec27b6ec 100644
--- a/src/peft/tuners/lora/layer.py
+++ b/src/peft/tuners/lora/layer.py
@@ -1299,7 +1299,7 @@ def _get_dora_layer_class(self):
         return DoraConv2dLayer
 
 class Conv1d(_ConvNd):
-    # Lora implemented in a conv3d layer
+    # Lora implemented in a conv1d layer
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if not self._kernel_dim == 3:

From ba03b7a009090924c6382d633a188ed49ca5db3c Mon Sep 17 00:00:00 2001
From: arjun
Date: Wed, 22 Jan 2025 14:30:47 -0800
Subject: [PATCH 3/5] raise not implemented error for conv1d dora

---
 src/peft/tuners/lora/layer.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/peft/tuners/lora/layer.py b/src/peft/tuners/lora/layer.py
index 31ec27b6ec..b866f6f067 100644
--- a/src/peft/tuners/lora/layer.py
+++ b/src/peft/tuners/lora/layer.py
@@ -1306,6 +1306,9 @@ def __init__(self, *args, **kwargs):
         if not self._kernel_dim == 3:
             raise ValueError(f"Conv1d layer kernel must have 3 dimensions, not {self._kernel_dim}")
         self.conv_fn = F.conv1d
+    def _get_dora_layer_class(self):
+        raise NotImplementedError
+
 class Conv3d(_ConvNd):
     # Lora implemented in a conv3d layer
     def __init__(self, *args, **kwargs):
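With patches 1-3 applied, an nn.Conv1d module can be targeted by name just like the
already supported layer types, while requesting DoRA on a Conv1d target raises
NotImplementedError (patch 3). A minimal usage sketch, not part of the patch series;
the model and module names below are made up for illustration:

    import torch
    import torch.nn as nn
    from peft import LoraConfig, get_peft_model

    class TinyConvNet(nn.Module):  # hypothetical model, for illustration only
        def __init__(self):
            super().__init__()
            self.conv1d = nn.Conv1d(in_channels=4, out_channels=8, kernel_size=3, padding=1)
            self.lin0 = nn.Linear(8, 2)

        def forward(self, x):       # x: (batch, channels, length)
            x = self.conv1d(x)      # -> (batch, 8, length)
            x = x.mean(dim=-1)      # pool over the length dimension
            return self.lin0(x)     # -> (batch, 2)

    # dispatch_default now routes the nn.Conv1d base layer to lora.Conv1d
    config = LoraConfig(target_modules=["conv1d"], r=4, lora_alpha=8)
    peft_model = get_peft_model(TinyConvNet(), config)
    out = peft_model(torch.randn(2, 4, 16))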
From 84e14f4c023ca0376e4f45c430c3e6abaf4bb3c0 Mon Sep 17 00:00:00 2001
From: arjun
Date: Wed, 22 Jan 2025 14:31:47 -0800
Subject: [PATCH 4/5] add tests

---
 tests/test_custom_models.py | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/tests/test_custom_models.py b/tests/test_custom_models.py
index 22472e445c..a881207a43 100644
--- a/tests/test_custom_models.py
+++ b/tests/test_custom_models.py
@@ -103,6 +103,7 @@
         LoraConfig,
         {"target_modules": ["emb", "conv1d"], "use_dora": True},
     ),
+    ("Conv1d LoRA", "Conv1d", LoraConfig, {"target_modules": ["conv1d"]}),
     ("Conv2d 1 LoRA", "Conv2d", LoraConfig, {"target_modules": ["conv2d"]}),
     ("Conv2d 2 LoRA", "Conv2d", LoraConfig, {"target_modules": ["conv2d", "lin0"]}),
     ("Conv2d 1 LoRA with DoRA", "Conv2d", LoraConfig, {"target_modules": ["conv2d"], "use_dora": True}),
@@ -810,6 +811,25 @@ def get_output_embeddings(self):
         return None
 
 
+class ModelConv1D(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1d = nn.Conv1d(1, 1, 2)
+        self.relu = nn.ReLU()
+        self.flat = nn.Flatten()
+        self.lin0 = nn.Linear(9, 2)
+        self.sm = nn.LogSoftmax(dim=-1)
+
+    def forward(self, X):
+        X = X.float().reshape(-1, 1, 10)
+        X = self.conv1d(X)
+        X = self.relu(X)
+        X = self.flat(X)
+        X = self.lin0(X)
+        X = self.sm(X)
+        return X
+
+
 class ModelConv2D(nn.Module):
     def __init__(self):
         super().__init__()
@@ -910,6 +930,9 @@ def from_pretrained(cls, model_id, torch_dtype=None):
         if model_id == "EmbConv1D":
             return ModelEmbConv1D().to(torch_dtype)
 
+        if model_id == "Conv1d":
+            return ModelConv1D().to(torch_dtype)
+
         if model_id == "Conv2d":
             return ModelConv2D().to(torch_dtype)
 

From 944b4d19e8e3502cff5e67995b0cde00e21d0866 Mon Sep 17 00:00:00 2001
From: arjun
Date: Thu, 23 Jan 2025 10:14:39 -0800
Subject: [PATCH 5/5] style

---
 src/peft/tuners/lora/layer.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/peft/tuners/lora/layer.py b/src/peft/tuners/lora/layer.py
index b866f6f067..bd5a16a783 100644
--- a/src/peft/tuners/lora/layer.py
+++ b/src/peft/tuners/lora/layer.py
@@ -1298,6 +1298,7 @@ def __init__(self, *args, **kwargs):
     def _get_dora_layer_class(self):
         return DoraConv2dLayer
 
+
 class Conv1d(_ConvNd):
     # Lora implemented in a conv1d layer
     def __init__(self, *args, **kwargs):
@@ -1309,6 +1310,7 @@ def __init__(self, *args, **kwargs):
     def _get_dora_layer_class(self):
         raise NotImplementedError
 
+
 class Conv3d(_ConvNd):
     # Lora implemented in a conv3d layer
     def __init__(self, *args, **kwargs):
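For context on the `_kernel_dim == 3` check: an nn.Conv1d weight has shape
(out_channels, in_channels, kernel_size), i.e. three dimensions, just as Conv2d
weights have four and Conv3d weights five. A rough sketch of the low-rank update
that the _ConvNd machinery builds for such a layer, assuming the usual LoRA
convolution pattern (a down-projection conv with the base kernel size followed by
an up-projection conv with a size-1 kernel); the variable names are illustrative,
not PEFT internals:

    import torch
    import torch.nn as nn

    # Illustrative only; the real logic lives in PEFT's _ConvNd class.
    base = nn.Conv1d(in_channels=4, out_channels=8, kernel_size=3)
    print(base.weight.shape)  # torch.Size([8, 4, 3]): the 3 dimensions the check asserts

    r = 2                                                # LoRA rank
    lora_A = nn.Conv1d(4, r, kernel_size=3, bias=False)  # down-projection, base kernel size
    lora_B = nn.Conv1d(r, 8, kernel_size=1, bias=False)  # up-projection, size-1 kernel

    x = torch.randn(1, 4, 10)
    scaling = 8 / r                                      # lora_alpha / r
    y = base(x) + scaling * lora_B(lora_A(x))            # frozen base path plus low-rank update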