-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathexport.py
29 lines (23 loc) · 1.05 KB
/
export.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
import torch
from transformers import BertForSequenceClassification
# Restore the fine-tuned BERT classifier from its on-disk checkpoint.
# NOTE(review): assumes './trained_model' is a Hugging Face-format save
# (config.json + weights) produced by a prior training run — confirm.
_CHECKPOINT_DIR = './trained_model'
model = BertForSequenceClassification.from_pretrained(_CHECKPOINT_DIR)
# Define a wrapper to handle model outputs
class ModelWrapper(torch.nn.Module):
    """Adapter that reduces a HF sequence-classification output to a bare tensor.

    torch.jit.trace handles plain-tensor returns best, so this wrapper
    discards the structured ModelOutput object and forwards only `.logits`.
    """

    def __init__(self, model):
        super().__init__()
        # The underlying Hugging Face model being exported.
        self.model = model

    def forward(self, input_ids, attention_mask):
        # Delegate to the wrapped model, then keep just the classification logits.
        result = self.model(input_ids=input_ids, attention_mask=attention_mask)
        return result.logits
# Wrap the model so tracing sees a plain-tensor interface, and switch to
# inference mode (disables dropout etc. in all submodules).
wrapped_model = ModelWrapper(model)
wrapped_model.eval()

# Example inputs that fix the traced sequence shape.
# NOTE(review): trace is specialized to (batch=1, seq_len=128) — adjust
# these dimensions to match how the exported model will be called.
batch_size, seq_len = 1, 128
example_ids = torch.zeros(batch_size, seq_len, dtype=torch.long)
example_mask = torch.ones(batch_size, seq_len, dtype=torch.long)

# strict=False tolerates the HF model's non-tensor internals during tracing.
exported = torch.jit.trace(wrapped_model, (example_ids, example_mask), strict=False)
exported.save('model.pt')