Skip to content

Commit

Permalink
Add custom model export test (#677)
Browse files Browse the repository at this point in the history
* Add custom model export test

* format
  • Loading branch information
echarlaix authored May 31, 2024
1 parent 683133f commit 813d7c0
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 3 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
"transformers_stream_generator",
"einops",
"tiktoken",
"sentence_transformers",
"sentence-transformers",
]

QUALITY_REQUIRE = ["black~=23.1", "ruff>=0.0.241"]
Expand Down
29 changes: 27 additions & 2 deletions tests/openvino/test_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,10 @@
from tempfile import TemporaryDirectory
from typing import Optional

import torch
from parameterized import parameterized
from transformers import AutoConfig
from sentence_transformers import SentenceTransformer, models
from transformers import AutoConfig, AutoTokenizer
from utils_tests import MODEL_NAMES

from optimum.exporters.onnx.constants import SDPA_ARCHS_ONNX_EXPORT_NOT_SUPPORTED
Expand Down Expand Up @@ -124,7 +126,7 @@ def test_export(self, model_type: str):


class CustomExportModelTest(unittest.TestCase):
def test_export_custom_model(self):
def test_custom_export_config_model(self):
class BertOnnxConfigWithPooler(BertOnnxConfig):
@property
def outputs(self):
Expand Down Expand Up @@ -157,3 +159,26 @@ def outputs(self):

self.assertIsInstance(ov_model, OVBaseModel)
self.assertTrue(ov_model.output_names == {"last_hidden_state": 0, "pooler_output": 1})

def test_export_custom_model(self):
model_id = "hf-internal-testing/tiny-random-BertModel"
word_embedding_model = models.Transformer(model_id, max_seq_length=256)
pooling_model = models.Pooling(word_embedding_model.get_word_embedding_dimension())
dense_model = models.Dense(
in_features=pooling_model.get_sentence_embedding_dimension(),
out_features=256,
)
model = SentenceTransformer(modules=[word_embedding_model, pooling_model, dense_model])

with TemporaryDirectory() as tmpdirname:
export_from_model(model, output=tmpdirname, task="feature-extraction")
ov_model = OVModelForCustomTasks.from_pretrained(tmpdirname)

tokenizer = AutoTokenizer.from_pretrained(model_id)
tokens = tokenizer("This is a sample input", return_tensors="pt")
with torch.no_grad():
model_outputs = model(tokens)

ov_outputs = ov_model(**tokens)
self.assertTrue(torch.allclose(ov_outputs.token_embeddings, model_outputs.token_embeddings, atol=1e-4))
self.assertTrue(torch.allclose(ov_outputs.sentence_embedding, model_outputs.sentence_embedding, atol=1e-4))

0 comments on commit 813d7c0

Please sign in to comment.