Skip to content

Commit

Permalink
Fix SigLipPipeline model size calculation.
Browse files Browse the repository at this point in the history
  • Loading branch information
RyanJDick authored and psychedelicious committed Mar 5, 2025
1 parent b6b21db commit 8e28888
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 0 deletions.
2 changes: 2 additions & 0 deletions invokeai/backend/model_manager/load/model_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from invokeai.backend.model_manager.config import AnyModel
from invokeai.backend.onnx.onnx_runtime import IAIOnnxRuntimeModel
from invokeai.backend.patches.model_patch_raw import ModelPatchRaw
from invokeai.backend.sig_lip.sig_lip_pipeline import SigLipPipeline
from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
from invokeai.backend.textual_inversion import TextualInversionModelRaw
from invokeai.backend.util.calc_tensor_size import calc_tensor_size
Expand Down Expand Up @@ -48,6 +49,7 @@ def calc_model_size_by_data(logger: logging.Logger, model: AnyModel) -> int:
GroundingDinoPipeline,
SegmentAnythingPipeline,
DepthAnythingPipeline,
SigLipPipeline,
),
):
return model.calc_size()
Expand Down
7 changes: 7 additions & 0 deletions invokeai/backend/sig_lip/sig_lip_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,10 @@ def encode_image(self, x: Image.Image, device: torch.device, dtype: torch.dtype)
imgs = self._siglip_processor.preprocess(images=[x], do_resize=True, return_tensors="pt", do_convert_rgb=True)
encoded_x = self._siglip_model(**imgs.to(device=device, dtype=dtype)).last_hidden_state
return encoded_x

def calc_size(self) -> int:
    """Return the in-memory footprint of the wrapped SigLIP model, in bytes."""
    # Imported lazily here rather than at module scope to break a circular
    # dependency between this module and model_util.
    from invokeai.backend.model_manager.load.model_util import calc_module_size

    model = self._siglip_model
    return calc_module_size(model)

0 comments on commit 8e28888

Please sign in to comment.