Skip to content

Commit

Permalink
prep kolors
Browse files Browse the repository at this point in the history
  • Loading branch information
vladmandic committed Jul 10, 2024
1 parent 03a6e25 commit 8e170f5
Showing 1 changed file with 7 additions and 8 deletions.
15 changes: 7 additions & 8 deletions modules/model_kolors.py
Original file line number Diff line number Diff line change
@@ -1,26 +1,25 @@
import torch
import transformers
import diffusers


# Hugging Face repo providing the Kolors pipeline weights (UNet/VAE/scheduler)
repo_id = 'Kwai-Kolors/Kolors'
# ChatGLM3 checkpoint loaded separately below as the Kolors text encoder/tokenizer
encoder_id = 'THUDM/chatglm3-6b'


def load_kolors(_checkpoint_info, diffusers_load_config=None):
    """Load the Kwai Kolors pipeline with its ChatGLM3 text encoder.

    Args:
        _checkpoint_info: unused here; kept for loader-signature compatibility
            with the other model loaders in this module family.
        diffusers_load_config: optional dict of extra kwargs forwarded to
            ``diffusers.KolorsPipeline.from_pretrained``. It is mutated in
            place (``variant`` is forced to fp16 and ``torch_dtype`` is
            defaulted), so a fresh dict is created when the caller omits it.
    """
    from modules import shared, devices, modelloader  # noqa: F401  # `devices` presumably used past this span — confirm in full file
    # NOTE(review): original used a mutable default argument (`={}`) and then
    # mutated it below — config leaked across calls; `None` sentinel fixes that
    # while remaining backward-compatible for callers passing their own dict.
    if diffusers_load_config is None:
        diffusers_load_config = {}
    modelloader.hf_login()
    # Force the fp16 variant and a matching dtype unless the caller chose one.
    diffusers_load_config['variant'] = "fp16"
    if 'torch_dtype' not in diffusers_load_config:
        diffusers_load_config['torch_dtype'] = torch.float16
    # ChatGLM3 is not a stock CLIP encoder, so it is loaded explicitly with
    # trust_remote_code and handed to the pipeline instead of being resolved
    # from the pipeline repo itself.
    text_encoder = transformers.AutoModel.from_pretrained(encoder_id, torch_dtype=torch.float16, trust_remote_code=True, cache_dir=shared.opts.diffusers_dir)
    tokenizer = transformers.AutoTokenizer.from_pretrained(encoder_id, trust_remote_code=True, cache_dir=shared.opts.diffusers_dir)
    pipe = diffusers.KolorsPipeline.from_pretrained(
        repo_id,
        tokenizer=tokenizer,
        text_encoder=text_encoder,
        cache_dir=shared.opts.diffusers_dir,
        **diffusers_load_config,
    )
Expand Down

0 comments on commit 8e170f5

Please sign in to comment.