remove duplicate diffusers and move aux models to hfcache
Signed-off-by: Vladimir Mandic <[email protected]>
vladmandic committed Nov 1, 2024
1 parent 2c43d52 commit 5ff8473
Showing 7 changed files with 17 additions and 5 deletions.
6 changes: 6 additions & 0 deletions .gitignore
@@ -44,6 +44,11 @@ tunableop_results*.csv
!webui.sh
!package.json

# pyinstaller
*.spec
build/
dist/

# dynamically generated
/repositories/ip-instruct/

@@ -69,3 +74,4 @@ tunableop_results*.csv
!/models/VAE-approx/model.pt
!/models/Reference
!/models/Reference/**/*

1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -26,6 +26,7 @@
- detailer min/max size as fractions of image size
- ipadapter load on-demand
- ipadapter face use correct yolo model
- list diffusers remove duplicates
- fix legacy extensions access to shared objects
- fix diffusers load from folder

2 changes: 1 addition & 1 deletion modules/face/faceid.py
@@ -79,7 +79,7 @@ def face_id(
ip_ckpt = FACEID_MODELS[model]
folder, filename = os.path.split(ip_ckpt)
basename, _ext = os.path.splitext(filename)
-model_path = hf.hf_hub_download(repo_id=folder, filename=filename, cache_dir=shared.opts.diffusers_dir)
+model_path = hf.hf_hub_download(repo_id=folder, filename=filename, cache_dir=shared.opts.hfcache_dir)
if model_path is None:
shared.log.error(f'FaceID download failed: model={model} file="{ip_ckpt}"')
return None
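
The faceid.py hunk only changes where the downloaded checkpoint is cached: hf_hub_download still fetches the file from the hub, but it now lands in the shared HF cache directory instead of the diffusers model folder. A minimal sketch of that pattern, assuming a "repo/filename" style entry like the FACEID_MODELS values; the repo id, file name and cache path below are illustrative placeholders, not values taken from the commit:

import os
from huggingface_hub import hf_hub_download

ip_ckpt = 'h94/IP-Adapter-FaceID/ip-adapter-faceid_sd15.bin'       # illustrative placeholder entry
folder, filename = os.path.split(ip_ckpt)                          # -> repo id, file name
model_path = hf_hub_download(
    repo_id=folder,
    filename=filename,
    cache_dir=os.path.expanduser('~/.cache/huggingface/hub'),      # stand-in for shared.opts.hfcache_dir
)
print(model_path)                                                  # resolved path inside the HF cache, not models/Diffusers
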
2 changes: 1 addition & 1 deletion modules/face/insightface.py
@@ -32,7 +32,7 @@ def get_app(mp_name):
repo_id='vladmandic/insightface-faceanalysis',
filename=f'{mp_name}.zip',
local_dir_use_symlinks=False,
-cache_dir=opts.diffusers_dir,
+cache_dir=opts.hfcache_dir,
local_dir=local_dir
)
if not os.path.exists(extract_dir):
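
The insightface.py hunk is the same cache move for the face-analysis bundle: the zip is still pulled with hf_hub_download into a local folder and unpacked once, only the hub cache now follows the hfcache option. A rough sketch under assumed values; mp_name, local_dir and the extraction target are stand-ins for what the surrounding function does, not code from the commit:

import os
import zipfile
from huggingface_hub import hf_hub_download

mp_name = 'buffalo_l'                                               # assumed model-pack name
local_dir = os.path.join('models', 'insightface')                   # stand-in target folder
extract_dir = os.path.join(local_dir, 'models', mp_name)

model_zip = hf_hub_download(
    repo_id='vladmandic/insightface-faceanalysis',
    filename=f'{mp_name}.zip',
    local_dir=local_dir,
    local_dir_use_symlinks=False,                                   # keep a real file, not a cache symlink
    cache_dir=os.path.expanduser('~/.cache/huggingface/hub'),       # stand-in for opts.hfcache_dir
)
if not os.path.exists(extract_dir):                                 # unpack only once
    with zipfile.ZipFile(model_zip) as archive:
        archive.extractall(os.path.join(local_dir, 'models'))
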
2 changes: 1 addition & 1 deletion modules/model_sd3.py
@@ -142,7 +142,7 @@ def load_sd3(checkpoint_info, cache_dir=None, config=None):
else:
kwargs['variant'] = 'fp16'

-shared.log.debug(f'Load model: type=SD3 kwargs={list(kwargs)}')
+shared.log.debug(f'Load model: type=SD3 kwargs={list(kwargs)} repo="{repo_id}"')

kwargs = model_quant.create_bnb_config(kwargs)
pipe = loader(
5 changes: 5 additions & 0 deletions modules/modelloader.py
@@ -273,6 +273,7 @@ def load_diffusers_models(clear=True):
place = os.path.join(models_path, 'Diffusers')
if clear:
diffuser_repos.clear()
+already_found = []
try:
for folder in os.listdir(place):
try:
@@ -303,7 +304,11 @@ def load_diffusers_models(clear=True):
if (not os.path.exists(index)) and (not os.path.exists(info)) and (not os.path.exists(config)):
debug(f'Diffusers skip model no info: {name}')
continue
+if name in already_found:
+debug(f'Diffusers skip model already found: {name}')
+continue
repo = { 'name': name, 'filename': name, 'friendly': friendly, 'folder': folder, 'path': commit, 'hash': snapshot, 'mtime': mtime, 'model_info': info, 'model_index': index, 'model_config': config }
+already_found.append(name)
diffuser_repos.append(repo)
if os.path.exists(os.path.join(folder, 'hidden')):
continue
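
The modelloader.py change is a plain seen-list: while scanning the local Diffusers folder the same repo name can show up more than once, and only the first occurrence is kept in diffuser_repos. A standalone sketch of that dedup step; the candidate entries below are illustrative, and the real dictionaries also carry paths, hashes and mtimes:

def dedupe_repos(candidates: list[dict]) -> list[dict]:
    seen = []                          # mirrors already_found in the hunk above
    repos = []
    for repo in candidates:
        if repo['name'] in seen:
            continue                   # skip duplicates, keep the first hit
        seen.append(repo['name'])
        repos.append(repo)
    return repos

candidates = [
    {'name': 'some-org/model-a', 'path': '/models/Diffusers/x'},   # illustrative entries
    {'name': 'some-org/model-a', 'path': '/models/Diffusers/y'},   # duplicate name, dropped
    {'name': 'other-org/model-b', 'path': '/models/Diffusers/z'},
]
print([r['name'] for r in dedupe_repos(candidates)])               # ['some-org/model-a', 'other-org/model-b']

A set would make the membership test O(1), but for a local model list of this size the list-based check in the commit is perfectly adequate.
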
4 changes: 2 additions & 2 deletions scripts/prompt_enhance.py
@@ -32,10 +32,10 @@ def show(self, is_img2img):

def load(self):
if self.tokenizer is None:
-self.tokenizer = AutoTokenizer.from_pretrained('gokaygokay/Flux-Prompt-Enhance', cache_dir=shared.opts.diffusers_dir)
+self.tokenizer = AutoTokenizer.from_pretrained('gokaygokay/Flux-Prompt-Enhance', cache_dir=shared.opts.hfcache_dir)
if self.model is None:
shared.log.info(f'Prompt enhance: model="{repo_id}"')
-self.model = AutoModelForSeq2SeqLM.from_pretrained('gokaygokay/Flux-Prompt-Enhance', cache_dir=shared.opts.diffusers_dir).to(device=devices.cpu, dtype=devices.dtype)
+self.model = AutoModelForSeq2SeqLM.from_pretrained('gokaygokay/Flux-Prompt-Enhance', cache_dir=shared.opts.hfcache_dir).to(device=devices.cpu, dtype=devices.dtype)

def enhance(self, prompt, auto_apply: bool = False, temperature: float = 0.7, repetition_penalty: float = 1.2, max_length: int = 128):
self.load()
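
prompt_enhance.py follows the same pattern on the transformers side: tokenizer and model are loaded lazily with from_pretrained, and cache_dir now points at the shared HF cache instead of the diffusers directory. A minimal sketch, assuming the same repo and a stand-in cache path; the prompt prefix and generation settings below are assumptions drawn from the enhance() signature, not the full script:

import os
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

repo_id = 'gokaygokay/Flux-Prompt-Enhance'
cache_dir = os.path.expanduser('~/.cache/huggingface/hub')          # stand-in for shared.opts.hfcache_dir

tokenizer = AutoTokenizer.from_pretrained(repo_id, cache_dir=cache_dir)
model = AutoModelForSeq2SeqLM.from_pretrained(repo_id, cache_dir=cache_dir)

inputs = tokenizer('enhance: a cat sitting on a chair', return_tensors='pt')   # prompt prefix assumed
outputs = model.generate(**inputs, max_length=128, repetition_penalty=1.2)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
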
