
Commit 75decca
fix(diffusers): LoRA adapters, handle several base models
Signed-off-by: Raphael Glon <oOraph@users.noreply.github.com>
oOraph committed Sep 26, 2024
1 parent dd29413 commit 75decca
Showing 1 changed file with 4 additions and 2 deletions.
docker_images/diffusers/app/lora.py
@@ -141,7 +141,8 @@ def _load_textual_embeddings(self, adapter, model_data):
             logger.info("Text embeddings loaded for adapter %s", adapter)
         else:
             logger.info(
-                "No text embeddings were loaded due to invalid embeddings or a mismatch of token sizes for adapter %s",
+                "No text embeddings were loaded due to invalid embeddings or a mismatch of token sizes "
+                "for adapter %s",
                 adapter,
             )
         self.current_tokens_loaded = tokens_to_add
@@ -157,7 +158,8 @@ def _load_lora_adapter(self, kwargs):
             logger.error(msg)
             raise ValueError(msg)
         base_model = model_data.cardData["base_model"]
-        if self.model_id != base_model:
+        if (isinstance(base_model, list) and self.model_id not in base_model) or \
+                (not isinstance(base_model, list) and self.model_id != base_model):
             msg = f"Requested adapter {adapter:s} is not a LoRA adapter for base model {self.model_id:s}"
             logger.error(msg)
             raise ValueError(msg)
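
For context, here is a minimal standalone sketch of the compatibility check the new condition expresses, assuming cardData["base_model"] may be either a single model id string or a list of model ids. The helper name and the model ids in the usage lines are hypothetical, not part of the commit.

# Hypothetical helper illustrating the base-model check above; not part of the commit.
from typing import List, Union


def adapter_matches_base_model(model_id: str, base_model: Union[str, List[str]]) -> bool:
    """Return True if a LoRA adapter's base_model metadata is compatible with model_id."""
    if isinstance(base_model, list):
        # The adapter declares several base models: any match is accepted.
        return model_id in base_model
    # A single base model declared as a plain string must match exactly.
    return model_id == base_model


# Usage (model ids are made up for illustration):
assert adapter_matches_base_model(
    "runwayml/stable-diffusion-v1-5",
    ["runwayml/stable-diffusion-v1-5", "stabilityai/sd-turbo"],
)
assert not adapter_matches_base_model("runwayml/stable-diffusion-v1-5", "stabilityai/sd-turbo")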
