Skip to content

Commit 127fe9b

Browse files
Committed: "Added prompt to Flux Prior Redux."
1 parent 01ad879 → commit 127fe9b

File tree

2 files changed

+10
-8
lines changed

2 files changed

+10
-8
lines changed

examples/community/pipeline_hunyuandit_differential_img2img.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1107,13 +1107,14 @@ def __call__(
11071107
callback_kwargs[k] = locals()[k]
11081108
callback_outputs = callback_on_step_end(self, i, t, callback_kwargs)
11091109

1110-
latents = callback_outputs.pop("latents", latents)
1111-
prompt_embeds = callback_outputs.pop("prompt_embeds", prompt_embeds)
1112-
negative_prompt_embeds = callback_outputs.pop("negative_prompt_embeds", negative_prompt_embeds)
1113-
prompt_embeds_2 = callback_outputs.pop("prompt_embeds_2", prompt_embeds_2)
1114-
negative_prompt_embeds_2 = callback_outputs.pop(
1115-
"negative_prompt_embeds_2", negative_prompt_embeds_2
1116-
)
1110+
if callback_outputs is not None:
1111+
latents = callback_outputs.pop("latents", latents)
1112+
prompt_embeds = callback_outputs.pop("prompt_embeds", prompt_embeds)
1113+
negative_prompt_embeds = callback_outputs.pop("negative_prompt_embeds", negative_prompt_embeds)
1114+
prompt_embeds_2 = callback_outputs.pop("prompt_embeds_2", prompt_embeds_2)
1115+
negative_prompt_embeds_2 = callback_outputs.pop(
1116+
"negative_prompt_embeds_2", negative_prompt_embeds_2
1117+
)
11171118

11181119
if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
11191120
progress_bar.update()

src/diffusers/pipelines/flux/pipeline_flux_prior_redux.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -334,6 +334,7 @@ def encode_prompt(
334334
def __call__(
335335
self,
336336
image: PipelineImageInput,
337+
prompt: Optional[str] = "",
337338
return_dict: bool = True,
338339
):
339340
r"""
@@ -378,7 +379,7 @@ def __call__(
378379
pooled_prompt_embeds,
379380
_,
380381
) = self.encode_prompt(
381-
prompt=[""] * batch_size,
382+
prompt=[prompt] * batch_size,
382383
prompt_2=None,
383384
prompt_embeds=None,
384385
pooled_prompt_embeds=None,

0 commit comments

Comments (0)