Skip to content

Commit 95fcb53

Browse files
committed
add AttentionMixin to AuraFlow.
1 parent 6dbee1e commit 95fcb53

1 file changed

Lines changed: 2 additions & 1 deletion

File tree

src/diffusers/models/transformers/auraflow_transformer_2d.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
from ...loaders import FromOriginalModelMixin, PeftAdapterMixin
2424
from ...utils import USE_PEFT_BACKEND, logging, scale_lora_layers, unscale_lora_layers
2525
from ...utils.torch_utils import maybe_allow_in_graph
26+
from ..attention import AttentionMixin
2627
from ..attention_processor import (
2728
Attention,
2829
AuraFlowAttnProcessor2_0,
@@ -274,7 +275,7 @@ def forward(
274275
return encoder_hidden_states, hidden_states
275276

276277

277-
class AuraFlowTransformer2DModel(ModelMixin, ConfigMixin, PeftAdapterMixin, FromOriginalModelMixin):
278+
class AuraFlowTransformer2DModel(ModelMixin, AttentionMixin, ConfigMixin, PeftAdapterMixin, FromOriginalModelMixin):
278279
r"""
279280
A 2D Transformer model as introduced in AuraFlow (https://blog.fal.ai/auraflow/).
280281

0 commit comments

Comments (0)