From 1d4312db5b328c0dc8768f15a5e8a249ffef4328 Mon Sep 17 00:00:00 2001
From: songkey-home
Date: Mon, 26 Jan 2026 18:07:19 +0800
Subject: [PATCH] Resolve Flux2 Klein 4B/9B LoRA loading errors

---
 src/diffusers/loaders/lora_conversion_utils.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/loaders/lora_conversion_utils.py b/src/diffusers/loaders/lora_conversion_utils.py
index 8f7309d4ed1e..e7584b071a68 100644
--- a/src/diffusers/loaders/lora_conversion_utils.py
+++ b/src/diffusers/loaders/lora_conversion_utils.py
@@ -2321,8 +2321,14 @@ def _convert_non_diffusers_flux2_lora_to_diffusers(state_dict):
     prefix = "diffusion_model."
     original_state_dict = {k[len(prefix) :]: v for k, v in state_dict.items()}
 
-    num_double_layers = 8
-    num_single_layers = 48
+    num_double_layers = 0
+    num_single_layers = 0
+    for key in original_state_dict.keys():
+        if key.startswith("single_blocks."):
+            num_single_layers = max(num_single_layers, int(key.split(".")[1]) + 1)
+        elif key.startswith("double_blocks."):
+            num_double_layers = max(num_double_layers, int(key.split(".")[1]) + 1)
+
     lora_keys = ("lora_A", "lora_B")
     attn_types = ("img_attn", "txt_attn")
 