diff --git a/.github/workflows/wasm-bindgen-test.yaml b/.github/workflows/wasm-bindgen-test.yaml index f6b35b1..a4c6e24 100644 --- a/.github/workflows/wasm-bindgen-test.yaml +++ b/.github/workflows/wasm-bindgen-test.yaml @@ -10,5 +10,5 @@ jobs: run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh # - run: cargo test - - run: wasm-pack test --headless --chrome - - run: wasm-pack test --headless --firefox + - run: wasm-pack test --headless --chrome crates/lora-inspector-wasm + - run: wasm-pack test --headless --firefox crates/lora-inspector-wasm diff --git a/.gitignore b/.gitignore index 99a743f..e98cd89 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ /target +pkg/ +pkg + new.html diff --git a/Cargo.lock b/Cargo.lock index 0fe403f..c35b685 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -25,9 +25,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "base64" -version = "0.21.6" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c79fed4cdb43e993fcdadc7e58a09fd0e3e649c4436fa11da71c9f1f3ee7feb9" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "bitflags" @@ -155,9 +155,9 @@ checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] @@ -173,36 +173,28 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ - "cfg-if", "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" -version = "0.9.15" +version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "autocfg", - "cfg-if", "crossbeam-utils", - "memoffset", - "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" -dependencies = [ - "cfg-if", -] +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" [[package]] name = "crunchy" @@ -438,9 +430,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "js-sys", @@ -538,6 +530,25 @@ dependencies = [ "serde", ] +[[package]] +name = "inspector" +version = "1.0.0" +dependencies = [ + "candle-core", + "getrandom", + "insta", + "memmap2 0.9.0", + "num", + "pest", + "pest_derive", + "safetensors", + "serde", + "serde-wasm-bindgen", + "serde_json", + "serde_with", + "wasm-bindgen", +] + [[package]] name = "insta" version = 
"1.34.0" @@ -554,9 +565,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "js-sys" @@ -598,19 +609,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] -name = "lora-inspector-rs" -version = "0.1.0" +name = "lora-inspector" +version = "1.0.0" +dependencies = [ + "inspector", +] + +[[package]] +name = "lora-inspector-wasm" +version = "1.0.0" dependencies = [ "candle-core", "console_error_panic_hook", - "getrandom", + "inspector", "insta", "memmap2 0.9.0", - "num", - "pest", - "pest_derive", "safetensors", - "serde", "serde-wasm-bindgen", "serde_json", "serde_with", @@ -620,11 +634,15 @@ dependencies = [ "web-sys", ] +[[package]] +name = "lora_inspector" +version = "1.0.0" + [[package]] name = "memchr" -version = "2.6.4" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] name = "memmap2" @@ -645,15 +663,6 @@ dependencies = [ "libc", ] -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] - [[package]] name = "num" version = "0.4.1" @@ -756,9 +765,9 @@ checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "pest" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae9cee2a55a544be8b89dc6848072af97a20f2422603c10865be2a42b580fff5" +checksum = "1f200d8d83c44a45b21764d1916299752ca035d15ecd46faca3e9a2a2bf6ad06" dependencies = [ "memchr", "thiserror", @@ -767,9 +776,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81d78524685f5ef2a3b3bd1cafbc9fcabb036253d9b1463e726a91cd16e2dfc2" +checksum = "bcd6ab1236bbdb3a49027e920e693192ebfe8913f6d60e294de57463a493cfde" dependencies = [ "pest", "pest_generator", @@ -777,9 +786,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68bd1206e71118b5356dae5ddc61c8b11e28b09ef6a31acbd15ea48a28e0c227" +checksum = "2a31940305ffc96863a735bef7c7994a00b325a7138fdbc5bda0f1a0476d3275" dependencies = [ "pest", "pest_meta", @@ -790,9 +799,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c747191d4ad9e4a4ab9c8798f1e82a39affe7ef9648390b7e5548d18e099de6" +checksum = "a7ff62f5259e53b78d1af898941cdcdccfae7385cf7d793a6e55de5d05bb4b7d" dependencies = [ "once_cell", "pest", @@ -813,9 +822,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" dependencies = [ "unicode-ident", ] @@ -834,9 +843,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.33" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -918,9 +927,9 @@ checksum = "03251193000f4bd3b042892be858ee50e8b3719f2b08e5833ac4353724632430" [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "safetensors" @@ -938,12 +947,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - [[package]] name = "seq-macro" version = "0.3.5" @@ -952,9 +955,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" [[package]] name = "serde" -version = "1.0.192" +version = "1.0.195" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" +checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" dependencies = [ "serde_derive", ] @@ -972,9 +975,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.192" +version = "1.0.195" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" +checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" dependencies = [ "proc-macro2", "quote", @@ -983,9 +986,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4" dependencies = [ "itoa", "ryu", @@ -1052,9 +1055,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "syn" -version = "2.0.39" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", @@ -1075,18 +1078,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" dependencies = [ "proc-macro2", "quote", @@ -1226,9 +1229,9 @@ checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" [[package]] name = "wasm-bindgen-test" -version = "0.3.38" +version = "0.3.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6433b7c56db97397842c46b67e11873eda263170afeb3a2dc74a7cb370fee0d" +checksum = "2cf9242c0d27999b831eae4767b2a146feb0b27d332d553e605864acd2afd403" dependencies = [ "console_error_panic_hook", "js-sys", @@ -1240,9 +1243,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.38" +version = "0.3.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "493fcbab756bb764fa37e6bee8cec2dd709eb4273d06d0c282a5e74275ded735" +checksum = "794645f5408c9a039fd09f4d113cdfb2e7eba5ff1956b07bcf701cf4b394fe89" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index 5d1bbe9..319b9b4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,78 +1,51 @@ [package] -name = "lora-inspector-rs" -version = "0.1.0" +name = "lora_inspector" +version = "1.0.0" edition = "2021" +license = "MIT" +repository = "https://github.com/rockerBOO/lora-inspector-rs" +description = "LoRA inspector for Stable Diffusion" [profile.dev.package] insta.opt-level = 3 -[lib] -crate-type = ["cdylib"] +[workspace] +members = ["crates/*"] +resolver = "2" + +[workspace.package] +version = "1.0.0" +authors = ["Dave Lage"] +edition = "2021" +license = "MIT" +repository = "https://github.com/rockerBOO/lora-inspector-rs" +description = "LoRA inspector for Stable Diffusion" # [features] # default = ["console_error_panic_hook"] -[dependencies] -wasm-bindgen = "0.2" -safetensors = "0.3.1" -serde = { version = "1.0", features = ["derive"] } -serde-wasm-bindgen = "0.4" -serde_with = "3.4.0" -console_error_panic_hook = { version = "0.1.6" } +# [dependencies] +# candle-core = { version = "0.3.2" } +# console_error_panic_hook = { version = "0.1.6", optional = true } +# safetensors = "0.3.1" +# serde-wasm-bindgen = "0.4" +# serde_json = "1.0.108" +# serde_with = "3.4.0" +# wasm-bindgen = "0.2" +# +[workspace.dependencies] candle-core = { version = "0.3.2" } -# candle-core = { git = "https://github.com/huggingface/candle.git", version = "0.3.2" } -# candle-core = { path = "/home/rockerboo/code/others/candle/candle-core", version = "0.3.2" } getrandom = { version = "0.2", features = ["js"] } -serde_json = "1.0.108" num = "0.4.1" -# web-sys = { version = "0.3.65", features = ["console"] } pest = "2.6" pest_derive = "2.6" - - -[dependencies.web-sys] -version = "0.3.65" -features = [ - 'console', - # 'Document', - # 'HtmlElement', - # 'HtmlInputElement', - # 'FileList', - # 'File', - # 'Blob', - # 'FileReader', - # 'MessageEvent', - # 'ProgressEvent', - # 'MessagePort', - # 'Window', - # 'Worker', - # 'SharedWorker', - # 'WorkerOptions', - # 'RequestCredentials' -] - -[dev-dependencies] -wasm-bindgen-test = "0.3.13" -wasm-bindgen-futures = "0.4.39" -insta = { version = "1.34.0", features = ["json"] } -memmap2 = "0.9.0" - -[dev-dependencies.web-sys] -version = "0.3.65" -features = [ - 'Headers', - 'Request', - 'RequestInit', - 'RequestMode', - 'Response', - 'Worker', - 'Blob', - 'Performance', - 'Window' -] +safetensors = "0.3.1" +serde = { version = "1.0", features = ["derive"] } +serde-wasm-bindgen = "0.4" +serde_json = "1.0.108" +serde_with = "3.4.0" 
+wasm-bindgen = "0.2" [profile.release] lto = true opt-level = 's' - - diff --git a/LICENSE b/LICENSE index 31ddadf..44fccf1 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 Dave Lage (rockerBOO) +Copyright (c) Dave Lage (rockerBOO) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index b2d8d55..6fff1db 100644 --- a/README.md +++ b/README.md @@ -33,13 +33,15 @@ Welcome X Loading a new file doesn't unload the previous LoRA properly (be aware of loading multiple LoRAS in the future) X Loading some LoRAs fail to be loaded into the buffer + - Loading some LoRAs fail to load their block weights (undefined error) - Average TE/UNet blocks are now invalid (generally) - Skeletor_v1 bf16 safetensors do not process block weights X show precision type of LoRA -### Metadata options +### Metadata options + - CLIP skip - LR Warmup @@ -57,8 +59,6 @@ X Loading some LoRAs fail to be loaded into the buffer - ss_num_train_images - - ss_caption_dropout_every_n_epochs - ss_caption_dropout_rate - ss_caption_tag_dropout_rate - diff --git a/assets/js/main.js b/assets/js/main.js index 38f8364..16a19e8 100644 --- a/assets/js/main.js +++ b/assets/js/main.js @@ -211,9 +211,11 @@ function DiagOFTNetwork({ metadata }) { function LoRANetwork({ metadata }) { const [alphas, setAlphas] = React.useState([ - metadata.get("ss_network_alpha"), + (metadata && metadata.get("ss_network_alpha")) ?? undefined, + ]); + const [dims, setDims] = React.useState([ + (metadata && metadata.get("ss_network_dim")) ?? undefined, ]); - const [dims, setDims] = React.useState([metadata.get("ss_network_dim")]); React.useEffect(() => { trySyncMessage({ messageType: "alphas", name: mainFilename }).then( (resp) => { @@ -347,6 +349,12 @@ function Weight({ metadata, filename }) { // Chart.defaults.font.size = 16; // Chart.defaults.font.family = "monospace"; +function scale_weight() { + // get base_names + // get scale weight + // get progress +} + function Blocks({ metadata, filename }) { // console.log("!!!! BLOCKS !!!!! 
METADATA FILENAME", filename); const [hasBlockWeights, setHasBlockWeights] = React.useState(false); @@ -361,6 +369,15 @@ function Blocks({ metadata, filename }) { const [currentBaseName, setCurrentBaseName] = React.useState(""); const [canHaveBlockWeights, setCanHaveBlockWeights] = React.useState(false); + const [scaleWeightProgress, setScaleWeightProgress] = React.useState(0); + const [currentScaleWeightCount, setCurrentScaleWeightCount] = React.useState(0); + const [totalScaleWeightCount, setTotalScaleWeightCount] = React.useState(0); + + // setCurrentScaleWeightCount(value.currentCount); + // setTotalScaleWeightCount(value.totalCount); + // setScaleWeightProgress(value.currentCount / value.totalCount); + const teChartRef = React.useRef(null); const unetChartRef = React.useRef(null); @@ -369,22 +386,29 @@ function Blocks({ metadata, filename }) { return; } - // const averageMagnitudes = get_average_magnitude_by_block(buffer); - // const averageStrength = get_average_strength_by_block(buffer); - // - trySyncMessage({ - messageType: "scale_weights", + messageType: "scale_weights_with_progress", name: filename, reply: true, }).then(() => { console.log("getting l2 norms..."); + + listenProgress("l2_norms_progress").then(async (getProgress) => { + while ((progress = await getProgress().next())) { + const value = progress.value; + if (!value) { + break; + } + setCurrentBaseName(value.baseName); + setCurrentCount(value.currentCount); + setTotalCount(value.totalCount); + setNormProgress(value.currentCount / value.totalCount); + } + }); + trySyncMessage({ messageType: "l2_norm", name: filename, reply: true, }).then((resp) => { - console.log(resp); // setTEMagBlocks(averageMagnitudes.get("text_encoder")); setTEMagBlocks(resp.norms.te); setUnetMagBlocks(resp.norms.unet); @@ -403,7 +427,6 @@ function Blocks({ metadata, filename }) { name: filename, reply: true, }).then((resp) => { - console.log("network type", resp); if ( resp.networkType === "LoRA" || resp.networkType === "LoRAFA" || @@ -432,13 +455,16 @@ function Blocks({ metadata, filename }) { setStartTime(performance.now()); - listenProgress("l2_norms_progress").then(async (getProgress) => { + listenProgress("scale_weight_progress").then(async (getProgress) => { while ((progress = await getProgress().next())) { const value = progress.value; + if (!value) { + break; + } setCurrentBaseName(value.baseName); - setCurrentCount(value.currentCount); - setTotalCount(value.totalCount); - setNormProgress(value.currentCount / value.totalCount); + setCurrentScaleWeightCount(value.currentCount); + setTotalScaleWeightCount(value.totalCount); + setScaleWeightProgress(value.currentCount / value.totalCount); } }); @@ -460,7 +486,7 @@ function Blocks({ metadata, filename }) { // dataset.map(([k, v]) => strBlocks.get(k)), ], }; - console.log("chartdata", data); + // console.log("chartdata", data); const chart = new Chartist.Line(chartRef.current, data, { chartPadding: { right: 60, @@ -527,11 +553,23 @@ function Blocks({ metadata, filename }) { }); }; + console.log("te blocks", teMagBlocks); + console.log("te str blocks", teStrBlocks); if (teMagBlocks.size > 0) { - makeChart(Array.from(teMagBlocks), teChartRef, teStrBlocks); + makeChart( + // We drop zero-mean entries here because the chart renders them as undefined points + Array.from(teMagBlocks).filter(([k, v]) => v["mean"] !== 0), + teChartRef, + teStrBlocks, + ); } if (unetMagBlocks.size > 0) { - makeChart(Array.from(unetMagBlocks), unetChartRef, unetStrBlocks); + makeChart( + // We drop zero-mean
entries here because the chart renders them as undefined points + Array.from(unetMagBlocks).filter(([k, v]) => v["mean"] !== 0), + unetChartRef, + unetStrBlocks, + ); } }, [teMagBlocks, teStrBlocks, unetMagBlocks, unetStrBlocks]); @@ -631,7 +669,27 @@ function Blocks({ metadata, filename }) { const perSecond = currentCount / (elapsedTime / 1_000); if (currentCount === 0) { - return h("div", null, "waiting for worker... please wait"); + const elapsedTime = performance.now() - startTime; + const perSecond = currentScaleWeightCount / (elapsedTime / 1_000); + + // Estimated total time is elapsed / fraction complete; remaining is that minus elapsed (ms). + const remaining = + elapsedTime / scaleWeightProgress - elapsedTime; + return h( + "div", + { className: "block-weights-container" }, + h( + "span", + null, + `Scaling weights... ${(scaleWeightProgress * 100).toFixed( + 2, + )}% ${currentScaleWeightCount}/${totalScaleWeightCount} ${perSecond.toFixed( + 2, + )}it/s ${(remaining / 1_000).toFixed( + 2, + )}s remaining ${currentBaseName} `, + ), + ); } return h( @@ -850,12 +908,12 @@ function Buckets({ dataset, metadata }) { } function BucketInfo({ metadata, dataset }) { - // No bucket info + // No bucket info if (!dataset["bucket_info"]) { return; } - // No buckets data + // No buckets data if (!dataset["bucket_info"]["buckets"]) { return; } @@ -2480,7 +2538,7 @@ function Main({ metadata, filename }) { h("div", null, "No metadata for this file"), h(Headline, { filename }), h(Weight, { metadata, filename }), - h(Advanced, { metadata, filename }), + // h(Advanced, { metadata, filename }), ]); } @@ -2495,7 +2553,7 @@ function Main({ metadata, filename }) { h(Noise, { metadata }), h(Loss, { metadata }), h(Dataset, { metadata }), - h(Advanced, { metadata, filename }), + // h(Advanced, { metadata, filename }), ]); } @@ -2778,11 +2836,11 @@ async function processFile(file) { worker.postMessage({ messageType: "network_type", name: mainFilename }); worker.postMessage({ messageType: "weight_keys", name: mainFilename }); worker.postMessage({ messageType: "alpha_keys", name: mainFilename }); - trySyncMessage({ messageType: "keys", name: mainFilename }).then( - (keys) => { - console.log("keys", keys); - }, - ); + // trySyncMessage({ messageType: "keys", name: mainFilename }).then( + // (keys) => { + // console.log("keys", keys); + // }, + // ); worker.postMessage({ messageType: "base_names", name: mainFilename }); worker.postMessage({ messageType: "weight_norms", name: mainFilename }); // worker.postMessage({ messageType: "alphas", name: mainFilename }); @@ -2924,17 +2982,35 @@ async function trySyncMessage(message, matches = []) { }); } +const listenProgressListeners = []; +const listenProgressFinishedListeners = []; + async function listenProgress(messageType) { let isFinished = false; function finishedWorkerHandler(e) { + console.log( + "FINISHED", + e.data, + e.data.messageType === `${messageType}_finished`, + ); if (e.data.messageType === `${messageType}_finished`) { worker.removeEventListener("message", finishedWorkerHandler); isFinished = true; - } else { - // console.log("unhandled finished message", e.data); + listenProgressFinishedListeners.pop(); + console.log( + "Remove finished worker", + listenProgressFinishedListeners.length, + isFinished, + ); } } + listenProgressFinishedListeners.push(1); + console.log( + "Adding finished worker", + listenProgressFinishedListeners.length, + isFinished, + ); worker.addEventListener("message", finishedWorkerHandler); return async function* listen() { @@ -2947,10 +3023,19 @@ async function 
listenProgress(messageType) { function workerHandler(e) { if (e.data.messageType === messageType) { worker.removeEventListener("message", workerHandler); + listenProgressListeners.pop(); + console.log( + "Remove worker", + messageType, + listenProgressListeners.length, + isFinished, + ); resolve(e.data); } } + listenProgressListeners.push(1); + console.log("Adding worker", listenProgressListeners.length, isFinished); worker.addEventListener("message", workerHandler); }); }; diff --git a/assets/js/worker.js b/assets/js/worker.js index 9d0c382..71cc984 100644 --- a/assets/js/worker.js +++ b/assets/js/worker.js @@ -1,7 +1,7 @@ // The worker has its own scope and no direct access to functions/objects of the // global scope. We import the generated JS file to make `wasm_bindgen` // available which we need to initialize our Wasm code. -importScripts("/pkg/lora_inspector_rs.js"); +importScripts("/pkg/lora_inspector_wasm.js"); // In the worker, we have a different struct that we want to use as in // `index.js`. @@ -14,14 +14,14 @@ const { LoraWorker } = wasm_bindgen; let loraWorkers = new Map(); function addWorker(name, worker) { - console.log("Adding worker ", name); + // console.log("Adding worker ", name); loraWorkers.set(name, worker); return loraWorkers.get(name); } function removeWorker(workerName) { - console.log("Removing worker ", workerName); + // console.log("Removing worker ", workerName); loraWorkers.remove(workerName); } @@ -37,7 +37,7 @@ function getWorker(workerName) { function init_wasm_in_worker() { // Load the wasm file by awaiting the Promise returned by `wasm_bindgen`. - wasm_bindgen("/pkg/lora_inspector_rs_bg.wasm").then(() => { + wasm_bindgen("/pkg/lora_inspector_wasm_bg.wasm").then(() => { onerror = (event) => { console.log("There is an error inside your worker!", event); }; @@ -47,9 +47,7 @@ function init_wasm_in_worker() { // unload old workers for now... 
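// A rough sketch of the progress protocol between main.js and this worker,
// as implemented by the handlers below (message names come from the
// messageType branches; exact payload shapes beyond the fields posted
// below are assumptions):
//
//   main.js                               worker.js
//   "file_upload"                  ---->  fileUploadHandler(), which replies
//                                         with the parsed metadata, or posts
//                                         "metadata_error" on failure
//   "scale_weights_with_progress"  ---->  iterScaleWeights(), which posts one
//                                         "scale_weight_progress"
//                                         { currentCount, totalCount, baseName }
//                                         per base name, then
//                                         "scale_weight_progress_finished"
//   "l2_norm"                      ---->  getL2Norms(), which posts
//                                         "l2_norms_progress" per base name,
//                                         then "l2_norms_progress_finished"
//                                         and replies with { norms: { te, unet } }
//
// main.js consumes these via listenProgress(messageType), which resolves one
// `${messageType}` event at a time and stops at `${messageType}_finished`.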
// console.log("Clearing workers"); // loraWorkers.clear(); - fileUploadHandler(e).then(() => { - console.log("LoRA Workers: ", [...loraWorkers.keys()]) - }); + fileUploadHandler(e); } else if (e.data.messageType === "network_module") { getNetworkModule(e).then((networkModule) => { if (e.data.reply) { @@ -81,7 +79,6 @@ function init_wasm_in_worker() { getWeightKeys(e); } else if (e.data.messageType === "keys") { getKeys(e).then((keys) => { - console.log(keys); if (e.data.reply) { self.postMessage({ messageType: "keys", @@ -124,6 +121,22 @@ function init_wasm_in_worker() { }); } }); + } else if (e.data.messageType === "scale_weights_with_progress") { + iterScaleWeights(e).then((baseNames) => { + if (e.data.reply) { + self.postMessage({ + messageType: "scale_weights_with_progress", + }); + } + }); + } else if (e.data.messageType === "scale_weight") { + scaleWeight(e).then((baseNames) => { + if (e.data.reply) { + self.postMessage({ + messageType: "scale_weight", + }); + } + }); } else if (e.data.messageType === "l2_norm") { // We must lock if we are getting scaled weights @@ -211,7 +224,7 @@ async function fileUploadHandler(e) { metadata: loraWorker.metadata(), }); } catch (err) { - console.error("Could not upload the LoRA", err) + console.error("Could not upload the LoRA", err); self.postMessage({ messageType: "metadata_error", message: "could not parse the LoRA", @@ -242,8 +255,8 @@ async function getKeys(e) { async function scaleWeights(e) { console.log("scaling weights..."); console.time("scale_weights"); - // console.log(performance.memory); - await navigator.locks.request(`scaled-weights`, async (lock) => { + // console.log(performance.memory); + await navigator.locks.request(`scale-weights`, async (lock) => { const name = e.data.name; const loraWorker = loraWorkers.get(name); @@ -252,6 +265,66 @@ async function scaleWeights(e) { }); } +async function iterScaleWeights(e) { + const name = e.data.name; + const loraWorker = loraWorkers.get(name); + + await navigator.locks.request(`scale-weights`, async (lock) => { + const baseNames = loraWorker.base_names(); + const totalCount = baseNames.length; + + let currentCount = 0; + + console.time("scale_weights"); + await Promise.allSettled( + baseNames.map((baseName) => { + currentCount += 1; + + try { + loraWorker.scale_weight(baseName); + + self.postMessage({ + messageType: "scale_weight_progress", + currentCount, + totalCount, + baseName: baseName, + }); + } catch (e) { + console.error(e); + self.postMessage({ + messageType: "scale_weight_progress", + currentCount, + totalCount, + baseName: baseName, + }); + } + }), + ).then(() => { + console.log("Finished scaled weight progress") + self.postMessage({ + messageType: "scale_weight_progress_finished", + }); + + console.time("scale_weights"); + }); + }); +} + +async function scaleWeight(e) { + console.log("scaling weight..."); + console.time("scale_weight"); + // console.log(performance.memory); + await navigator.locks.request(`scale-weights`, async (lock) => { + const name = e.data.name; + const baseName = e.data.baseName; + + const loraWorker = loraWorkers.get(name); + loraWorker.scale_weight(baseName); + + console.timeEnd("scale_weight"); + }); +} + async function getTextEncoderKeys(e) { const loraWorker = getWorker(e.data.name); @@ -279,7 +352,7 @@ async function getNorms(e) { const loraWorker = loraWorkers.get(name); const baseName = e.data.baseName; - console.log("Getting norm for ", baseName); + console.log("Getting norm for ", baseName); const scaled = loraWorker.norms(baseName, [ 
"l1_norm", @@ -287,8 +360,8 @@ async function getNorms(e) { "matrix_norm", "max", "min", - "std_dev", - "median", + // "std_dev", + // "median", ]); return scaled; @@ -297,12 +370,12 @@ async function getNorms(e) { async function getL2Norms(e) { const loraWorker = getWorker(e.data.name); - await navigator.locks.request(`scaled-weights`, async (lock) => { - const name = e.data.name; - - const loraWorker = loraWorkers.get(name); - loraWorker.scale_weights(); - }); + // await navigator.locks.request(`scaled-weights`, async (lock) => { + // const name = e.data.name; + // + // const loraWorker = loraWorkers.get(name); + // loraWorker.scale_weights(); + // }); console.time("Calculating norms"); console.log("Calculating l2 norms..."); @@ -322,10 +395,20 @@ async function getL2Norms(e) { totalCount, baseName: base_name, }); - return [base_name, loraWorker.l2_norm(base_name)]; + + try { + return [base_name, loraWorker.l2_norm(base_name)]; + } catch (e) { + console.error(e); + return [base_name, undefined]; + } }) .reduce( (acc, [base_name, norm]) => { + if (norm === undefined) { + return acc; + } + // loraWorker.parse_key(key); const parts = parseSDKey(base_name); @@ -354,26 +437,26 @@ async function getL2Norms(e) { ); self.postMessage({ - messageType: "l2_norm_progress_finished", + messageType: "l2_norms_progress_finished", }); - console.log( - "weight_norms block", - Array.from(l2Norms["block"]).sort(([k, _], [k2, _v]) => { - return k > k2; - }), - ); - console.log( - "weight_norms count", - Array.from(l2Norms["block_count"]).sort(([k, _], [k2, _v]) => { - return k > k2; - }), - ); + // console.log( + // "weight_norms block", + // Array.from(l2Norms["block"]).sort(([k, _], [k2, _v]) => { + // return k > k2; + // }), + // ); + // console.log( + // "weight_norms count", + // Array.from(l2Norms["block_count"]).sort(([k, _], [k2, _v]) => { + // return k > k2; + // }), + // ); const norms = Array.from(l2Norms["block_mean"]).sort(([k, _], [k2, _v]) => { return k > k2; }); - console.log("weight_norms mean", norms); + // console.log("weight_norms mean", norms); console.timeEnd("Calculating norms"); // Split between TE and UNet diff --git a/crates/inspector/Cargo.toml b/crates/inspector/Cargo.toml new file mode 100644 index 0000000..c69e406 --- /dev/null +++ b/crates/inspector/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "inspector" +version.workspace = true +edition.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +candle-core = { workspace = true } +getrandom = { workspace = true, features = ["js"] } +num = { workspace = true } +pest = { workspace = true } +pest_derive = { workspace = true } +safetensors = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +serde_with = { workspace = true } +wasm-bindgen = "0.2" +serde-wasm-bindgen = "0.4" + +[dev-dependencies] +insta = { version = "1.34.0", features = ["json"] } +memmap2 = "0.9.0" diff --git a/crates/inspector/keys.json b/crates/inspector/keys.json new file mode 100644 index 0000000..706e468 --- /dev/null +++ b/crates/inspector/keys.json @@ -0,0 +1,9027 @@ +[ + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.lokr_w1", 
+ "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.alpha", + "lora_unet_output_blocks_4_1_proj_out.alpha", + "lora_unet_output_blocks_6_0_in_layers_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_18_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_5_self_attn_out_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_8_0_emb_layers_1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_input_blocks_6_0_op.alpha", + "lora_te2_text_model_encoder_layers_19_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.lokr_w1", + 
"lora_unet_output_blocks_8_0_out_layers_3.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.lokr_w2", + "lora_unet_middle_block_0_in_layers_2.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_1_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_2_0_emb_layers_1.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_7_0_emb_layers_1.alpha", + 
"lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_input_blocks_6_0_op.lokr_w1", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_unet_output_blocks_3_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_0_0_emb_layers_1.lokr_w1", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.lokr_w1", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_0_out_layers_3.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_11_self_attn_out_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.lokr_w2", + 
"lora_te1_text_model_encoder_layers_1_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_8_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_proj_in.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.lokr_w2", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_5_1_proj_in.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_proj_in.lokr_w2", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_8_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.alpha", + 
"lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_0_in_layers_2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_0_out_layers_3.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_0_skip_connection.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.lokr_w2", + 
"lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_27_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_output_blocks_4_0_in_layers_2.lokr_w2", + "lora_te1_text_model_encoder_layers_5_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_8_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_1_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_0_out_layers_3.lokr_w1", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_8_0_skip_connection.lokr_w2", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.alpha", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.lokr_w1", + 
"lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_7_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.alpha", + "lora_unet_output_blocks_8_0_in_layers_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_0_emb_layers_1.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_1_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_middle_block_1_proj_in.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.lokr_w2", + 
"lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_proj_in.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_7_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.alpha", + "lora_unet_output_blocks_3_0_skip_connection.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.lokr_w2", + 
"lora_unet_input_blocks_8_0_out_layers_3.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_5_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_unet_output_blocks_0_0_in_layers_2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_0_in_layers_2.alpha", + "lora_unet_input_blocks_8_0_out_layers_3.lokr_w1", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_0_in_layers_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.lokr_w1", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_output_blocks_2_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_3_0_in_layers_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_11_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.alpha", + 
"lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_0_in_layers_2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_3_1_proj_in.lokr_w2", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_3_0_skip_connection.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_4_1_proj_in.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_proj_in.lokr_w2", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_5_0_in_layers_2.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.lokr_w2", 
+ "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_5_1_proj_in.alpha", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_proj_in.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_output_blocks_5_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_6_0_in_layers_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_4_0_skip_connection.lokr_w2", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.alpha", + "lora_unet_output_blocks_5_0_out_layers_3.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_6_0_out_layers_3.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_5_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.alpha", + 
"lora_te1_text_model_encoder_layers_7_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_8_0_in_layers_2.lokr_w2", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.lokr_w1", + "lora_unet_middle_block_0_emb_layers_1.lokr_w1", + "lora_te1_text_model_encoder_layers_3_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_6_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_19_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.lokr_w2", + 
"lora_te1_text_model_encoder_layers_6_self_attn_v_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_3_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.lokr_w2", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.lokr_w2", + "lora_unet_output_blocks_4_1_proj_in.lokr_w1", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_26_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_0_in_layers_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_5_0_emb_layers_1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_output_blocks_5_1_proj_out.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_1_0_emb_layers_1.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.lokr_w1", + 
"lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_output_blocks_5_2_conv.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_input_blocks_4_0_skip_connection.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_8_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_7_0_skip_connection.lokr_w1", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.lokr_w2", + "lora_unet_middle_block_2_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_input_blocks_6_0_op.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_8_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_4_0_in_layers_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.lokr_w1", + 
"lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_4_0_emb_layers_1.alpha", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_4_0_emb_layers_1.lokr_w1", + "lora_unet_input_blocks_7_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_11_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_20_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_20_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_2_in_layers_2.lokr_w1", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.lokr_w2", + 
"lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.alpha", + "lora_unet_output_blocks_4_0_out_layers_3.lokr_w1", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_0_emb_layers_1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_output_blocks_1_0_skip_connection.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_8_0_skip_connection.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.lokr_w2", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_7_1_proj_in.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.alpha", + "lora_unet_output_blocks_1_0_out_layers_3.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.lokr_w1", + 
"lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_proj_out.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.lokr_w1", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_8_1_proj_in.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.alpha", + "lora_unet_input_blocks_5_0_emb_layers_1.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_3_0_emb_layers_1.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_23_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.lokr_w2", + 
"lora_te2_text_model_encoder_layers_11_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_proj_out.lokr_w1", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_1_0_emb_layers_1.lokr_w1", + "lora_te2_text_model_encoder_layers_15_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_unet_output_blocks_6_0_skip_connection.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_7_1_proj_out.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_0_in_layers_2.alpha", + 
"lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_4_1_proj_out.alpha", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_2_conv.lokr_w1", + "lora_unet_input_blocks_3_0_op.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_11_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_9_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_4_0_skip_connection.alpha", + "lora_unet_middle_block_1_proj_out.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.lokr_w1", + "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_8_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_0_in_layers_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.lokr_w1", + 
"lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_3_1_proj_out.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_6_self_attn_out_proj.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_5_0_skip_connection.lokr_w1", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_0_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_4_1_proj_in.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_2_0_emb_layers_1.lokr_w1", + 
"lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_5_mlp_fc1.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_3_0_out_layers_3.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_4_1_proj_out.lokr_w2", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_proj_out.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_2_0_skip_connection.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.lokr_w2", + 
"lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_4_0_emb_layers_1.lokr_w2", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_4_0_skip_connection.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_3_0_in_layers_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_6_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.lokr_w2", + 
"lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_20_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_3_0_emb_layers_1.alpha", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_output_blocks_4_0_emb_layers_1.lokr_w1", + "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_proj_out.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.lokr_w1", + 
"lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_unet_middle_block_2_emb_layers_1.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_11_self_attn_v_proj.alpha", + "lora_unet_middle_block_2_out_layers_3.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_2_0_out_layers_3.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_8_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_22_self_attn_v_proj.alpha", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_input_blocks_4_0_out_layers_3.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_output_blocks_4_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_4_0_skip_connection.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.alpha", + 
"lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_15_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_4_0_out_layers_3.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.lokr_w1", + 
"lora_te2_text_model_encoder_layers_8_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_2_0_out_layers_3.alpha", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_0_skip_connection.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.alpha", + 
"lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_5_1_proj_out.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.lokr_w2", + 
"lora_te2_text_model_encoder_layers_30_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_20_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_7_0_emb_layers_1.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_middle_block_0_out_layers_3.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_2_conv.lokr_w2", + "lora_unet_output_blocks_3_1_proj_out.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_proj_out.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_0_in_layers_2.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_5_2_conv.lokr_w1", + 
"lora_unet_output_blocks_7_0_out_layers_3.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.lokr_w1", + "lora_unet_middle_block_0_emb_layers_1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_1_1_proj_out.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_1_0_skip_connection.alpha", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.lokr_w1", + "lora_unet_output_blocks_4_1_proj_in.lokr_w2", + "lora_unet_output_blocks_5_0_emb_layers_1.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_8_1_proj_out.lokr_w1", + "lora_unet_input_blocks_2_0_in_layers_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_8_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_0_out_layers_3.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_6_0_out_layers_3.lokr_w1", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.lokr_w1", + 
"lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_19_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_2_0_emb_layers_1.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_output_blocks_7_0_skip_connection.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_proj_in.lokr_w1", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_0_out_layers_3.lokr_w2", + 
"lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.alpha", + "lora_te1_text_model_encoder_layers_5_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_8_0_emb_layers_1.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.alpha", + 
"lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.alpha", + "lora_unet_output_blocks_0_0_skip_connection.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_4_0_out_layers_3.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_1_0_out_layers_3.alpha", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_5_0_out_layers_3.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.lokr_w2", + 
"lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_5_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.alpha", + "lora_unet_input_blocks_5_0_out_layers_3.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_8_0_out_layers_3.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_5_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_0_out_layers_3.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_output_blocks_5_0_skip_connection.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.alpha", + 
"lora_te2_text_model_encoder_layers_27_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_input_blocks_8_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_26_mlp_fc1.lokr_w2", + 
"lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_7_0_in_layers_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_7_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_4_0_in_layers_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_3_0_out_layers_3.alpha", + "lora_unet_output_blocks_4_1_proj_out.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.alpha", + "lora_unet_output_blocks_1_0_emb_layers_1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.lokr_w2", + "lora_unet_middle_block_2_out_layers_3.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.lokr_w1", + 
"lora_te1_text_model_encoder_layers_3_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_5_0_in_layers_2.lokr_w2", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_6_0_in_layers_2.lokr_w1", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.lokr_w2", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_input_blocks_5_0_emb_layers_1.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_15_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_output_blocks_6_0_emb_layers_1.lokr_w1", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_8_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_proj_out.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_27_self_attn_out_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.lokr_w1", + 
"lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_8_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_0_in_layers_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.lokr_w2", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_5_1_proj_in.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.alpha", + "lora_unet_input_blocks_4_0_out_layers_3.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.alpha", + 
"lora_te2_text_model_encoder_layers_15_self_attn_out_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_0_0_skip_connection.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_23_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_22_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_7_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_0_skip_connection.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.lokr_w2", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.lokr_w2", + 
"lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_18_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_9_self_attn_q_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_2_0_in_layers_2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_8_0_emb_layers_1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_6_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.lokr_w1", + 
"lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.lokr_w2", + "lora_unet_middle_block_0_out_layers_3.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_7_0_out_layers_3.alpha", + "lora_unet_output_blocks_7_0_skip_connection.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_22_self_attn_v_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_15_mlp_fc1.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_unet_output_blocks_5_1_proj_out.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_6_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_proj_in.alpha", + 
"lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_7_0_out_layers_3.lokr_w2", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_6_0_skip_connection.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_5_0_out_layers_3.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.alpha", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_3_0_op.lokr_w1", + "lora_unet_input_blocks_5_1_proj_in.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_8_0_skip_connection.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_2_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.lokr_w1", + 
"lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_input_blocks_7_1_proj_in.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_input_blocks_2_0_in_layers_2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_input_blocks_4_1_proj_in.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.alpha", + 
"lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_5_0_out_layers_3.lokr_w1", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_2_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_2_0_out_layers_3.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_proj_in.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_3_0_in_layers_2.lokr_w2", + "lora_unet_output_blocks_3_1_proj_in.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_2_1_proj_in.alpha", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.alpha", + 
"lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_9_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_0_out_layers_3.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.lokr_w1", + 
"lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.lokr_w1", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_unet_middle_block_2_emb_layers_1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.lokr_w2", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_6_0_emb_layers_1.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_1_1_proj_out.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_output_blocks_1_0_emb_layers_1.lokr_w2", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.lokr_w2", + 
"lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.alpha", + "lora_unet_output_blocks_2_1_proj_out.lokr_w2", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.lokr_w2", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_20_self_attn_q_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_6_self_attn_v_proj.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_0_out_layers_3.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.lokr_w1", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_4_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.lokr_w1", + 
"lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_0_1_proj_in.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_15_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_4_0_in_layers_2.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.lokr_w2", + 
"lora_unet_output_blocks_1_0_out_layers_3.lokr_w2", + "lora_te1_text_model_encoder_layers_6_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_30_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_0_emb_layers_1.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.lokr_w1", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_5_2_conv.lokr_w2", + 
"lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_3_0_skip_connection.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_8_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_5_1_proj_in.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.lokr_w1", + "lora_unet_middle_block_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.lokr_w1", + 
"lora_unet_output_blocks_6_0_emb_layers_1.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_proj_out.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.lokr_w2", + "lora_unet_input_blocks_4_1_proj_out.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_0_skip_connection.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.lokr_w1", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.lokr_w2", + "lora_unet_middle_block_2_out_layers_3.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.lokr_w2", + 
"lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_4_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_1_0_emb_layers_1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_3_0_op.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_5_0_in_layers_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_proj_in.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.lokr_w1", + 
"lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_proj_out.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_4_0_emb_layers_1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.lokr_w1", + "lora_te1_text_model_encoder_layers_5_self_attn_out_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_23_self_attn_q_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_0_0_out_layers_3.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_proj_out.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.alpha", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_2_conv.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_1_0_out_layers_3.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_1_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_unet_middle_block_2_in_layers_2.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.lokr_w2", + 
"lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_5_0_skip_connection.lokr_w2", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_5_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_5_0_out_layers_3.lokr_w1", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.lokr_w2", + "lora_te1_text_model_encoder_layers_5_mlp_fc1.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_proj_in.lokr_w1", + "lora_unet_output_blocks_2_0_emb_layers_1.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_3_1_proj_in.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_0_out_layers_3.alpha", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_6_0_out_layers_3.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.lokr_w2", + 
"lora_te2_text_model_encoder_layers_11_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_11_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_5_1_proj_in.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_0_in_layers_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.lokr_w2", + "lora_unet_output_blocks_3_1_proj_out.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_4_0_skip_connection.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.lokr_w2", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.lokr_w2", + 
"lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_input_blocks_1_0_in_layers_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.alpha", + "lora_unet_output_blocks_7_0_skip_connection.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.lokr_w2", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.lokr_w1", + "lora_unet_input_blocks_7_1_proj_in.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.lokr_w2", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_0_skip_connection.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.lokr_w1", + "lora_unet_input_blocks_4_1_proj_out.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.lokr_w1", + 
"lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_1_0_emb_layers_1.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.lokr_w1", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.alpha", + "lora_te1_text_model_encoder_layers_1_self_attn_k_proj.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_6_0_skip_connection.lokr_w2", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.alpha", + "lora_te1_text_model_encoder_layers_7_self_attn_out_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.lokr_w2", + 
"lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.lokr_w2", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_0_skip_connection.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_input_blocks_1_0_out_layers_3.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.alpha", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_26_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.lokr_w2", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_4_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_8_0_emb_layers_1.lokr_w1", + "lora_unet_output_blocks_7_0_in_layers_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.alpha", + 
"lora_te1_text_model_encoder_layers_6_self_attn_out_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.alpha", + "lora_unet_output_blocks_5_0_emb_layers_1.alpha", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_11_self_attn_out_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.lokr_w2", + "lora_unet_output_blocks_0_0_out_layers_3.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.lokr_w1", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_30_mlp_fc2.alpha", + "lora_unet_input_blocks_5_1_proj_out.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_middle_block_0_out_layers_3.lokr_w1", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.lokr_w2", + "lora_unet_middle_block_1_proj_out.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.lokr_w1", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.lokr_w1", + 
"lora_te1_text_model_encoder_layers_11_self_attn_q_proj.lokr_w1", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.lokr_w2", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.lokr_w2", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.lokr_w2", + "lora_unet_input_blocks_5_1_proj_out.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.lokr_w1", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.lokr_w1", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.lokr_w2", + 
"lora_unet_output_blocks_1_1_proj_in.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.alpha", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_4_0_out_layers_3.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_5_1_proj_out.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_20_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.lokr_w1", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.alpha", + "lora_unet_middle_block_2_in_layers_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_proj_out.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.lokr_w2", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.lokr_w1", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.lokr_w1", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.lokr_w2", + "lora_unet_output_blocks_4_1_proj_in.alpha", + "lora_unet_output_blocks_8_0_out_layers_3.lokr_w2", + "lora_unet_output_blocks_3_0_out_layers_3.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.lokr_w1", + 
"lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.lokr_w1", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_middle_block_0_emb_layers_1.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.lokr_w2", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.lokr_w1", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.lokr_w2", + "lora_te1_text_model_encoder_layers_11_mlp_fc1.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.lokr_w1", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.lokr_w2", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.lokr_w2", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.lokr_w2", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.lokr_w1", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.lokr_w2", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.lokr_w1", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.lokr_w2", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.lokr_w2", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.lokr_w1", + "lora_unet_input_blocks_5_0_in_layers_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.lokr_w1", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_proj_out.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.lokr_w1", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.lokr_w1", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.lokr_w2", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.lokr_w1", + 
"lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.lokr_w1", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.lokr_w2", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.lokr_w2", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.lokr_w1", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.lokr_w2", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.hada_w2_a", + 
"lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_input_blocks_7_0_in_layers_2.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_1_0_out_layers_3.hada_w1_b", + "lora_unet_middle_block_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_4_0_skip_connection.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_7_1_proj_in.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + 
"lora_unet_output_blocks_6_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_1_0_out_layers_3.hada_w2_a", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_te1_text_model_encoder_layers_9_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_4_0_emb_layers_1.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.hada_w1_b", + "lora_unet_middle_block_2_in_layers_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_8_mlp_fc1.hada_w2_b", + 
"lora_te1_text_model_encoder_layers_11_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_output_blocks_6_0_emb_layers_1.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_1_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_output_blocks_5_1_proj_in.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_unet_middle_block_0_out_layers_3.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.hada_w1_a", + 
"lora_te2_text_model_encoder_layers_8_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_7_1_proj_in.alpha", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_20_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_5_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_5_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.hada_w1_a", + 
"lora_unet_input_blocks_4_0_skip_connection.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_5_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_middle_block_0_in_layers_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_0_in_layers_2.alpha", + 
"lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_0_out_layers_3.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_proj_out.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_middle_block_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_5_1_proj_out.hada_w2_a", + "lora_unet_input_blocks_7_1_proj_in.hada_w2_b", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_4_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_middle_block_0_emb_layers_1.hada_w1_a", + 
"lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.hada_w1_a", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_5_0_emb_layers_1.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + 
"lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_2_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_2_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_4_0_emb_layers_1.alpha", + "lora_unet_input_blocks_8_1_proj_out.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_1_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_8_0_in_layers_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + 
"lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_3_0_skip_connection.alpha", + "lora_unet_output_blocks_5_2_conv.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_11_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_5_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_3_0_out_layers_3.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_3_1_proj_in.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.hada_w1_a", + 
"lora_te2_text_model_encoder_layers_19_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_8_0_skip_connection.hada_w1_b", + "lora_unet_input_blocks_7_0_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.alpha", + "lora_unet_output_blocks_3_1_proj_out.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_8_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_3_0_in_layers_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_2_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_8_0_emb_layers_1.hada_w2_b", + "lora_unet_middle_block_1_proj_out.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_4_1_proj_out.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.hada_w1_b", + 
"lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.hada_w2_b", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_8_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_0_out_layers_3.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_4_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.alpha", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_5_2_conv.hada_w1_a", + 
"lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_7_0_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_input_blocks_8_0_out_layers_3.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_4_1_proj_out.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_6_0_op.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_5_0_in_layers_2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.hada_w1_a", + 
"lora_unet_input_blocks_4_0_in_layers_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_5_mlp_fc1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_15_self_attn_out_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_5_0_emb_layers_1.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_proj_in.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.hada_w2_b", + 
"lora_unet_output_blocks_7_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_5_0_out_layers_3.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_6_mlp_fc2.alpha", + "lora_unet_input_blocks_6_0_op.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_3_1_proj_out.hada_w2_b", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_input_blocks_2_0_in_layers_2.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_8_0_emb_layers_1.hada_w1_b", + "lora_unet_output_blocks_3_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_3_0_op.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.hada_w2_a", + 
"lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_1_0_in_layers_2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_proj_in.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_proj_out.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.alpha", + 
"lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_7_0_skip_connection.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_6_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_1_0_emb_layers_1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.hada_w2_a", + 
"lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_1_0_emb_layers_1.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.hada_w1_b", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.hada_w2_a", + 
"lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_5_0_out_layers_3.hada_w2_a", + "lora_unet_input_blocks_5_0_out_layers_3.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_15_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.hada_w1_a", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_6_0_op.hada_w1_b", + "lora_unet_output_blocks_7_0_emb_layers_1.hada_w1_a", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_5_0_in_layers_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_11_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.hada_w1_b", + "lora_unet_middle_block_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.hada_w2_b", + 
"lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_4_0_out_layers_3.alpha", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_11_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_7_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_proj_out.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_4_1_proj_in.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_output_blocks_5_0_skip_connection.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_2_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_2_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.alpha", + 
"lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_input_blocks_5_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_8_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_0_emb_layers_1.hada_w1_b", + "lora_te1_text_model_encoder_layers_6_self_attn_q_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_6_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.hada_w1_b", + 
"lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_7_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_8_0_in_layers_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_5_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_5_2_conv.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_8_0_emb_layers_1.hada_w1_a", + "lora_unet_input_blocks_8_1_proj_out.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_proj_out.hada_w2_b", + "lora_unet_output_blocks_3_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + 
"lora_te2_text_model_encoder_layers_4_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_3_0_out_layers_3.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_middle_block_2_emb_layers_1.hada_w1_a", + "lora_unet_input_blocks_8_0_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_1_0_skip_connection.hada_w2_a", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_4_0_in_layers_2.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_self_attn_v_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.alpha", + 
"lora_te1_text_model_encoder_layers_6_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_7_1_proj_in.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_4_0_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.alpha", + "lora_unet_output_blocks_0_1_proj_out.hada_w2_a", + "lora_unet_input_blocks_7_1_proj_out.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_7_0_skip_connection.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.hada_w1_a", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.alpha", + 
"lora_unet_input_blocks_4_0_emb_layers_1.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_output_blocks_2_0_emb_layers_1.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_0_out_layers_3.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_5_1_proj_in.hada_w1_b", + "lora_unet_middle_block_2_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_0_skip_connection.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.hada_w2_b", + 
"lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_4_0_out_layers_3.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_0_out_layers_3.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.hada_w1_b", + 
"lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_4_0_in_layers_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_8_0_out_layers_3.alpha", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.hada_w1_a", + 
"lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_7_0_out_layers_3.hada_w2_a", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_3_0_op.hada_w1_a", + "lora_unet_input_blocks_4_1_proj_out.hada_w2_a", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_0_emb_layers_1.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.hada_w1_a", + 
"lora_te2_text_model_encoder_layers_20_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_2_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.hada_w1_b", + "lora_te1_text_model_encoder_layers_1_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.hada_w2_b", + 
"lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_4_self_attn_v_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_8_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_2_2_conv.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.hada_w2_a", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.alpha", + 
"lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_22_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_2_0_in_layers_2.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_3_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_proj_in.hada_w2_a", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.hada_w1_a", + 
"lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_3_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_0_emb_layers_1.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_0_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_proj_out.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_11_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_5_0_skip_connection.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.hada_w2_b", + 
"lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_2_0_emb_layers_1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_8_0_in_layers_2.hada_w1_b", + "lora_unet_middle_block_2_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_3_0_in_layers_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_te1_text_model_encoder_layers_3_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_15_self_attn_out_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.alpha", + 
"lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_proj_in.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_20_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_5_0_skip_connection.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_8_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.hada_w2_b", + 
"lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_8_0_out_layers_3.hada_w2_a", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_0_skip_connection.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_8_0_skip_connection.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_te1_text_model_encoder_layers_11_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.alpha", + 
"lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_9_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.hada_w1_a", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w2_a", + 
"lora_te1_text_model_encoder_layers_8_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_2_conv.hada_w1_b", + "lora_unet_output_blocks_2_0_out_layers_3.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_0_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.alpha", + "lora_unet_input_blocks_5_0_emb_layers_1.hada_w1_b", + "lora_te2_text_model_encoder_layers_15_mlp_fc1.hada_w1_b", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_0_0_in_layers_2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.alpha", + 
"lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.alpha", + "lora_unet_output_blocks_7_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_8_0_skip_connection.hada_w1_a", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_23_self_attn_q_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_6_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_0_0_emb_layers_1.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_7_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.alpha", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_7_0_in_layers_2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.alpha", + 
"lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_proj_out.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_1_0_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.hada_w1_a", + "lora_unet_middle_block_0_emb_layers_1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_3_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + 
"lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.hada_w2_a", + "lora_unet_middle_block_2_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_proj_out.hada_w1_a", + "lora_unet_output_blocks_4_1_proj_in.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_0_skip_connection.hada_w1_b", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_proj_out.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.hada_w1_b", + 
"lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_15_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_5_0_skip_connection.hada_w2_b", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_8_0_skip_connection.hada_w2_b", + "lora_unet_output_blocks_4_1_proj_in.hada_w2_b", + "lora_te1_text_model_encoder_layers_5_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_7_0_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_middle_block_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.hada_w1_b", + 
"lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_3_0_skip_connection.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_0_skip_connection.hada_w2_b", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_2_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_2_0_skip_connection.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_4_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_middle_block_2_out_layers_3.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.hada_w2_b", + 
"lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_proj_out.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_q_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_3_0_skip_connection.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.hada_w1_b", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.hada_w1_b", + 
"lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_6_self_attn_v_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_0_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_1_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_7_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.alpha", + "lora_unet_middle_block_0_in_layers_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_20_self_attn_q_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_6_0_op.alpha", + "lora_unet_input_blocks_7_0_skip_connection.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_8_0_out_layers_3.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.alpha", + 
"lora_te2_text_model_encoder_layers_18_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_proj_out.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.hada_w2_b", + "lora_unet_middle_block_2_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_0_out_layers_3.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + 
"lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_4_1_proj_in.alpha", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_7_0_in_layers_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_proj_out.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_8_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_3_1_proj_out.hada_w1_b", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_middle_block_0_out_layers_3.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_output_blocks_8_0_emb_layers_1.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_0_emb_layers_1.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_output_blocks_1_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.alpha", + 
"lora_te2_text_model_encoder_layers_20_self_attn_q_proj.alpha", + "lora_unet_input_blocks_3_0_op.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_proj_out.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_8_self_attn_k_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_input_blocks_7_0_in_layers_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_7_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_7_0_emb_layers_1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_3_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_11_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_1_0_in_layers_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.hada_w2_a", + 
"lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_5_2_conv.alpha", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_7_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_5_1_proj_out.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_6_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_0_0_out_layers_3.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_15_self_attn_out_proj.hada_w1_b", + 
"lora_unet_output_blocks_1_0_in_layers_2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_0_skip_connection.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_0_0_emb_layers_1.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_4_0_skip_connection.hada_w2_a", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_20_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_26_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.hada_w2_b", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.hada_w2_a", + 
"lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_4_1_proj_in.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_27_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_4_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_8_0_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.hada_w2_b", + 
"lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_1_0_out_layers_3.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_9_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_1_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_6_0_skip_connection.hada_w2_b", + "lora_unet_middle_block_1_proj_out.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_proj_in.hada_w2_a", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_8_0_skip_connection.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.hada_w2_a", + 
"lora_te1_text_model_encoder_layers_5_self_attn_out_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.hada_w2_b", + "lora_unet_middle_block_1_proj_in.hada_w2_a", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_15_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_4_1_proj_in.hada_w1_a", + "lora_unet_input_blocks_4_0_out_layers_3.hada_w1_a", + "lora_te1_text_model_encoder_layers_3_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.alpha", + 
"lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_4_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_proj_out.hada_w1_a", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_6_0_in_layers_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_4_0_skip_connection.hada_w2_b", + "lora_unet_output_blocks_5_1_proj_in.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_1_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.hada_w1_a", + 
"lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_0_in_layers_2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_0_0_out_layers_3.alpha", + "lora_te2_text_model_encoder_layers_29_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_5_1_proj_in.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_4_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_0_out_layers_3.hada_w2_a", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_output_blocks_6_0_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_v.alpha", + 
"lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_3_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_1_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_0_skip_connection.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_8_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_proj_out.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.hada_w2_b", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.hada_w1_a", + 
"lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_6_0_op.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.alpha", + "lora_unet_output_blocks_6_0_out_layers_3.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.alpha", + "lora_unet_input_blocks_5_1_proj_out.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_output_blocks_4_0_skip_connection.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_18_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_8_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + 
"lora_unet_input_blocks_8_1_proj_in.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_0_0_emb_layers_1.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_6_0_out_layers_3.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_3_1_proj_in.hada_w2_a", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_2_0_out_layers_3.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_0_emb_layers_1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_8_0_out_layers_3.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.hada_w1_a", + 
"lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_11_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_30_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_4_0_skip_connection.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_8_0_emb_layers_1.hada_w2_a", + "lora_te2_text_model_encoder_layers_20_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_6_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + 
"lora_unet_output_blocks_4_0_in_layers_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.alpha", + "lora_unet_output_blocks_6_0_out_layers_3.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.hada_w2_b", + "lora_te2_text_model_encoder_layers_1_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_5_1_proj_in.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.alpha", + "lora_te1_text_model_encoder_layers_8_self_attn_q_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.hada_w2_b", + 
"lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_6_0_skip_connection.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_26_mlp_fc2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_3_0_skip_connection.hada_w2_a", + "lora_te2_text_model_encoder_layers_11_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_20_mlp_fc2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.alpha", + 
"lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_14_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.alpha", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.hada_w2_a", + 
"lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_0_emb_layers_1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_22_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_1_0_in_layers_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.alpha", + 
"lora_unet_middle_block_1_transformer_blocks_7_attn2_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_2_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_2_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_1_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_20_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.hada_w2_b", + "lora_unet_middle_block_2_in_layers_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_14_self_attn_v_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.hada_w1_a", + "lora_unet_middle_block_2_emb_layers_1.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_16_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.hada_w2_a", + 
"lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_5_self_attn_out_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_2_0_out_layers_3.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_5_1_proj_in.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.hada_w1_a", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_10_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.alpha", + "lora_te1_text_model_encoder_layers_8_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_4_0_skip_connection.hada_w1_a", + 
"lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_6_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.alpha", + "lora_unet_input_blocks_7_0_skip_connection.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_3_0_op.hada_w2_b", + "lora_unet_input_blocks_7_1_proj_out.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_0_out_layers_3.hada_w1_a", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_6_0_skip_connection.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_7_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_4_1_proj_in.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_proj_in.hada_w1_a", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.hada_w1_b", + 
"lora_te2_text_model_encoder_layers_23_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.alpha", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.alpha", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_20_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_2_conv.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_proj_out.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_proj_in.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_5_0_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_input_blocks_4_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_2_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.hada_w1_b", + 
"lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_0_emb_layers_1.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_7_0_skip_connection.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_5_1_proj_in.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_20_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_13_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.alpha", + 
"lora_te2_text_model_encoder_layers_5_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_4_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_4_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_5_0_emb_layers_1.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + 
"lora_te2_text_model_encoder_layers_15_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_unet_output_blocks_4_1_proj_out.hada_w2_a", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_30_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_2_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_2_0_emb_layers_1.alpha", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_proj_in.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.hada_w1_a", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_self_attn_out_proj.alpha", + "lora_unet_input_blocks_1_0_in_layers_2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_9_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_output_blocks_3_1_proj_out.alpha", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_unet_output_blocks_5_1_proj_out.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_4_mlp_fc2.alpha", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.hada_w1_a", + 
"lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_12_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_4_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_11_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_output_blocks_5_1_proj_out.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.alpha", + "lora_te1_text_model_encoder_layers_1_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_2_1_proj_in.alpha", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.hada_w2_b", + "lora_te1_text_model_encoder_layers_6_mlp_fc2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_v.hada_w1_a", + 
"lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_output_blocks_2_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_8_0_in_layers_2.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_2_0_emb_layers_1.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_8_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_15_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_0_0_in_layers_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_5_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_19_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_31_self_attn_k_proj.hada_w1_a", + 
"lora_unet_output_blocks_3_0_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_0_skip_connection.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_28_self_attn_v_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_0_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_22_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_21_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.hada_w1_b", + 
"lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_0_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_4_0_skip_connection.hada_w1_a", + "lora_unet_output_blocks_5_0_emb_layers_1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_6_self_attn_out_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_7_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_proj_in.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_0_skip_connection.hada_w1_a", + 
"lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_3_0_emb_layers_1.hada_w1_a", + "lora_te2_text_model_encoder_layers_15_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_15_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_middle_block_0_emb_layers_1.hada_w2_a", + "lora_unet_output_blocks_1_0_out_layers_3.hada_w1_a", + "lora_unet_input_blocks_2_0_in_layers_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_proj_in.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_7_0_emb_layers_1.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_7_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_0_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_19_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_self_attn_out_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_9_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_5_2_conv.hada_w2_a", + "lora_unet_middle_block_1_proj_in.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.alpha", + "lora_unet_output_blocks_8_0_out_layers_3.hada_w2_b", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_11_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_proj_in.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.hada_w1_a", + 
"lora_unet_middle_block_1_transformer_blocks_3_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_8_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_0_emb_layers_1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_9_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_30_mlp_fc2.alpha", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.hada_w2_a", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_23_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_0_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_4_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_self_attn_q_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_2_0_out_layers_3.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_9_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_31_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_7_0_skip_connection.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_v.hada_w2_a", + "lora_te1_text_model_encoder_layers_0_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.hada_w1_b", + 
"lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.hada_w1_a", + "lora_unet_middle_block_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_8_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_output_blocks_6_0_emb_layers_1.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_22_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_7_0_out_layers_3.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_26_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_25_self_attn_q_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.hada_w2_b", 
+ "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_3_0_emb_layers_1.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_8_1_proj_out.alpha", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.hada_w2_b", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_8_0_in_layers_2.hada_w1_a", + "lora_unet_output_blocks_2_0_skip_connection.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_2_0_in_layers_2.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_7_0_skip_connection.hada_w1_a", + "lora_te2_text_model_encoder_layers_8_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_5_1_proj_in.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_10_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_26_self_attn_v_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + 
"lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_6_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_4_1_proj_out.hada_w2_b", + "lora_unet_output_blocks_6_0_emb_layers_1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_8_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_11_mlp_fc2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_18_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_te1_text_model_encoder_layers_0_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_27_mlp_fc2.alpha", + "lora_te1_text_model_encoder_layers_6_mlp_fc1.alpha", + "lora_unet_middle_block_2_out_layers_3.hada_w2_a", + "lora_te2_text_model_encoder_layers_26_mlp_fc1.hada_w2_b", + "lora_te1_text_model_encoder_layers_2_self_attn_out_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_2.hada_w2_b", + "lora_te1_text_model_encoder_layers_5_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_21_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.hada_w1_a", + 
"lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_11_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_1_0_emb_layers_1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_0_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_self_attn_k_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.hada_w1_a", + 
"lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_proj_in.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_10_self_attn_out_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_5_mlp_fc1.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.alpha", + "lora_te1_text_model_encoder_layers_2_self_attn_k_proj.hada_w2_b", + "lora_unet_middle_block_1_proj_in.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_3_1_proj_in.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_0_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_2_0_out_layers_3.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_k.hada_w2_b", 
+ "lora_unet_output_blocks_5_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_19_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_29_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_1_0_emb_layers_1.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_7_0_in_layers_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_1_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_6_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_23_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_3_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_ff_net_2.hada_w2_a", + 
"lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_29_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_0_0_skip_connection.alpha", + "lora_te2_text_model_encoder_layers_24_self_attn_out_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_4_0_in_layers_2.hada_w1_a", + "lora_unet_output_blocks_6_0_skip_connection.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_2_0_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_27_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_1_0_in_layers_2.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_q.hada_w2_a", + 
"lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_16_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_v.hada_w2_a", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_proj_out.hada_w2_b", + "lora_unet_middle_block_1_proj_out.hada_w1_a", + "lora_unet_output_blocks_3_1_proj_out.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_4_1_proj_out.alpha", + "lora_unet_input_blocks_4_1_proj_in.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.alpha", + "lora_unet_output_blocks_6_0_in_layers_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_proj_out.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_2_mlp_fc1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_0_0_emb_layers_1.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + 
"lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.hada_w2_b", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_8_0_emb_layers_1.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_proj_in.hada_w1_a", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_8_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_26_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_28_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_q.hada_w2_a", + "lora_unet_middle_block_1_proj_out.hada_w2_a", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.hada_w1_b", + 
"lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_19_self_attn_k_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_8_mlp_fc2.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_unet_output_blocks_4_0_out_layers_3.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_3_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_4_1_proj_out.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_q.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.alpha", + 
"lora_unet_output_blocks_0_1_proj_out.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_0_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_3_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_3_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_15_self_attn_k_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_k.hada_w1_b", + "lora_te1_text_model_encoder_layers_11_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_0_self_attn_k_proj.hada_w2_a", + "lora_unet_middle_block_1_proj_out.alpha", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_v_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_18_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_0_in_layers_2.alpha", + "lora_te2_text_model_encoder_layers_12_mlp_fc2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.hada_w1_a", + 
"lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_3_0_op.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_12_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_q.alpha", + "lora_te2_text_model_encoder_layers_22_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_2_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_2_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_q_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.alpha", + 
"lora_te2_text_model_encoder_layers_22_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_4_0_skip_connection.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_2_emb_layers_1.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_k_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_20_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_5_mlp_fc2.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_te1_text_model_encoder_layers_5_self_attn_out_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_4_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_4_self_attn_out_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_0_out_layers_3.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + 
"lora_te1_text_model_encoder_layers_5_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_6_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_k.alpha", + "lora_unet_output_blocks_1_0_skip_connection.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.alpha", + "lora_unet_output_blocks_2_0_emb_layers_1.alpha", + "lora_unet_output_blocks_1_0_out_layers_3.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_3_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_3_0_emb_layers_1.hada_w2_b", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_17_self_attn_v_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_20_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_5_0_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_5_1_proj_in.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_0_mlp_fc1.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_10_self_attn_q_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_te1_text_model_encoder_layers_1_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.hada_w1_a", + 
"lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_7_self_attn_v_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_self_attn_k_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_27_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_3_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_5_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_9_ff_net_0_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_11_mlp_fc1.alpha", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_15_mlp_fc2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_1_0_in_layers_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_1_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.hada_w1_b", + "lora_unet_input_blocks_4_0_emb_layers_1.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + 
"lora_unet_output_blocks_8_0_in_layers_2.alpha", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_3_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_14_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_23_mlp_fc1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_30_self_attn_out_proj.alpha", + "lora_te1_text_model_encoder_layers_7_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_input_blocks_5_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_3_0_in_layers_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_5_1_proj_out.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_q.hada_w2_b", + "lora_te2_text_model_encoder_layers_15_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_self_attn_out_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_13_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_ff_net_2.hada_w2_b", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_0_in_layers_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.alpha", + "lora_te1_text_model_encoder_layers_6_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + 
"lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_proj_in.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_22_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_5_1_proj_out.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_q.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_5_0_skip_connection.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_2_self_attn_out_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.hada_w1_b", + "lora_unet_middle_block_2_out_layers_3.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.hada_w1_b", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_12_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_1_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.alpha", + 
"lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_0_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_4_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_2_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_28_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn1_to_out_0.hada_w1_a", + "lora_te1_text_model_encoder_layers_10_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn1_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn2_to_v.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_0_0_skip_connection.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_proj_out.hada_w2_a", + 
"lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_1_mlp_fc1.alpha", + "lora_te1_text_model_encoder_layers_11_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn1_to_out_0.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_mlp_fc2.hada_w2_b", + "lora_te2_text_model_encoder_layers_5_self_attn_k_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_mlp_fc1.alpha", + "lora_te2_text_model_encoder_layers_9_mlp_fc2.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_26_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_0_in_layers_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_4_0_out_layers_3.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_7_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_v.hada_w2_a", + "lora_unet_output_blocks_7_0_skip_connection.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_v.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_output_blocks_7_0_in_layers_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_9_self_attn_q_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_24_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_14_mlp_fc1.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_2.hada_w1_b", + 
"lora_unet_input_blocks_7_1_transformer_blocks_3_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_te1_text_model_encoder_layers_5_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_5_0_in_layers_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_ff_net_2.hada_w1_a", + "lora_te2_text_model_encoder_layers_18_mlp_fc2.hada_w2_a", + "lora_te1_text_model_encoder_layers_6_self_attn_out_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_29_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_7_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_22_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_te2_text_model_encoder_layers_19_self_attn_k_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.hada_w2_b", + "lora_te2_text_model_encoder_layers_1_mlp_fc2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_31_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_te2_text_model_encoder_layers_18_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.hada_w1_a", + "lora_unet_middle_block_2_emb_layers_1.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_5_0_emb_layers_1.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_q.hada_w2_b", + 
"lora_unet_input_blocks_4_1_proj_out.alpha", + "lora_unet_output_blocks_1_0_skip_connection.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.hada_w1_a", + "lora_unet_output_blocks_1_1_proj_in.alpha", + "lora_unet_input_blocks_5_0_emb_layers_1.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_output_blocks_1_0_skip_connection.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_28_mlp_fc2.alpha", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_3_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + "lora_unet_output_blocks_4_0_emb_layers_1.alpha", + "lora_te2_text_model_encoder_layers_25_mlp_fc2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn1_to_q.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_self_attn_q_proj.hada_w1_b", + "lora_unet_middle_block_2_in_layers_2.hada_w1_b", + "lora_unet_output_blocks_5_1_proj_out.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_2_2_conv.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_0_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_16_mlp_fc1.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn1_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_k.hada_w1_b", + 
"lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_31_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_29_self_attn_out_proj.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_2_mlp_fc1.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_mlp_fc1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_7_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.hada_w2_b", + "lora_unet_input_blocks_5_1_transformer_blocks_0_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_k.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_0_self_attn_out_proj.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_unet_input_blocks_5_0_out_layers_3.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_out_0.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_0_skip_connection.alpha", + "lora_te2_text_model_encoder_layers_4_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_28_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_0_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_k.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_q.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_0_proj.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_11_self_attn_v_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.hada_w1_b", + "lora_unet_input_blocks_8_0_emb_layers_1.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn2_to_v.hada_w2_a", + 
"lora_te2_text_model_encoder_layers_26_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_5_0_emb_layers_1.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_ff_net_0_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_out_0.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_out_0.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_9_ff_net_2.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.alpha", + "lora_unet_output_blocks_0_0_emb_layers_1.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_4_ff_net_2.hada_w2_a", + "lora_te1_text_model_encoder_layers_8_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_v.hada_w1_a", + "lora_te1_text_model_encoder_layers_10_self_attn_out_proj.alpha", + "lora_te1_text_model_encoder_layers_11_self_attn_q_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_8_mlp_fc1.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_29_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_27_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_5_0_in_layers_2.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_k.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_6_self_attn_v_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_self_attn_out_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_q.hada_w2_a", + "lora_te2_text_model_encoder_layers_10_self_attn_k_proj.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_out_0.alpha", + "lora_unet_output_blocks_4_0_emb_layers_1.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_16_self_attn_q_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_6_0_emb_layers_1.hada_w1_a", + "lora_te2_text_model_encoder_layers_13_self_attn_v_proj.alpha", + "lora_unet_output_blocks_0_1_proj_out.alpha", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_q.hada_w2_b", + 
"lora_te1_text_model_encoder_layers_9_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_8_attn2_to_out_0.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_ff_net_2.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_7_self_attn_q_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_10_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_4_mlp_fc2.hada_w1_b", + "lora_te2_text_model_encoder_layers_17_mlp_fc2.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_1_ff_net_2.alpha", + "lora_te2_text_model_encoder_layers_10_mlp_fc1.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_q.hada_w1_a", + "lora_te1_text_model_encoder_layers_11_mlp_fc1.hada_w2_a", + "lora_unet_output_blocks_4_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_26_self_attn_q_proj.hada_w2_b", + "lora_unet_input_blocks_5_0_in_layers_2.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_22_self_attn_v_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_4_1_proj_out.hada_w1_b", + "lora_te1_text_model_encoder_layers_3_self_attn_k_proj.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_q.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_v.hada_w2_b", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.alpha", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_k.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_1_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_22_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_ff_net_0_proj.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_q.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn2_to_q.hada_w1_a", + 
"lora_unet_input_blocks_4_1_proj_in.hada_w2_a", + "lora_te2_text_model_encoder_layers_12_self_attn_q_proj.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_2.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_q.alpha", + "lora_unet_middle_block_1_transformer_blocks_9_ff_net_0_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_te2_text_model_encoder_layers_18_mlp_fc1.alpha", + "lora_unet_input_blocks_8_1_proj_in.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_3_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_ff_net_0_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_1_0_in_layers_2.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_1_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_0_out_layers_3.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_ff_net_0_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn1_to_out_0.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_k.hada_w1_a", + "lora_te1_text_model_encoder_layers_8_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_3_0_emb_layers_1.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_30_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_ff_net_2.alpha", + "lora_unet_input_blocks_5_1_proj_out.hada_w2_b", + "lora_te2_text_model_encoder_layers_23_self_attn_v_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_4_0_emb_layers_1.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_q.hada_w2_b", + 
"lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_out_0.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_ff_net_0_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_9_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_2_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_k.alpha", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_input_blocks_5_0_in_layers_2.hada_w1_a", + "lora_te1_text_model_encoder_layers_1_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.hada_w2_a", + "lora_te2_text_model_encoder_layers_11_mlp_fc1.hada_w2_a", + "lora_te2_text_model_encoder_layers_14_mlp_fc2.hada_w2_a", + "lora_te2_text_model_encoder_layers_6_mlp_fc1.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn2_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn1_to_out_0.alpha", + "lora_unet_input_blocks_5_0_emb_layers_1.hada_w2_a", + "lora_unet_input_blocks_7_0_out_layers_3.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_ff_net_2.hada_w2_a", + "lora_te1_text_model_encoder_layers_2_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_self_attn_q_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_8_self_attn_out_proj.hada_w1_a", + "lora_unet_output_blocks_2_2_conv.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_v.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_7_attn1_to_v.hada_w2_b", + "lora_unet_output_blocks_6_0_in_layers_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_8_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_5_attn2_to_k.hada_w2_b", + "lora_te2_text_model_encoder_layers_21_self_attn_k_proj.hada_w2_b", + "lora_te1_text_model_encoder_layers_3_mlp_fc1.hada_w1_a", + "lora_te2_text_model_encoder_layers_0_mlp_fc2.hada_w1_a", + "lora_te2_text_model_encoder_layers_21_self_attn_v_proj.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_2_attn1_to_k.alpha", + "lora_unet_output_blocks_6_0_skip_connection.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_6_0_out_layers_3.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.hada_w2_a", + 
"lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_output_blocks_3_0_skip_connection.hada_w1_b", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_attn1_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_17_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_10_self_attn_v_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_5_attn2_to_k.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_1_attn2_to_k.hada_w1_b", + "lora_te2_text_model_encoder_layers_16_mlp_fc2.hada_w1_b", + "lora_te1_text_model_encoder_layers_6_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_5_1_proj_out.alpha", + "lora_unet_input_blocks_8_0_emb_layers_1.alpha", + "lora_unet_middle_block_1_transformer_blocks_3_attn2_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_q_proj.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_3_attn1_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_8_attn2_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_k.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_24_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_7_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_3_ff_net_2.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_attn2_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_20_self_attn_v_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_3_0_out_layers_3.hada_w1_a", + "lora_te2_text_model_encoder_layers_1_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn2_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_10_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.hada_w1_b", + 
"lora_unet_output_blocks_1_1_transformer_blocks_4_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn1_to_out_0.hada_w2_b", + "lora_te2_text_model_encoder_layers_9_self_attn_v_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_13_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn2_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn2_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.hada_w2_b", + "lora_te1_text_model_encoder_layers_11_self_attn_k_proj.hada_w2_b", + "lora_te2_text_model_encoder_layers_7_mlp_fc2.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_4_ff_net_0_proj.alpha", + "lora_te2_text_model_encoder_layers_30_mlp_fc1.hada_w1_b", + "lora_te2_text_model_encoder_layers_23_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_4_0_in_layers_2.alpha", + "lora_te2_text_model_encoder_layers_13_self_attn_q_proj.alpha", + "lora_te2_text_model_encoder_layers_6_self_attn_out_proj.alpha", + "lora_unet_input_blocks_4_1_transformer_blocks_0_attn1_to_v.hada_w1_b", + "lora_te2_text_model_encoder_layers_25_self_attn_out_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_3_1_proj_in.alpha", + "lora_unet_input_blocks_4_0_in_layers_2.alpha", + "lora_unet_middle_block_1_transformer_blocks_5_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_4_mlp_fc1.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_out_0.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_5_0_out_layers_3.hada_w1_b", + "lora_te2_text_model_encoder_layers_9_mlp_fc1.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_28_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_q.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_7_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_14_self_attn_q_proj.hada_w2_a", + "lora_unet_output_blocks_5_1_transformer_blocks_1_attn1_to_q.hada_w1_b", + "lora_te1_text_model_encoder_layers_0_self_attn_out_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn1_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_2_self_attn_q_proj.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_te2_text_model_encoder_layers_25_self_attn_v_proj.hada_w1_a", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_te1_text_model_encoder_layers_9_self_attn_k_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn2_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_k.hada_w2_a", + 
"lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn2_to_q.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_7_ff_net_2.hada_w2_b", + "lora_te2_text_model_encoder_layers_3_self_attn_q_proj.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_9_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_ff_net_0_proj.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn2_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_9_attn1_to_k.hada_w2_a", + "lora_te1_text_model_encoder_layers_1_self_attn_out_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_6_self_attn_k_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_5_mlp_fc2.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn2_to_out_0.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_v.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn2_to_v.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_attn2_to_k.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_v.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_2_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_4_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_attn1_to_q.hada_w1_a", + "lora_unet_output_blocks_1_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_4_1_transformer_blocks_1_attn1_to_k.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_5_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_2_ff_net_2.hada_w1_a", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_q.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_out_0.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn2_to_v.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_6_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn1_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_7_1_transformer_blocks_3_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_mlp_fc1.alpha", + "lora_unet_output_blocks_3_1_transformer_blocks_0_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn2_to_v.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_5_attn2_to_out_0.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_9_attn2_to_q.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.hada_w2_b", + "lora_te1_text_model_encoder_layers_9_mlp_fc1.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_2_attn2_to_out_0.hada_w1_a", + "lora_unet_output_blocks_0_1_transformer_blocks_9_attn1_to_k.hada_w2_b", 
+ "lora_unet_input_blocks_5_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_4_attn1_to_out_0.hada_w1_b", + "lora_te2_text_model_encoder_layers_16_self_attn_out_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_25_self_attn_k_proj.alpha", + "lora_unet_input_blocks_7_1_transformer_blocks_6_attn2_to_v.alpha", + "lora_unet_output_blocks_0_1_transformer_blocks_7_ff_net_2.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_self_attn_out_proj.hada_w2_a", + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj.alpha", + "lora_te1_text_model_encoder_layers_7_mlp_fc1.hada_w1_a", + "lora_unet_input_blocks_8_1_transformer_blocks_5_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_2_attn1_to_v.hada_w2_b", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_0_attn1_to_k.hada_w1_b", + "lora_unet_input_blocks_7_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_3_attn1_to_q.hada_w2_b", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_4_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn2_to_k.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_self_attn_out_proj.hada_w1_b", + "lora_te1_text_model_encoder_layers_11_self_attn_q_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn1_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_31_self_attn_v_proj.hada_w1_a", + "lora_te1_text_model_encoder_layers_11_self_attn_v_proj.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_5_attn1_to_v.alpha", + "lora_te2_text_model_encoder_layers_2_self_attn_v_proj.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_6_attn2_to_out_0.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_6_attn2_to_q.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_attn1_to_k.hada_w1_a", + "lora_unet_output_blocks_2_1_transformer_blocks_6_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_5_0_in_layers_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_4_ff_net_2.hada_w2_a", + "lora_unet_output_blocks_2_1_transformer_blocks_3_attn2_to_out_0.hada_w1_a", + "lora_unet_input_blocks_4_0_out_layers_3.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_7_attn1_to_k.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_q.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_9_attn1_to_out_0.hada_w1_b", + "lora_unet_middle_block_1_transformer_blocks_4_attn1_to_v.hada_w1_a", + "lora_te2_text_model_encoder_layers_7_self_attn_k_proj.hada_w1_b", + "lora_unet_output_blocks_5_1_transformer_blocks_0_attn2_to_v.hada_w2_b", + "lora_unet_output_blocks_0_1_transformer_blocks_0_ff_net_2.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn2_to_q.hada_w1_b", + "lora_te2_text_model_encoder_layers_2_self_attn_q_proj.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_ff_net_2.hada_w2_b", + "lora_unet_output_blocks_2_1_transformer_blocks_7_attn1_to_k.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_5_attn1_to_v.alpha", + 
"lora_unet_output_blocks_0_1_transformer_blocks_4_attn1_to_k.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.hada_w2_b", + "lora_unet_input_blocks_8_1_transformer_blocks_2_attn2_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_out_0.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_0_ff_net_2.hada_w2_a", + "lora_unet_input_blocks_7_0_out_layers_3.hada_w2_a", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.hada_w1_b", + "lora_te2_text_model_encoder_layers_4_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_4_0_skip_connection.alpha", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_3_attn2_to_q.hada_w2_b", + "lora_unet_input_blocks_7_1_transformer_blocks_2_attn1_to_k.hada_w1_a", + "lora_te2_text_model_encoder_layers_12_self_attn_v_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_6_attn2_to_q.hada_w1_b", + "lora_unet_output_blocks_0_1_transformer_blocks_3_attn1_to_out_0.hada_w1_a", + "lora_unet_middle_block_1_transformer_blocks_8_attn1_to_q.alpha", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_q.hada_w1_a", + "lora_unet_input_blocks_5_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_te2_text_model_encoder_layers_19_self_attn_v_proj.alpha", + "lora_unet_output_blocks_2_1_transformer_blocks_8_ff_net_0_proj.hada_w1_b", + "lora_unet_input_blocks_8_1_transformer_blocks_1_attn1_to_v.hada_w1_b", + "lora_te1_text_model_encoder_layers_4_self_attn_k_proj.hada_w2_a", + "lora_unet_input_blocks_4_1_transformer_blocks_1_attn2_to_v.hada_w2_a", + "lora_unet_middle_block_1_transformer_blocks_7_ff_net_2.alpha", + "lora_unet_output_blocks_1_1_transformer_blocks_4_ff_net_0_proj.hada_w1_b", + "lora_unet_output_blocks_0_1_proj_in.hada_w2_a", + "lora_unet_output_blocks_0_1_transformer_blocks_8_attn1_to_out_0.hada_w2_b", + "lora_unet_output_blocks_1_1_transformer_blocks_7_attn1_to_out_0.hada_w2_a", + "lora_te2_text_model_encoder_layers_5_mlp_fc1.hada_w1_b", + "lora_unet_output_blocks_1_1_transformer_blocks_0_attn1_to_q.hada_w2_a", + "lora_unet_input_blocks_8_1_transformer_blocks_0_attn1_to_out_0.hada_w1_b", + "lora_unet_output_blocks_2_1_transformer_blocks_1_attn1_to_v.alpha", + "lora_unet_output_blocks_4_1_transformer_blocks_0_ff_net_0_proj.hada_w1_a", + "lora_te2_text_model_encoder_layers_13_self_attn_out_proj.hada_w2_b", + "lora_unet_mid_block_resnets_0_time_emb_proj.oft_diag", + "lora_te_text_model_encoder_layers_3_self_attn_v_proj.oft_diag", + "lora_te_text_model_encoder_layers_2_self_attn_q_proj.oft_diag", + "lora_unet_up_blocks_0_resnets_1_conv2.oft_diag", + "lora_unet_up_blocks_1_attentions_1_proj_out.oft_diag", + "lora_unet_up_blocks_3_attentions_2_proj_out.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_up_blocks_3_attentions_1_proj_out.oft_diag", + "lora_unet_up_blocks_1_resnets_2_conv1.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_3_resnets_0_conv_shortcut.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_down_blocks_1_downsamplers_0_conv.oft_diag", + "lora_te_text_model_encoder_layers_1_self_attn_q_proj.oft_diag", + "lora_unet_down_blocks_0_attentions_0_proj_out.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.oft_diag", + 
"lora_unet_down_blocks_3_resnets_0_time_emb_proj.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_11_self_attn_out_proj.oft_diag", + "lora_te_text_model_encoder_layers_1_self_attn_k_proj.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_te_text_model_encoder_layers_5_self_attn_v_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_2_proj_out.oft_diag", + "lora_unet_up_blocks_2_attentions_0_proj_out.oft_diag", + "lora_unet_up_blocks_3_attentions_2_proj_in.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_down_blocks_1_attentions_1_proj_out.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_2_downsamplers_0_conv.oft_diag", + "lora_unet_down_blocks_2_resnets_1_time_emb_proj.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_0_self_attn_q_proj.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_3_resnets_2_conv1.oft_diag", + "lora_unet_down_blocks_2_resnets_0_conv_shortcut.oft_diag", + "lora_unet_up_blocks_0_resnets_2_conv1.oft_diag", + "lora_unet_down_blocks_0_resnets_1_conv2.oft_diag", + "lora_unet_time_embedding_linear_2.oft_diag", + "lora_te_text_model_encoder_layers_3_mlp_fc1.oft_diag", + "lora_unet_mid_block_resnets_1_conv2.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_down_blocks_0_resnets_0_conv1.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_2_resnets_2_conv_shortcut.oft_diag", + "lora_unet_up_blocks_1_resnets_1_time_emb_proj.oft_diag", + 
"lora_te_text_model_encoder_layers_11_self_attn_q_proj.oft_diag", + "lora_te_text_model_encoder_layers_4_self_attn_out_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_1_attentions_1_proj_in.oft_diag", + "lora_te_text_model_encoder_layers_5_self_attn_k_proj.oft_diag", + "lora_unet_time_embedding_linear_1.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_te_text_model_encoder_layers_8_mlp_fc2.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_up_blocks_1_resnets_2_time_emb_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_mid_block_resnets_0_conv1.oft_diag", + "lora_unet_up_blocks_2_resnets_0_conv2.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_te_text_model_encoder_layers_5_mlp_fc2.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_te_text_model_encoder_layers_9_self_attn_q_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_0_resnets_0_conv_shortcut.oft_diag", + "lora_unet_up_blocks_3_resnets_0_conv1.oft_diag", + "lora_te_text_model_encoder_layers_11_mlp_fc1.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_2_attentions_2_proj_out.oft_diag", + "lora_unet_down_blocks_0_resnets_0_conv2.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_mid_block_resnets_1_time_emb_proj.oft_diag", + "lora_unet_down_blocks_0_resnets_0_time_emb_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_up_blocks_1_resnets_0_conv_shortcut.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_te_text_model_encoder_layers_0_self_attn_out_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_up_blocks_2_resnets_2_conv2.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_1_attentions_2_proj_in.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_down_blocks_1_resnets_1_time_emb_proj.oft_diag", + "lora_te_text_model_encoder_layers_5_mlp_fc1.oft_diag", + "lora_te_text_model_encoder_layers_7_self_attn_v_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_mid_block_attentions_0_proj_out.oft_diag", + "lora_te_text_model_encoder_layers_8_self_attn_out_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.oft_diag", + 
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_mid_block_attentions_0_proj_in.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_te_text_model_encoder_layers_0_mlp_fc2.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_10_self_attn_q_proj.oft_diag", + "lora_te_text_model_encoder_layers_11_mlp_fc2.oft_diag", + "lora_te_text_model_encoder_layers_10_mlp_fc2.oft_diag", + "lora_te_text_model_encoder_layers_8_self_attn_q_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_up_blocks_1_resnets_1_conv_shortcut.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_0_proj_in.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_3_resnets_1_conv2.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_up_blocks_2_attentions_1_proj_out.oft_diag", + "lora_te_text_model_encoder_layers_2_self_attn_v_proj.oft_diag", + "lora_unet_up_blocks_2_upsamplers_0_conv.oft_diag", + "lora_unet_up_blocks_2_resnets_2_conv1.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_0_resnets_2_conv_shortcut.oft_diag", + "lora_unet_up_blocks_3_resnets_2_conv2.oft_diag", + "lora_unet_up_blocks_0_resnets_0_time_emb_proj.oft_diag", + "lora_te_text_model_encoder_layers_10_self_attn_k_proj.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_te_text_model_encoder_layers_3_self_attn_k_proj.oft_diag", + "lora_te_text_model_encoder_layers_5_self_attn_out_proj.oft_diag", + "lora_te_text_model_encoder_layers_11_self_attn_k_proj.oft_diag", + "lora_te_text_model_encoder_layers_1_mlp_fc1.oft_diag", + "lora_unet_up_blocks_1_attentions_0_proj_out.oft_diag", + "lora_unet_up_blocks_2_resnets_1_conv2.oft_diag", + "lora_te_text_model_encoder_layers_1_self_attn_out_proj.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_te_text_model_encoder_layers_0_self_attn_v_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_3_resnets_1_conv1.oft_diag", + "lora_unet_down_blocks_0_attentions_0_proj_in.oft_diag", + "lora_unet_down_blocks_0_attentions_1_proj_in.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_0_resnets_1_time_emb_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_te_text_model_encoder_layers_7_mlp_fc1.oft_diag", + "lora_te_text_model_encoder_layers_4_self_attn_k_proj.oft_diag", + "lora_unet_up_blocks_0_resnets_0_conv2.oft_diag", + "lora_te_text_model_encoder_layers_10_self_attn_out_proj.oft_diag", + "lora_te_text_model_encoder_layers_2_self_attn_out_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_1_proj_in.oft_diag", + 
"lora_unet_down_blocks_3_resnets_1_time_emb_proj.oft_diag", + "lora_unet_up_blocks_3_resnets_1_time_emb_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_3_resnets_2_conv_shortcut.oft_diag", + "lora_unet_down_blocks_1_resnets_0_conv2.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_te_text_model_encoder_layers_3_self_attn_out_proj.oft_diag", + "lora_te_text_model_encoder_layers_2_self_attn_k_proj.oft_diag", + "lora_unet_up_blocks_2_resnets_0_time_emb_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_mid_block_resnets_0_conv2.oft_diag", + "lora_unet_up_blocks_1_resnets_0_time_emb_proj.oft_diag", + "lora_unet_up_blocks_1_resnets_2_conv_shortcut.oft_diag", + "lora_unet_down_blocks_2_resnets_0_conv1.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_down_blocks_1_resnets_1_conv1.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_down_blocks_2_resnets_1_conv1.oft_diag", + "lora_unet_down_blocks_1_resnets_0_conv_shortcut.oft_diag", + "lora_te_text_model_encoder_layers_0_self_attn_k_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_te_text_model_encoder_layers_8_mlp_fc1.oft_diag", + "lora_unet_up_blocks_3_resnets_0_conv2.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_te_text_model_encoder_layers_10_self_attn_v_proj.oft_diag", + "lora_te_text_model_encoder_layers_7_self_attn_out_proj.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_0_proj_in.oft_diag", + "lora_te_text_model_encoder_layers_1_self_attn_v_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_1_resnets_0_conv1.oft_diag", + "lora_unet_up_blocks_2_resnets_1_conv_shortcut.oft_diag", + "lora_unet_up_blocks_0_resnets_1_conv1.oft_diag", + "lora_unet_down_blocks_1_resnets_0_time_emb_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_down_blocks_1_resnets_0_conv1.oft_diag", + "lora_unet_down_blocks_3_resnets_1_conv1.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_3_resnets_0_conv1.oft_diag", + "lora_te_text_model_encoder_layers_10_mlp_fc1.oft_diag", + "lora_te_text_model_encoder_layers_3_self_attn_q_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_7_self_attn_k_proj.oft_diag", + "lora_unet_conv_in.oft_diag", + "lora_te_text_model_encoder_layers_8_self_attn_k_proj.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_te_text_model_encoder_layers_7_mlp_fc2.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.oft_diag", + 
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_down_blocks_0_resnets_1_conv1.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_mid_block_resnets_1_conv1.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_3_attentions_0_proj_out.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_te_text_model_encoder_layers_2_mlp_fc2.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_te_text_model_encoder_layers_9_self_attn_out_proj.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_6_self_attn_k_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_up_blocks_2_resnets_2_time_emb_proj.oft_diag", + "lora_te_text_model_encoder_layers_2_mlp_fc1.oft_diag", + "lora_te_text_model_encoder_layers_9_mlp_fc1.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_down_blocks_1_attentions_0_proj_in.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_up_blocks_1_upsamplers_0_conv.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_3_resnets_0_time_emb_proj.oft_diag", + "lora_unet_down_blocks_0_attentions_1_proj_out.oft_diag", + "lora_unet_down_blocks_2_resnets_1_conv2.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_down_blocks_1_attentions_1_proj_in.oft_diag", + "lora_unet_up_blocks_1_resnets_1_conv2.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_3_mlp_fc2.oft_diag", + "lora_te_text_model_encoder_layers_9_self_attn_v_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_te_text_model_encoder_layers_1_mlp_fc2.oft_diag", + "lora_unet_down_blocks_2_attentions_0_proj_in.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_up_blocks_2_attentions_0_proj_in.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_3_resnets_1_conv_shortcut.oft_diag", + 
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_te_text_model_encoder_layers_5_self_attn_q_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_te_text_model_encoder_layers_8_self_attn_v_proj.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_te_text_model_encoder_layers_9_self_attn_k_proj.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_te_text_model_encoder_layers_6_self_attn_q_proj.oft_diag", + "lora_unet_down_blocks_2_attentions_1_proj_out.oft_diag", + "lora_unet_up_blocks_0_resnets_2_conv2.oft_diag", + "lora_unet_up_blocks_0_resnets_0_conv1.oft_diag", + "lora_te_text_model_encoder_layers_4_mlp_fc2.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_4_self_attn_q_proj.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_0_resnets_2_time_emb_proj.oft_diag", + "lora_te_text_model_encoder_layers_7_self_attn_q_proj.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_down_blocks_0_downsamplers_0_conv.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.oft_diag", + "lora_te_text_model_encoder_layers_4_mlp_fc1.oft_diag", + "lora_unet_up_blocks_0_upsamplers_0_conv.oft_diag", + "lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_6_mlp_fc2.oft_diag", + "lora_unet_down_blocks_2_resnets_0_time_emb_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_down_blocks_2_resnets_0_conv2.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_up_blocks_3_resnets_2_time_emb_proj.oft_diag", + "lora_unet_down_blocks_3_resnets_1_conv2.oft_diag", + "lora_unet_conv_out.oft_diag", + "lora_unet_down_blocks_1_attentions_0_proj_out.oft_diag", + "lora_te_text_model_encoder_layers_9_mlp_fc2.oft_diag", + "lora_unet_down_blocks_1_resnets_1_conv2.oft_diag", + "lora_unet_up_blocks_1_resnets_2_conv2.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.oft_diag", + 
"lora_unet_down_blocks_3_resnets_0_conv2.oft_diag", + "lora_unet_down_blocks_0_resnets_1_time_emb_proj.oft_diag", + "lora_unet_up_blocks_1_resnets_1_conv1.oft_diag", + "lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.oft_diag", + "lora_unet_up_blocks_2_resnets_1_conv1.oft_diag", + "lora_unet_down_blocks_2_attentions_0_proj_out.oft_diag", + "lora_te_text_model_encoder_layers_6_mlp_fc1.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_te_text_model_encoder_layers_11_self_attn_v_proj.oft_diag", + "lora_te_text_model_encoder_layers_0_mlp_fc1.oft_diag", + "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_te_text_model_encoder_layers_6_self_attn_v_proj.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.oft_diag", + "lora_unet_down_blocks_2_attentions_1_proj_in.oft_diag", + "lora_unet_up_blocks_0_resnets_1_conv_shortcut.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.oft_diag", + "lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_te_text_model_encoder_layers_6_self_attn_out_proj.oft_diag", + "lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_up_blocks_2_resnets_0_conv_shortcut.oft_diag", + "lora_unet_up_blocks_2_resnets_1_time_emb_proj.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.oft_diag", + "lora_unet_up_blocks_2_attentions_1_proj_in.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.oft_diag", + "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.oft_diag", + "lora_unet_up_blocks_2_attentions_2_proj_in.oft_diag", + "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_up_blocks_2_resnets_0_conv1.oft_diag", + "lora_unet_up_blocks_1_resnets_0_conv2.oft_diag", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.oft_diag", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.oft_diag", + "lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.oft_diag", + "lora_te_text_model_encoder_layers_4_self_attn_v_proj.oft_diag", + "lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.oft_diag", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", + "lora_unet_down_blocks_2_resnets_0_conv_shortcut.lora_up.weight", + "lora_unet_down_blocks_2_resnets_1_time_emb_proj.lora_down.weight", + "lora_unet_up_blocks_1_resnets_0_conv_shortcut.lora_up.weight", + "lora_unet_mid_block_attentions_0_proj_out.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_proj_out.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_mid_block_resnets_0_conv2.lora_up.weight", + "lora_unet_mid_block_resnets_1_time_emb_proj.lora_up.weight", + "lora_unet_mid_block_attentions_0_proj_in.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_mid_block_attentions_0_proj_in.alpha", + 
"lora_unet_up_blocks_0_resnets_2_conv_shortcut.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight", + "lora_unet_down_blocks_2_downsamplers_0_conv.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", + "lora_unet_up_blocks_0_resnets_2_time_emb_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_down_blocks_3_resnets_0_conv2.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_up_blocks_1_attentions_0_proj_in.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", + "lora_unet_up_blocks_0_resnets_0_conv_shortcut.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight", + "lora_unet_up_blocks_0_upsamplers_0_conv.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_up_blocks_0_resnets_1_time_emb_proj.alpha", + "lora_unet_down_blocks_3_resnets_1_conv2.alpha", + "lora_unet_up_blocks_1_resnets_0_conv1.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_conv1.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_proj_in.alpha", + "lora_unet_up_blocks_1_resnets_0_time_emb_proj.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight", + "lora_unet_up_blocks_1_resnets_0_conv2.alpha", + "lora_unet_mid_block_resnets_1_conv1.lora_up.weight", + "lora_unet_down_blocks_2_resnets_0_conv1.lora_down.weight", + "lora_unet_mid_block_attentions_0_proj_in.lora_up.weight", + "lora_unet_up_blocks_0_resnets_2_conv_shortcut.lora_down.weight", + "lora_unet_down_blocks_2_resnets_1_conv1.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_conv2.lora_down.weight", + "lora_unet_down_blocks_3_resnets_1_conv2.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_down_blocks_2_attentions_0_proj_in.lora_down.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight", + "lora_unet_up_blocks_1_resnets_0_time_emb_proj.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", + 
"lora_unet_up_blocks_0_resnets_2_conv_shortcut.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", + "lora_unet_up_blocks_0_resnets_2_conv2.alpha", + "lora_unet_down_blocks_3_resnets_0_conv2.lora_up.weight", + "lora_unet_down_blocks_3_resnets_0_time_emb_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_proj_out.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", + "lora_unet_up_blocks_0_resnets_1_conv2.lora_down.weight", + "lora_unet_mid_block_resnets_0_conv1.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", + "lora_unet_mid_block_resnets_1_time_emb_proj.alpha", + "lora_unet_up_blocks_0_resnets_0_conv_shortcut.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_up_blocks_1_resnets_0_conv2.lora_up.weight", + "lora_unet_down_blocks_2_resnets_1_conv1.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_proj_out.alpha", + "lora_unet_mid_block_resnets_0_time_emb_proj.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight", + "lora_unet_up_blocks_0_resnets_2_conv1.alpha", + "lora_unet_down_blocks_3_resnets_0_conv1.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", + "lora_unet_down_blocks_3_resnets_1_time_emb_proj.lora_down.weight", + "lora_unet_up_blocks_0_upsamplers_0_conv.lora_up.weight", + "lora_unet_down_blocks_3_resnets_0_time_emb_proj.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight", + "lora_unet_up_blocks_0_resnets_2_time_emb_proj.lora_down.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight", + "lora_unet_up_blocks_0_resnets_2_conv1.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight", + "lora_unet_mid_block_resnets_1_conv1.alpha", + "lora_unet_down_blocks_3_resnets_0_conv2.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_conv_shortcut.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_up_blocks_0_resnets_0_time_emb_proj.lora_down.weight", + "lora_unet_up_blocks_1_resnets_0_time_emb_proj.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_up_blocks_0_resnets_1_conv_shortcut.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", + "lora_unet_down_blocks_3_resnets_1_time_emb_proj.alpha", + 
"lora_unet_up_blocks_1_attentions_0_proj_in.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_proj_out.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_proj_in.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_mid_block_resnets_1_conv2.alpha", + "lora_unet_down_blocks_2_attentions_0_proj_out.alpha", + "lora_unet_down_blocks_2_resnets_0_conv2.lora_up.weight", + "lora_unet_mid_block_resnets_0_conv2.lora_down.weight", + "lora_unet_down_blocks_2_resnets_0_time_emb_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_conv1.lora_down.weight", + "lora_unet_up_blocks_0_resnets_1_conv2.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_down_blocks_2_resnets_1_conv2.alpha", + "lora_unet_mid_block_resnets_1_conv2.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight", + "lora_unet_up_blocks_1_resnets_0_conv1.lora_up.weight", + "lora_unet_up_blocks_1_resnets_0_conv1.alpha", + "lora_unet_down_blocks_3_resnets_1_conv1.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_down_blocks_2_resnets_0_time_emb_proj.alpha", + "lora_unet_down_blocks_2_resnets_0_conv_shortcut.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", + "lora_unet_up_blocks_0_resnets_2_conv2.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_proj_in.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_time_emb_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight", + "lora_unet_mid_block_resnets_1_conv1.lora_down.weight", + "lora_unet_up_blocks_0_resnets_1_conv1.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_down_blocks_3_resnets_1_conv2.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_conv1.alpha", + "lora_unet_down_blocks_3_resnets_1_conv1.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", + "lora_unet_up_blocks_0_resnets_1_conv1.lora_down.weight", + "lora_unet_up_blocks_0_resnets_1_time_emb_proj.lora_down.weight", + "lora_unet_up_blocks_1_resnets_0_conv_shortcut.alpha", + "lora_unet_down_blocks_2_downsamplers_0_conv.alpha", + "lora_unet_down_blocks_2_resnets_0_conv_shortcut.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_time_emb_proj.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight", + 
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", + "lora_unet_mid_block_attentions_0_proj_out.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_proj_in.lora_down.weight", + "lora_unet_up_blocks_0_resnets_1_conv1.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", + "lora_unet_mid_block_resnets_0_time_emb_proj.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight", + "lora_unet_down_blocks_2_resnets_1_conv2.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_up_blocks_1_attentions_0_proj_out.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight", + "lora_unet_up_blocks_0_resnets_2_conv1.lora_up.weight", + "lora_unet_down_blocks_3_resnets_0_conv1.lora_down.weight", + "lora_unet_mid_block_resnets_0_conv2.alpha", + "lora_unet_mid_block_resnets_1_conv2.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", + "lora_unet_down_blocks_2_resnets_0_conv2.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", + "lora_unet_down_blocks_2_resnets_1_conv2.lora_up.weight", + "lora_unet_down_blocks_2_downsamplers_0_conv.lora_down.weight", + "lora_unet_down_blocks_2_resnets_0_time_emb_proj.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_up_blocks_0_upsamplers_0_conv.lora_down.weight", + "lora_unet_down_blocks_2_resnets_0_conv2.lora_down.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", + "lora_unet_down_blocks_2_resnets_0_conv1.alpha", + "lora_unet_down_blocks_2_resnets_1_time_emb_proj.alpha", + "lora_unet_up_blocks_1_attentions_0_proj_out.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight", + "lora_unet_up_blocks_0_resnets_0_conv2.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha", + "lora_unet_up_blocks_0_resnets_1_conv_shortcut.alpha", + "lora_unet_up_blocks_0_resnets_2_time_emb_proj.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", + 
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight", + "lora_unet_mid_block_resnets_1_time_emb_proj.lora_down.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight", + "lora_unet_up_blocks_1_resnets_0_conv2.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_proj_out.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight", + "lora_unet_mid_block_attentions_0_proj_out.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.alpha", + "lora_unet_mid_block_resnets_0_time_emb_proj.lora_up.weight", + "lora_unet_mid_block_resnets_0_conv1.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha", + "lora_unet_up_blocks_1_resnets_0_conv_shortcut.lora_down.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha", + "lora_unet_up_blocks_0_resnets_1_conv_shortcut.lora_up.weight", + "lora_unet_up_blocks_0_resnets_2_conv2.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", + "lora_unet_down_blocks_3_resnets_0_conv1.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", + "lora_unet_mid_block_resnets_0_conv1.alpha", + "lora_unet_up_blocks_0_resnets_1_time_emb_proj.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_down_blocks_3_resnets_0_time_emb_proj.alpha", + "lora_unet_up_blocks_0_resnets_0_conv2.lora_up.weight", + "lora_unet_up_blocks_0_resnets_1_conv2.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha", + "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight", + "lora_unet_down_blocks_2_attentions_1_proj_out.lora_down.weight", + "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", + "lora_unet_down_blocks_3_resnets_1_conv1.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight", + "lora_unet_down_blocks_2_resnets_1_conv1.alpha", + "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", + "lora_unet_up_blocks_1_attentions_0_proj_in.alpha", + "lora_unet_down_blocks_2_attentions_1_proj_in.alpha", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha", + "lora_unet_down_blocks_2_resnets_0_conv1.lora_up.weight", + "lora_unet_down_blocks_2_resnets_1_time_emb_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight", + "lora_unet_down_blocks_3_resnets_1_time_emb_proj.lora_up.weight", + "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight" +] diff --git a/src/file.rs b/crates/inspector/src/file.rs similarity index 94% rename from src/file.rs rename to crates/inspector/src/file.rs index 8f0bfb6..e2dca06 100644 --- a/src/file.rs +++ 
b/crates/inspector/src/file.rs @@ -1,6 +1,5 @@ -use std::collections::{HashMap, HashSet}; - use candle_core::{DType, Device}; +use std::collections::{HashMap, HashSet} use crate::{ metadata::Metadata, @@ -106,22 +105,6 @@ impl LoRAFile { .unwrap_or_default() } - // pub fn scaled( - // &mut self, - // base_name: &str, - // collection: Vec>, - // device: &Device, - // ) -> Result>> { - // let scaled = self.scale_weight(base_name, device)?; - // Ok(collection - // .iter() - // .map(|norm| (norm.name.to_owned(), (*norm.function)(scaled.clone()))) - // .fold(HashMap::new(), |mut acc, (k, t)| { - // acc.insert(k, t); - // acc - // })) - // } - pub fn l2_norm(&self, base_name: &str) -> Result { self.scaled_weight(base_name) .ok_or_else(|| InspectorError::NotFound) @@ -144,6 +127,16 @@ impl LoRAFile { self.scaled_weights.get(base_name) } + pub fn scale_weights( + &mut self, + device: &candle_core::Device, + ) -> Vec<Result<Tensor>> { + self.base_names() + .iter() + .map(|base_name| self.scale_weight(base_name, device)) + .collect() + } + pub fn scale_weight( + &mut self, + base_name: &str, @@ -286,6 +279,25 @@ mod tests { insta::assert_json_snapshot!(result.sort()); } + // #[test] + // fn scale_weights() { + // // Arrange + // let buffer = load_test_file().unwrap(); + // let filename = "boo.safetensors"; + // let mut lora_file = LoRAFile::new_from_buffer(&buffer, filename); + // let base_name = "lora_unet_up_blocks_1_attentions_0_proj_in"; + // let device = &Device::Cpu; + // + // lora_file.scale_weights(device); + // + // // Act + // let result = lora_file.l1_norm::<f64>(base_name); + // + // // Assert + // assert!(result.is_ok()); + // // Add assertions to verify that the norm result is correct + // } + #[test] fn weight_norm_returns_norm_for_valid_weights() { // Arrange @@ -515,7 +527,7 @@ mod tests { Ok(()) } - + #[test] fn is_tensors_loaded() -> crate::Result<()> { let file = "boo.safetensors"; diff --git a/src/key.pest b/crates/inspector/src/key.pest similarity index 100% rename from src/key.pest rename to crates/inspector/src/key.pest diff --git a/crates/inspector/src/lib.rs b/crates/inspector/src/lib.rs new file mode 100644 index 0000000..0604cdc --- /dev/null +++ b/crates/inspector/src/lib.rs @@ -0,0 +1,184 @@ +use core::fmt; +use std::fmt::Debug; +use std::io; +// use std::alloc::Global; +use std::string::String; + +// use pest::Parser; +use pest_derive::Parser; +use wasm_bindgen::JsValue; + +#[derive(Parser)] +#[grammar = "key.pest"] +pub struct KeyParser; + +// use candle_core::safetensors; +// use std::error::Error; +// use std::marker::Send; +// use std::marker::Sync; + +pub mod file; +pub mod metadata; +pub mod network; +pub mod norms; +mod parser; +pub mod statistic; +mod weight; + +// pub use wasm_bindgen_rayon::init_thread_pool; + +pub type Result<T> = std::result::Result<T, InspectorError>; + +#[derive(Debug)] +pub enum InspectorError { + Candle(candle_core::Error), + SafeTensor(safetensors::SafeTensorError), + Io(io::Error), + Load(String), + Msg(String), + NotFound, + UnsupportedNetworkType, + SerdeWasmBindgenError(serde_wasm_bindgen::Error), +} + +impl InspectorError { + fn candle(err: candle_core::Error) -> InspectorError { + InspectorError::Candle(err) + } + + fn safetensor(err: safetensors::SafeTensorError) -> InspectorError { + InspectorError::SafeTensor(err) + } +} + +impl From<candle_core::Error> for InspectorError { + fn from(err: candle_core::Error) -> InspectorError { + InspectorError::candle(err) + } +} + +impl From<safetensors::SafeTensorError> for InspectorError { + fn from(err: safetensors::SafeTensorError) -> InspectorError {
InspectorError::safetensor(err) + } +} + +impl From<io::Error> for InspectorError { + fn from(err: io::Error) -> InspectorError { + InspectorError::Io(err) + } +} + +impl fmt::Display for InspectorError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + InspectorError::Candle(e) => write!(f, "Candle Error {:#?}", e), + InspectorError::SafeTensor(e) => write!(f, "SafeTensor Error {:#?}", e), + InspectorError::SerdeWasmBindgenError(e) => { + write!(f, "Serde WASM Bindgen error: {}", e) + } + InspectorError::Io(e) => write!(f, "IO Error: {:#?}", e), + InspectorError::Load(e) => write!(f, "Load Error {}", e), + InspectorError::Msg(e) => write!(f, "Error {}", e), + InspectorError::UnsupportedNetworkType => write!(f, "Unsupported network type"), + InspectorError::NotFound => write!(f, "Not found"), + } + } +} + +impl From<JsValue> for InspectorError { + fn from(value: JsValue) -> Self { + InspectorError::Msg(value.as_string().unwrap()) + } +} + +impl From<serde_wasm_bindgen::Error> for InspectorError { + fn from(value: serde_wasm_bindgen::Error) -> Self { + InspectorError::SerdeWasmBindgenError(value) + } +} + +pub fn get_base_name(name: &str) -> String { + name.split('.') + .filter(|part| { + !matches!( + *part, + "weight" + | "lora_up" + | "lora_down" + | "lokr_w1" + | "lokr_w2" + | "hada_w1_a" + | "hada_w1_b" + | "hada_w2_a" + | "oft_diag" + | "hada_w2_b" + | "alpha" + ) + }) + .fold(String::new(), |acc, v| { + if acc.is_empty() { + v.to_owned() + } else { + format!("{acc}.{v}") + } + }) +} + +#[cfg(test)] +mod tests { + + use pest::Parser; + use std::fs; + + use crate::{get_base_name, KeyParser, Rule}; + + #[test] + fn get_base_name_test() { + let base_name = get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.lora_up.weight"); + assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); + + let base_name = + get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.lora_down.weight"); + assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); + + let base_name = + get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.hada_w1_a"); + assert_eq!( + base_name, + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" + ); + + let base_name = + get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lokr_w1"); + assert_eq!( + base_name, + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" + ); + + let base_name = + get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.oft_diag"); + assert_eq!( + base_name, + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" + ); + + let base_name = get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.alpha"); + assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); + } + + fn load_keys_json() -> serde_json::Result<Vec<String>> { + let keys = fs::read_to_string("./keys.json").expect("to read the keys json"); + serde_json::from_str::<Vec<String>>(&keys) + } + + #[test] + fn test_key_parsing() { + let keys: Vec<String> = load_keys_json().unwrap(); + + for key in keys { + let successful_parse = KeyParser::parse(Rule::key, &key); + assert!(successful_parse.is_ok()); + } + } +} diff --git a/src/lr_weight.pest b/crates/inspector/src/lr_weight.pest similarity index 100% rename from src/lr_weight.pest rename to crates/inspector/src/lr_weight.pest diff --git a/src/metadata.rs b/crates/inspector/src/metadata.rs similarity index 100% rename from src/metadata.rs rename to crates/inspector/src/metadata.rs diff --git a/src/network.rs b/crates/inspector/src/network.rs similarity index 100% rename from src/network.rs
rename to crates/inspector/src/network.rs diff --git a/src/norms.rs b/crates/inspector/src/norms.rs similarity index 100% rename from src/norms.rs rename to crates/inspector/src/norms.rs diff --git a/src/parser.rs b/crates/inspector/src/parser.rs similarity index 100% rename from src/parser.rs rename to crates/inspector/src/parser.rs diff --git a/src/snapshots/lora_inspector_rs__file__tests__alpha_keys.snap b/crates/inspector/src/snapshots/inspector__file__tests__alpha_keys.snap similarity index 99% rename from src/snapshots/lora_inspector_rs__file__tests__alpha_keys.snap rename to crates/inspector/src/snapshots/inspector__file__tests__alpha_keys.snap index 0e8bd76..f94194b 100644 --- a/src/snapshots/lora_inspector_rs__file__tests__alpha_keys.snap +++ b/crates/inspector/src/snapshots/inspector__file__tests__alpha_keys.snap @@ -1,5 +1,5 @@ --- -source: src/file.rs +source: crates/inspector/src/file.rs expression: alpha_keys --- [ diff --git a/src/snapshots/lora_inspector_rs__file__tests__base_names.snap b/crates/inspector/src/snapshots/inspector__file__tests__base_names.snap similarity index 99% rename from src/snapshots/lora_inspector_rs__file__tests__base_names.snap rename to crates/inspector/src/snapshots/inspector__file__tests__base_names.snap index 8ff1c85..9047937 100644 --- a/src/snapshots/lora_inspector_rs__file__tests__base_names.snap +++ b/crates/inspector/src/snapshots/inspector__file__tests__base_names.snap @@ -1,5 +1,5 @@ --- -source: src/file.rs +source: crates/inspector/src/file.rs expression: base_names --- [ diff --git a/src/snapshots/lora_inspector_rs__file__tests__keys_returns_correct_keys.snap b/crates/inspector/src/snapshots/inspector__file__tests__keys_returns_correct_keys.snap similarity index 51% rename from src/snapshots/lora_inspector_rs__file__tests__keys_returns_correct_keys.snap rename to crates/inspector/src/snapshots/inspector__file__tests__keys_returns_correct_keys.snap index 5699ec5..9204387 100644 --- a/src/snapshots/lora_inspector_rs__file__tests__keys_returns_correct_keys.snap +++ b/crates/inspector/src/snapshots/inspector__file__tests__keys_returns_correct_keys.snap @@ -1,5 +1,5 @@ --- -source: src/file.rs +source: crates/inspector/src/file.rs expression: result.sort() --- null diff --git a/src/snapshots/lora_inspector_rs__file__tests__weight_keys_returns_correct_keys.snap b/crates/inspector/src/snapshots/inspector__file__tests__weight_keys_returns_correct_keys.snap similarity index 51% rename from src/snapshots/lora_inspector_rs__file__tests__weight_keys_returns_correct_keys.snap rename to crates/inspector/src/snapshots/inspector__file__tests__weight_keys_returns_correct_keys.snap index 5699ec5..9204387 100644 --- a/src/snapshots/lora_inspector_rs__file__tests__weight_keys_returns_correct_keys.snap +++ b/crates/inspector/src/snapshots/inspector__file__tests__weight_keys_returns_correct_keys.snap @@ -1,5 +1,5 @@ --- -source: src/file.rs +source: crates/inspector/src/file.rs expression: result.sort() --- null diff --git a/src/statistic.rs b/crates/inspector/src/statistic.rs similarity index 100% rename from src/statistic.rs rename to crates/inspector/src/statistic.rs diff --git a/src/weight.rs b/crates/inspector/src/weight.rs similarity index 100% rename from src/weight.rs rename to crates/inspector/src/weight.rs diff --git a/crates/lora-inspector-wasm/Cargo.toml b/crates/lora-inspector-wasm/Cargo.toml new file mode 100644 index 0000000..a3b82ad --- /dev/null +++ b/crates/lora-inspector-wasm/Cargo.toml @@ -0,0 +1,51 @@ +[package] +name = 
"lora-inspector-wasm" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +description.workspace = true + +[lib] +crate-type = ["cdylib"] + +[features] +default = ["console_error_panic_hook"] + +[dependencies] +candle-core = { workspace = true } +console_error_panic_hook = { version = "0.1.6", optional = true } +inspector = { path = "../inspector" } +safetensors = { workspace = true } +serde-wasm-bindgen = "0.4" +serde_json = "1.0.108" +serde_with = "3.4.0" +wasm-bindgen = "0.2" + +[dependencies.web-sys] +version = "0.3.65" +features = [ + 'console', +] + +[dev-dependencies] +insta = { version = "1.34.0", features = ["json"] } +memmap2 = "0.9.0" +wasm-bindgen-futures = "0.4.39" +wasm-bindgen-test = "0.3.13" + +[dev-dependencies.web-sys] +version = "0.3.65" +features = [ + 'Headers', + 'Request', + 'RequestInit', + 'RequestMode', + 'Response', + 'Worker', + 'Blob', + 'Performance', + 'Window', +] + diff --git a/crates/lora-inspector-wasm/src/lib.rs b/crates/lora-inspector-wasm/src/lib.rs new file mode 100644 index 0000000..e19b17b --- /dev/null +++ b/crates/lora-inspector-wasm/src/lib.rs @@ -0,0 +1,186 @@ +use core::fmt; +use std::fmt::Debug; +use std::io; +// use std::alloc::Global; +use std::string::String; + +use web_sys::wasm_bindgen::JsValue; + +// use pest::Parser; +// use pest_derive::Parser; +// use wasm_bindgen::JsValue; +// +// #[derive(Parser)] +// #[grammar = "key.pest"] +// pub struct KeyParser; + +// use candle_core::safetensors; +// use std::error::Error; +// use std::marker::Send; +// use std::marker::Sync; + +// pub mod file; +// pub mod metadata; +// mod network; +// mod norms; +// mod parser; +// mod statistic; +// mod weight; +mod worker; + +// pub use wasm_bindgen_rayon::init_thread_pool; + +pub type Result = std::result::Result; + +#[derive(Debug)] +pub enum InspectorError { + Candle(candle_core::Error), + SafeTensor(safetensors::SafeTensorError), + Io(io::Error), + Load(String), + Msg(String), + NotFound, + UnsupportedNetworkType, + SerdeWasmBindgenError(serde_wasm_bindgen::Error), +} + +impl InspectorError { + fn candle(err: candle_core::Error) -> InspectorError { + InspectorError::Candle(err) + } + + fn safetensor(err: safetensors::SafeTensorError) -> InspectorError { + InspectorError::SafeTensor(err) + } +} + +impl From for InspectorError { + fn from(err: candle_core::Error) -> InspectorError { + InspectorError::candle(err) + } +} + +impl From for InspectorError { + fn from(err: safetensors::SafeTensorError) -> InspectorError { + InspectorError::safetensor(err) + } +} + +impl From for InspectorError { + fn from(err: io::Error) -> InspectorError { + InspectorError::Io(err) + } +} + +impl fmt::Display for InspectorError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + InspectorError::Candle(e) => write!(f, "Candle Error {:#?}", e), + InspectorError::SafeTensor(e) => write!(f, "SafeTensor Error {:#?}", e), + InspectorError::SerdeWasmBindgenError(e) => { + write!(f, "Serde WASM Bindgen error: {}", e) + } + InspectorError::Io(e) => write!(f, "IO Error: {:#?}", e), + InspectorError::Load(e) => write!(f, "Load Error {}", e), + InspectorError::Msg(e) => write!(f, "Error {}", e), + InspectorError::UnsupportedNetworkType => write!(f, "Unsupported network type"), + InspectorError::NotFound => write!(f, "Not found"), + } + } +} + +impl From for InspectorError { + fn from(value: JsValue) -> Self { + InspectorError::Msg(value.as_string().unwrap()) + } +} + +impl From 
for InspectorError { + fn from(value: serde_wasm_bindgen::Error) -> Self { + InspectorError::SerdeWasmBindgenError(value) + } +} + +pub fn get_base_name(name: &str) -> String { + name.split('.') + .filter(|part| { + !matches!( + *part, + "weight" + | "lora_up" + | "lora_down" + | "lokr_w1" + | "lokr_w2" + | "hada_w1_a" + | "hada_w1_b" + | "hada_w2_a" + | "oft_diag" + | "hada_w2_b" + | "alpha" + ) + }) + .fold(String::new(), |acc, v| { + if acc.is_empty() { + v.to_owned() + } else { + format!("{acc}.{v}") + } + }) +} + +#[cfg(test)] +mod tests { + + use std::fs; + + use crate::get_base_name; + + #[test] + fn get_base_name_test() { + let base_name = get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.lora_up.weight"); + assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); + + let base_name = + get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.lora_down.weight"); + assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); + + let base_name = + get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.hada_w1_a"); + assert_eq!( + base_name, + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" + ); + + let base_name = + get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lokr_w1"); + assert_eq!( + base_name, + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" + ); + + let base_name = + get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.oft_diag"); + assert_eq!( + base_name, + "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" + ); + + let base_name = get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.alpha"); + assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); + } + + fn load_keys_json() -> serde_json::Result<Vec<String>> { + let keys = fs::read_to_string("./keys.json").expect("to read the keys json"); + serde_json::from_str::<Vec<String>>(&keys) + } + + // #[test] + // fn test_key_parsing() { + // let keys: Vec<String> = load_keys_json().unwrap(); + // + // for key in keys { + // let successful_parse = KeyParser::parse(Rule::key, &key); + // assert!(successful_parse.is_ok()); + // } + // } +} diff --git a/src/worker.rs b/crates/lora-inspector-wasm/src/worker.rs similarity index 89% rename from src/worker.rs rename to crates/lora-inspector-wasm/src/worker.rs index c53a8d3..6a1a161 100644 --- a/src/worker.rs +++ b/crates/lora-inspector-wasm/src/worker.rs @@ -1,15 +1,15 @@ -use pest::Parser; +// use pest::Parser; use std::collections::HashMap; use std::fmt; use wasm_bindgen::prelude::*; use web_sys::console; -extern crate console_error_panic_hook; +// extern crate console_error_panic_hook; -use crate::file::LoRAFile; -use crate::metadata::Metadata; -use crate::network::NetworkModule; -use crate::{norms, statistic, InspectorError, KeyParser, Rule}; +use inspector::file::LoRAFile; +use inspector::metadata::Metadata; +use inspector::network::NetworkModule; +use inspector::{norms, InspectorError, statistic}; use std::panic; #[wasm_bindgen] @@ -91,29 +91,42 @@ impl LoraWorker { self.file.precision() } - pub fn parse_key(&self, parse_key: &str) { - let successful_parse = KeyParser::parse(Rule::key, parse_key); - if let Ok(pairs) = successful_parse { - console::log_1(&format!("{:#?}", pairs).into()); - } - } + // pub fn parse_key(&self, parse_key: &str) { + // let successful_parse = KeyParser::parse(Rule::key, parse_key); + // if let Ok(pairs) = successful_parse { + // console::log_1(&format!("{:#?}", pairs).into()); + // } + // } - pub fn scale_weights(&mut self) -> usize { + pub fn scale_weights(&mut
self) -> Vec<String> { console_error_panic_hook::set_once(); self.file - .base_names() + .scale_weights(&candle_core::Device::Cpu) .iter() - .filter_map(|base_name| { - match self.file.scale_weight(base_name, &candle_core::Device::Cpu) { - Ok(ok) => Some(ok), - Err(e) => { - console::error_1(&format!("scale weight error: {:#?}", e).into()); - None - } + .filter_map(|scaled| match scaled { + Ok(_) => None, + Err(e) => { + console::error_1(&format!("scale weight error: {:#?}", e).into()); + Some(e.to_string()) } }) - .count() + .collect() + + // + // self.file + // .base_names() + // .iter() + // .filter_map(|base_name| { + // match self.file.scale_weight(base_name, &candle_core::Device::Cpu) { + // Ok(ok) => Some(ok), + // Err(e) => { + // console::error_1(&format!("scale weight error: {:#?}", e).into()); + // None + // } + // } + // }) + // .count() } pub fn scale_weight(&mut self, base_name: &str) -> Result { @@ -310,6 +323,24 @@ mod tests { ); } + #[wasm_bindgen_test] + async fn scale_weights() { + wasm_bindgen_test_configure!(run_in_browser); + let buffer = load_test_file(file("lora_unet_down_blocks_1_resnets_1_conv2.safetensors").as_str()) + .await + .unwrap(); + + let mut worker = + LoraWorker::new_from_buffer(&buffer, "lora_unet_down_blocks_1_resnets_1_conv2.safetensors").expect("load from buffer"); + + worker.scale_weights(); + + assert_eq!( + worker.l2_norm("lora_unet_down_blocks_1_resnets_1_conv2"), + Some(0.40116092442056206) + ); + } + #[wasm_bindgen_test] async fn base_names() { wasm_bindgen_test_configure!(run_in_browser); @@ -526,21 +557,6 @@ mod tests { .expect("load from buffer"); assert!(worker.network_args().unwrap().is_object()); - // assert_eq!( - // worker.network_args().unwrap(), - // serde_wasm_bindgen::to_value(&NetworkArgs { - // dropout: Some(0.3), - // block_dims: Some(BlockUsizeSeq(vec![ - // 4, 4, 4, 4, 4, 4, 4, 4, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8 - // ])), - // block_alphas: Some(BlockUsizeSeq(vec![ - // 16, 16, 16, 16, 16, 16, 16, 16, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - // 32, 32, 32, 32, 32 - // ])), - // ..Default::default() - // }) - // .unwrap() - // ); } #[wasm_bindgen_test] diff --git a/crates/lora-inspector/Cargo.toml b/crates/lora-inspector/Cargo.toml new file mode 100644 index 0000000..fd6e2e0 --- /dev/null +++ b/crates/lora-inspector/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "lora-inspector" +version.workspace = true +authors.workspace = true +edition.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +inspector = { path = "../inspector" } diff --git a/crates/lora-inspector/src/main.rs b/crates/lora-inspector/src/main.rs new file mode 100644 index 0000000..624691c --- /dev/null +++ b/crates/lora-inspector/src/main.rs @@ -0,0 +1,17 @@ +use std::{fs::File, io::Read}; +use inspector::{Result, file, metadata}; + +fn main() -> Result<()> { + let filename = "boo.safetensors"; + + let mut f = File::open(filename)?; + let mut data = vec![]; + f.read_to_end(&mut data)?; + + let metadata = metadata::Metadata::new_from_buffer(data.as_slice()).map_err(|e| e.to_string()); + let file = file::LoRAFile::new_from_buffer(data.as_slice(), filename); + + // metadata.map(|metadata| LoraWorker { metadata, file }) + + Ok(()) +} diff --git a/dprint.json b/dprint.json new file mode 100644 index 0000000..515a97c --- /dev/null +++ b/dprint.json @@ -0,0 +1,11 @@ +{ + "markdown": { + }, + "toml": { + }, + "excludes": [], + "plugins": [ + 
"https://plugins.dprint.dev/markdown-0.16.3.wasm", + "https://plugins.dprint.dev/toml-0.5.4.wasm" + ] +} diff --git a/index.html b/index.html index 571f793..c7ab4f5 100644 --- a/index.html +++ b/index.html @@ -64,7 +64,7 @@

LoRA Inspector

- +
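The worker change earlier in this patch is its behavioural core: `LoraWorker::scale_weights` used to return a bare success count (`usize`); it now drives the new `LoRAFile::scale_weights` over every base name on `candle_core::Device::Cpu` and returns the collected error messages as a `Vec<String>` (empty when every weight scaled cleanly), so the JS caller can surface failures while later norm queries read the cached scaled weights. A minimal sketch of the intended call pattern, modelled on the wasm tests in this patch — the `load_test_file` helper, the fixture name, and the `Option<f64>` return of `l2_norm` are assumptions taken from those tests, not a published API:

    // Sketch only: the post-refactor worker flow inside a wasm-bindgen-test.
    let buffer = load_test_file("boo.safetensors").await.unwrap();
    let mut worker = LoraWorker::new_from_buffer(&buffer, "boo.safetensors")
        .expect("load from buffer");

    // Scale every LoRA weight once up front; failures come back as strings
    // (an empty Vec means all weights were scaled and cached).
    let errors: Vec<String> = worker.scale_weights();
    assert!(errors.is_empty(), "scale weight errors: {:?}", errors);

    // Norm lookups afterwards read from the cached scaled weights.
    let norm: Option<f64> = worker.l2_norm("lora_unet_up_blocks_1_attentions_0_proj_in");
    assert!(norm.is_some());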
diff --git a/src/lib.rs b/src/lib.rs index ac9cacb..e69de29 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,183 +0,0 @@ -use core::fmt; -use std::fmt::Debug; -use std::io; -// use std::alloc::Global; -use std::string::String; - -// use pest::Parser; -use pest_derive::Parser; -use wasm_bindgen::JsValue; - -#[derive(Parser)] -#[grammar = "key.pest"] -pub struct KeyParser; - -// use candle_core::safetensors; -// use std::error::Error; -// use std::marker::Send; -// use std::marker::Sync; - -mod file; -mod metadata; -mod network; -mod norms; -mod weight; -mod worker; -mod statistic; -mod parser; - -pub type Result<T> = std::result::Result<T, InspectorError>; - -#[derive(Debug)] -pub enum InspectorError { - Candle(candle_core::Error), - SafeTensor(safetensors::SafeTensorError), - Io(io::Error), - Load(String), - Msg(String), - NotFound, - UnsupportedNetworkType, - SerdeWasmBindgenError(serde_wasm_bindgen::Error), -} - -impl InspectorError { - fn candle(err: candle_core::Error) -> InspectorError { - InspectorError::Candle(err) - } - - fn safetensor(err: safetensors::SafeTensorError) -> InspectorError { - InspectorError::SafeTensor(err) - } -} - -impl From<candle_core::Error> for InspectorError { - fn from(err: candle_core::Error) -> InspectorError { - InspectorError::candle(err) - } -} - -impl From<safetensors::SafeTensorError> for InspectorError { - fn from(err: safetensors::SafeTensorError) -> InspectorError { - InspectorError::safetensor(err) - } -} - -impl From<io::Error> for InspectorError { - fn from(err: io::Error) -> InspectorError { - InspectorError::Io(err) - } -} - -impl fmt::Display for InspectorError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - InspectorError::Candle(e) => write!(f, "Candle Error {:#?}", e), - InspectorError::SafeTensor(e) => write!(f, "SafeTensor Error {:#?}", e), - InspectorError::SerdeWasmBindgenError(e) => { - write!(f, "Serde WASM Bindgen error: {}", e) - } - InspectorError::Io(e) => write!(f, "IO Error: {:#?}", e), - InspectorError::Load(e) => write!(f, "Load Error {}", e), - InspectorError::Msg(e) => write!(f, "Error {}", e), - InspectorError::UnsupportedNetworkType => write!(f, "Unsupported network type"), - InspectorError::NotFound => write!(f, "Not found"), - } - } -} - -impl From<JsValue> for InspectorError { - fn from(value: JsValue) -> Self { - InspectorError::Msg(value.as_string().unwrap()) - } -} - -impl From<serde_wasm_bindgen::Error> for InspectorError { - fn from(value: serde_wasm_bindgen::Error) -> Self { - InspectorError::SerdeWasmBindgenError(value) - } -} - -pub fn get_base_name(name: &str) -> String { - name.split('.') - .filter(|part| { - !matches!( - *part, - "weight" - | "lora_up" - | "lora_down" - | "lokr_w1" - | "lokr_w2" - | "hada_w1_a" - | "hada_w1_b" - | "hada_w2_a" - | "oft_diag" - | "hada_w2_b" - | "alpha" - ) - }) - .fold(String::new(), |acc, v| { - if acc.is_empty() { - v.to_owned() - } else { - format!("{acc}.{v}") - } - }) -} - -#[cfg(test)] -mod tests { - - use pest::Parser; - use std::fs; - - use crate::{get_base_name, KeyParser, Rule}; - - #[test] - fn get_base_name_test() { - let base_name = get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.lora_up.weight"); - assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); - - let base_name = - get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.lora_down.weight"); - assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); - - let base_name = - get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.hada_w1_a"); - assert_eq!( - base_name, - "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" - ); - - let
base_name = - get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.lokr_w1"); - assert_eq!( - base_name, - "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" - ); - - let base_name = - get_base_name("lora_te1_text_model_encoder_layers_5_self_attn_q_proj.oft_diag"); - assert_eq!( - base_name, - "lora_te1_text_model_encoder_layers_5_self_attn_q_proj" - ); - - let base_name = get_base_name("lora_unet_up_blocks_1_attentions_1_proj_out.alpha"); - assert_eq!(base_name, "lora_unet_up_blocks_1_attentions_1_proj_out"); - } - - fn load_keys_json() -> serde_json::Result<Vec<String>> { - let keys = fs::read_to_string("./keys.json").expect("to read the keys json"); - serde_json::from_str::<Vec<String>>(&keys) - } - - #[test] - fn test_key_parsing() { - let keys: Vec<String> = load_keys_json().unwrap(); - - for key in keys { - let successful_parse = KeyParser::parse(Rule::key, &key); - assert!(successful_parse.is_ok()); - } - } -} diff --git a/src/main.rs b/src/main.rs deleted file mode 100644 index 52c6c34..0000000 --- a/src/main.rs +++ /dev/null @@ -1,14 +0,0 @@ -use candle_core::{Device, Tensor}; - -fn main() -> Result<(), Box<dyn std::error::Error>> { - let device = Device::Cpu; - - let a = Tensor::randn(0f32, 1., (2, 3), &device)?; - let b = Tensor::randn(0f32, 1., (3, 4), &device)?; - - let c = a.matmul(&b)?; - println!("{c}"); - Ok(()) - - // let tensors = SafeTensors::deserialize(&buffer).unwrap(); -} diff --git a/src/snapshots/lora_inspector__tests__map_down_alpha.snap b/src/snapshots/lora_inspector__tests__map_down_alpha.snap deleted file mode 100644 index 8743a7c..0000000 --- a/src/snapshots/lora_inspector__tests__map_down_alpha.snap +++ /dev/null @@ -1,186 +0,0 @@ ---- -source: src/lib.rs -expression: up --- -[ - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_9_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_out_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_proj_out.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_8_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_8_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_10_mlp_fc1.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_proj_in.lora_up.weight", - "lora_te_text_model_encoder_layers_1_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_4_mlp_fc1.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_q_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_proj_out.lora_up.weight",
"lora_te_text_model_encoder_layers_2_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_2_self_attn_k_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_5_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_k_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_v_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_proj_out.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_2_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_k_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_5_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_2_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_7_mlp_fc2.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_q_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_out_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_proj_out.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_proj_in.lora_up.weight", - "lora_te_text_model_encoder_layers_0_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_6_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_2_mlp_fc2.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", 
- "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_9_mlp_fc2.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_proj_in.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_proj_in.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_11_mlp_fc1.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_7_mlp_fc1.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_proj_in.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_proj_out.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_out_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_11_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_v_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_out_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_10_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_k_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_out_proj.lora_up.weight", - 
"lora_te_text_model_encoder_layers_3_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_4_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_out_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_6_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_proj_out.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_k_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_proj_out.lora_up.weight", - "lora_unet_mid_block_attentions_0_proj_out.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_proj_in.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_proj_out.lora_up.weight", - "lora_te_text_model_encoder_layers_0_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_v_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_k_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_k_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_proj_in.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_v_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_k_proj.lora_up.weight", - 
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_1_mlp_fc1.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_3_mlp_fc1.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_k_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_proj_in.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_2_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_proj_in.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight" -] diff --git a/src/snapshots/lora_inspector__tests__sdxl_weights_by_block.snap b/src/snapshots/lora_inspector__tests__sdxl_weights_by_block.snap deleted file mode 100644 index 6ea55f4..0000000 --- a/src/snapshots/lora_inspector__tests__sdxl_weights_by_block.snap +++ /dev/null @@ -1,54 +0,0 @@ ---- -source: src/lib.rs -expression: dbg!(result) ---- -{ - "text_encoder": { - "layer_27": 2.6981724264439193, - "layer_02": 2.585883574878324, - "layer_10": 2.5828950484584086, - "layer_03": 2.5590740850556273, - "layer_30": 2.8715523784561747, - "layer_17": 2.856573554443203, - "layer_08": 2.5526433805076016, - "layer_04": 2.4886652477788647, - "layer_11": 2.2360385661059925, - "layer_24": 2.827270217244079, - "layer_19": 2.918184613260589, - "layer_21": 2.8049301858088875, - "layer_05": 2.464273559417075, - "layer_07": 2.5771845546871788, - "layer_20": 2.7940594038519673, - "layer_26": 2.90682780986022, - "layer_25": 2.7996566374047074, - "layer_09": 2.6051241374127776, - "layer_15": 2.9749570401214847, - "layer_23": 2.861240965268156, - "layer_13": 2.8646155949651084, - "layer_29": 2.798727564344976, - "layer_16": 2.9225455991238527, - "layer_22": 2.9790958653316757, - "layer_14": 2.82534215634467, - "layer_31": 3.8637997801833954, - "layer_12": 2.909897691479945, - "layer_18": 2.916504453428043, - "layer_06": 2.5168113962228156, - "layer_28": 2.906921097006807, - "layer_01": 2.436261530287356, - "layer_00": 2.398623786521228 - }, - "unet": { - "output_01": 3.7485393705952412, - "middle_00": 4.4912954465075785, - "input_02": 3.787922562475381, - "input_00": 3.936847869198427, - "middle_01": 3.7100521284501666, - "output_02": 4.57177559348496, - "output_03": 5.532903021129882, - "middle_02": 4.209200318584085, - "input_01": 3.255837980451958, - "middle_03": 
7.846870343021596, - "output_00": 4.158135129624979, - "input_03": 4.395878374398331 - } -} diff --git a/src/snapshots/lora_inspector__tests__weights_by_block.snap b/src/snapshots/lora_inspector__tests__weights_by_block.snap deleted file mode 100644 index 228b897..0000000 --- a/src/snapshots/lora_inspector__tests__weights_by_block.snap +++ /dev/null @@ -1,31 +0,0 @@ ---- -source: src/lib.rs -expression: result ---- -{ - "unet": { - "up_02": 2.120655256833002, - "down_01": 2.230538075096385, - "mid_02": 2.224794629779769, - "up_01": 2.2393360019167305, - "up_00": 2.1742004173330853, - "down_02": 2.2030147237448237, - "mid_01": 2.2196846091685765, - "mid_00": 2.1526473038635956, - "down_00": 2.0547019995405065 - }, - "text_encoder": { - "layer_09": 1.2418230235341936, - "layer_01": 1.237123584120902, - "layer_10": 1.2340809383377243, - "layer_11": 1.2385596831422123, - "layer_04": 1.2323092332639434, - "layer_03": 1.232601893524758, - "layer_07": 1.229626061262309, - "layer_02": 1.238819093326412, - "layer_08": 1.233122912380843, - "layer_05": 1.227648301288342, - "layer_00": 1.2405343164401639, - "layer_06": 1.2311805503592268 - } -} diff --git a/src/snapshots/lora_inspector_rs__tests__map_down_alpha.snap b/src/snapshots/lora_inspector_rs__tests__map_down_alpha.snap deleted file mode 100644 index 200ad35..0000000 --- a/src/snapshots/lora_inspector_rs__tests__map_down_alpha.snap +++ /dev/null @@ -1,186 +0,0 @@ ---- -source: src/lib.rs -expression: up ---- -[ - "lora_unet_up_blocks_1_attentions_1_proj_out.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_1_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_q_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_proj_in.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_te_text_model_encoder_layers_6_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_10_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_te_text_model_encoder_layers_9_mlp_fc2.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_0_mlp_fc1.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_8_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_out_proj.lora_up.weight", - 
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_11_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_out_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_proj_in.lora_up.weight", - "lora_te_text_model_encoder_layers_11_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_k_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_proj_in.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_proj_in.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_0_mlp_fc2.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_10_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_v_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_7_mlp_fc2.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_k_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_proj_out.lora_up.weight", - "lora_te_text_model_encoder_layers_1_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_proj_out.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_out_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_proj_out.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_proj_out.lora_up.weight", - "lora_unet_mid_block_attentions_0_proj_out.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_v_proj.lora_up.weight", - 
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_out_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_proj_in.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_k_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_v_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_4_mlp_fc2.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_5_mlp_fc1.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_out_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_proj_in.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_k_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_proj_in.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_2_self_attn_q_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - 
"lora_unet_up_blocks_1_attentions_2_proj_in.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_k_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_proj_out.lora_up.weight", - "lora_te_text_model_encoder_layers_2_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_3_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_6_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_9_self_attn_out_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_proj_out.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_3_self_attn_q_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_5_mlp_fc2.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_te_text_model_encoder_layers_7_self_attn_q_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_3_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_proj_out.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_proj_in.lora_up.weight", - "lora_te_text_model_encoder_layers_8_mlp_fc1.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight", - "lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_2_mlp_fc2.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_7_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_2_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_4_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_8_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_2_self_attn_k_proj.lora_up.weight", - 
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_9_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_k_proj.lora_up.weight", - "lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_0_self_attn_v_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_1_self_attn_q_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight", - "lora_te_text_model_encoder_layers_2_mlp_fc1.lora_up.weight", - "lora_te_text_model_encoder_layers_6_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight", - "lora_te_text_model_encoder_layers_11_self_attn_k_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_10_self_attn_out_proj.lora_up.weight", - "lora_te_text_model_encoder_layers_4_self_attn_q_proj.lora_up.weight", - "lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight", - "lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight", - "lora_te_text_model_encoder_layers_5_self_attn_v_proj.lora_up.weight", - "lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight", - "lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight" -] diff --git a/src/snapshots/lora_inspector_rs__tests__sdxl_weights_by_block.snap b/src/snapshots/lora_inspector_rs__tests__sdxl_weights_by_block.snap deleted file mode 100644 index 7b5d72f..0000000 --- a/src/snapshots/lora_inspector_rs__tests__sdxl_weights_by_block.snap +++ /dev/null @@ -1,54 +0,0 @@ ---- -source: src/lib.rs -expression: dbg!(result) ---- -{ - "text_encoder": { - "layer_11": 2.236038566105992, - "layer_03": 2.559074085055627, - "layer_09": 2.605124137412777, - "layer_15": 2.9749570401214847, - "layer_18": 2.9165044534280433, - "layer_06": 2.516811396222816, - "layer_25": 2.7996566374047074, - "layer_31": 3.8637997801833954, - "layer_30": 2.8715523784561747, - "layer_26": 2.90682780986022, - "layer_02": 2.585883574878324, - "layer_17": 2.856573554443203, - "layer_20": 2.7940594038519677, - "layer_04": 2.4886652477788647, - "layer_19": 2.9181846132605895, - "layer_01": 2.4362615302873563, - "layer_21": 2.8049301858088875, - "layer_28": 2.906921097006807, - "layer_27": 2.6981724264439197, - "layer_23": 2.861240965268156, - "layer_24": 2.827270217244079, - "layer_10": 2.582895048458408, - "layer_08": 2.5526433805076016, - "layer_29": 2.798727564344977, - "layer_05": 2.4642735594170753, - "layer_00": 2.3986237865212283, - "layer_16": 2.9225455991238536, - "layer_13": 2.864615594965109, - "layer_07": 2.5771845546871783, - "layer_14": 2.82534215634467, - "layer_12": 2.9098976914799457, - "layer_22": 2.9790958653316753 - }, - "unet": { - "input_00": 3.93684786919843, - "middle_01": 3.710052128450166, - "middle_00": 4.49129544650758, - "middle_02": 4.209200318584085, - "middle_03": 7.846870343021595, - "output_03": 5.532903021129883, - "output_02": 4.571775593484964, - "output_00": 4.158135129624979, - "input_02": 3.78792256247538, - "input_03": 4.395878374398332, - "input_01": 3.2558379804519544, - "output_01": 3.7485393705952395 - } -} diff --git a/src/snapshots/lora_inspector_rs__tests__weights_by_block.snap 
b/src/snapshots/lora_inspector_rs__tests__weights_by_block.snap deleted file mode 100644 index f6e8088..0000000 --- a/src/snapshots/lora_inspector_rs__tests__weights_by_block.snap +++ /dev/null @@ -1,31 +0,0 @@ ---- -source: src/lib.rs -expression: result ---- -{ - "unet": { - "down_02": 2.2030147237448237, - "down_00": 2.0547019995405065, - "up_02": 2.1206552568330026, - "down_01": 2.230538075096385, - "mid_01": 2.2196846091685765, - "up_01": 2.239336001916731, - "mid_00": 2.152647303863596, - "mid_02": 2.224794629779768, - "up_00": 2.1742004173330853 - }, - "text_encoder": { - "layer_08": 1.233122912380843, - "layer_02": 1.238819093326412, - "layer_07": 1.229626061262309, - "layer_04": 1.2323092332639434, - "layer_01": 1.237123584120902, - "layer_10": 1.2340809383377243, - "layer_06": 1.2311805503592266, - "layer_09": 1.2418230235341936, - "layer_05": 1.227648301288342, - "layer_00": 1.2405343164401639, - "layer_03": 1.232601893524758, - "layer_11": 1.2385596831422123 - } -}