@@ -441,7 +441,9 @@ def load_lora_into_text_encoder(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -926,7 +928,9 @@ def load_lora_into_text_encoder(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -1383,7 +1387,9 @@ def load_lora_into_text_encoder(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -2320,7 +2326,9 @@ def load_lora_into_text_encoder(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
@@ -2861,7 +2869,9 @@ def load_lora_into_text_encoder(
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
                 `default_{i}` where i is the total number of adapters being loaded.
-            metadata: TODO
+            metadata (`dict`):
+                Optional LoRA adapter metadata. When supplied, the `LoraConfig` arguments of `peft` won't be derived
+                from the state dict.
             low_cpu_mem_usage (`bool`, *optional*):
                 Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
                 weights.
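
For context on the new `metadata` parameter documented above, here is a minimal sketch of what such a dict might contain, assuming it mirrors the kwargs of `peft`'s `LoraConfig`. How the dict is forwarded to `load_lora_into_text_encoder`, and its exact keys, are assumptions not confirmed by this diff:

```python
# Minimal sketch: building a LoRA adapter metadata dict from a peft LoraConfig.
# Assumption (not shown in this diff): the resulting dict is passed through the
# new `metadata` argument of `load_lora_into_text_encoder`, so the loader does
# not have to derive the LoraConfig arguments from the state dict.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,           # rank of the LoRA update matrices
    lora_alpha=32,  # scaling factor applied to the LoRA updates
    target_modules=["q_proj", "k_proj", "v_proj", "out_proj"],
)

# `to_dict()` serializes the config into plain Python types, which is the shape
# a `metadata` dict is assumed to take here.
metadata = lora_config.to_dict()
```

When `metadata` is omitted, the docstring above indicates the loader falls back to deriving the `LoraConfig` arguments from the state dict itself.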