config_lora.json
{
  "r": 128,
  "lora_alpha": 128,
  "target_modules": [
    "self_attention.to_query",
    "self_attention.to_key",
    "self_attention.to_value",
    "self_attention.out_layer",
    "cross_attention.to_query",
    "cross_attention.to_key",
    "cross_attention.to_value",
    "cross_attention.out_layer",
    "feed_forward.in_layer",
    "feed_forward.out_layer"
  ]
}
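
A minimal sketch of how a config like this might be consumed, assuming the values are meant to populate a peft LoraConfig (the file itself does not say which library or model it targets, so the wiring, file path, and the peft usage below are assumptions):

import json

from peft import LoraConfig  # assumption: the config is intended for the peft library

# Load the adapter hyperparameters from the JSON file shown above.
with open("config_lora.json") as f:
    cfg = json.load(f)

# Build a LoRA configuration for the listed attention and feed-forward
# projections; rank and scaling factor come straight from the file.
lora_config = LoraConfig(
    r=cfg["r"],
    lora_alpha=cfg["lora_alpha"],
    target_modules=cfg["target_modules"],
)

# The config could then be attached to a model with
# peft.get_peft_model(model, lora_config); the target model itself is an
# assumption and is not specified by this file.

With r and lora_alpha both set to 128, the effective LoRA scaling factor lora_alpha / r is 1, and the adapters cover every query, key, value, and output projection in both self- and cross-attention, plus both feed-forward projections.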