{
  "r": 128,
  "lora_alpha": 128,
  "target_modules": [
    "self_attention.to_query",
    "self_attention.to_key",
    "self_attention.to_value",
    "self_attention.out_layer",
    "cross_attention.to_query",
    "cross_attention.to_key",
    "cross_attention.to_value",
    "cross_attention.out_layer",
    "feed_forward.in_layer",
    "feed_forward.out_layer"
  ]
}
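
This is a LoRA adapter configuration: rank 128, alpha 128, applied to the query/key/value/output projections of both the self-attention and cross-attention blocks as well as the two feed-forward projections. A minimal sketch of how such a file could be consumed, assuming it maps onto Hugging Face peft's LoraConfig (the use of peft and the file path are assumptions for illustration, not something the file itself specifies):

import json
from peft import LoraConfig

# Load the JSON config shown above (path is hypothetical).
with open("lora_config.json") as f:
    cfg = json.load(f)

# Build a peft LoraConfig from the same fields. This assumes the
# target_modules strings match attribute paths inside the model
# being wrapped (e.g. self_attention.to_query).
lora_config = LoraConfig(
    r=cfg["r"],
    lora_alpha=cfg["lora_alpha"],
    target_modules=cfg["target_modules"],
)

With lora_alpha equal to r, the effective LoRA scaling factor (alpha / r) is 1.0, so the adapter's update is applied at full strength without rescaling.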