{ "r": 128, "lora_alpha": 128, "target_modules": [ "self_attention.to_query", "self_attention.to_key", "self_attention.to_value", "self_attention.out_layer", "cross_attention.to_query", "cross_attention.to_key", "cross_attention.to_value", "cross_attention.out_layer", "feed_forward.in_layer", "feed_forward.out_layer" ] }