Anna committed on
Commit 57225a4 · verified · 1 Parent(s): 1a7c801

Upload config_lora.json with huggingface_hub
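The commit message points to huggingface_hub's file-upload API. A minimal sketch of how such an upload could be done (the repo id below is a placeholder, not taken from this commit; authentication is assumed to come from a prior `huggingface-cli login`):

# Hypothetical upload sketch using huggingface_hub's HfApi.upload_file.
# "your-username/your-repo" is a placeholder repo id, not part of this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="config_lora.json",   # local file to push
    path_in_repo="config_lora.json",      # destination path in the repo
    repo_id="your-username/your-repo",
    commit_message="Upload config_lora.json with huggingface_hub",
)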

Files changed (1)
  1. config_lora.json +16 -0
config_lora.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "r": 128,
+   "lora_alpha": 128,
+   "target_modules": [
+     "self_attention.to_query",
+     "self_attention.to_key",
+     "self_attention.to_value",
+     "self_attention.out_layer",
+     "cross_attention.to_query",
+     "cross_attention.to_key",
+     "cross_attention.to_value",
+     "cross_attention.out_layer",
+     "feed_forward.in_layer",
+     "feed_forward.out_layer"
+   ]
+ }
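For context, a config with these keys (r, lora_alpha, target_modules) matches the shape of a LoRA adapter configuration. A minimal sketch of how it might be consumed, assuming the PEFT library is the reader (this commit does not state which library loads the file):

# Sketch: building a peft.LoraConfig from config_lora.json.
# Assumes peft is the consumer; the module names (e.g. "self_attention.to_query")
# must match submodule names in the target model, which is not part of this commit.
import json
from peft import LoraConfig, get_peft_model

with open("config_lora.json") as f:
    cfg = json.load(f)

lora_config = LoraConfig(
    r=cfg["r"],                       # LoRA rank (128)
    lora_alpha=cfg["lora_alpha"],     # LoRA scaling factor (128)
    target_modules=cfg["target_modules"],
)
# model = get_peft_model(base_model, lora_config)  # base_model is assumed to exist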