TheTempleofTwo committed on
Commit
b9ab862
·
verified ·
1 Parent(s): 6291bdc

Upload adapter_config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. adapter_config.json +10 -0
adapter_config.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1 + {
2 +   "lora_layers": 8,
3 +   "num_layers": 28,
4 +   "lora_parameters": {
5 +     "rank": 16,
6 +     "scale": 2.0,
7 +     "dropout": 0.05,
8 +     "keys": ["self_attn.q_proj", "self_attn.v_proj"]
9 +   }
10 + }