BlackSamorez committed
Commit ad9c9da
1 parent: 36753db

Upload model

Files changed (2):
  1. adapter_config.json +39 -6
  2. adapter_model.bin +2 -2
adapter_config.json CHANGED
@@ -1,18 +1,51 @@
 {
-  "base_model_name_or_path": "decapoda-research/llama-7b-hf",
+  "base_model_name_or_path": "IlyaGusev/llama_7b_ru_turbo_alpaca_lora_merged",
   "bias": "none",
   "enable_lora": null,
   "fan_in_fan_out": false,
   "inference_mode": true,
-  "lora_alpha": 16,
+  "lora_alpha": 32,
   "lora_dropout": 0.05,
   "merge_weights": false,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 8,
+  "r": 32,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "model.layers.27.self_attn.q_proj",
+    "model.layers.27.self_attn.k_proj",
+    "model.layers.27.self_attn.v_proj",
+    "model.layers.27.self_attn.o_proj",
+    "model.layers.27.mlp.gate_proj",
+    "model.layers.27.mlp.down_proj",
+    "model.layers.27.mlp.up_proj",
+    "model.layers.28.self_attn.q_proj",
+    "model.layers.28.self_attn.k_proj",
+    "model.layers.28.self_attn.v_proj",
+    "model.layers.28.self_attn.o_proj",
+    "model.layers.28.mlp.gate_proj",
+    "model.layers.28.mlp.down_proj",
+    "model.layers.28.mlp.up_proj",
+    "model.layers.29.self_attn.q_proj",
+    "model.layers.29.self_attn.k_proj",
+    "model.layers.29.self_attn.v_proj",
+    "model.layers.29.self_attn.o_proj",
+    "model.layers.29.mlp.gate_proj",
+    "model.layers.29.mlp.down_proj",
+    "model.layers.29.mlp.up_proj",
+    "model.layers.30.self_attn.q_proj",
+    "model.layers.30.self_attn.k_proj",
+    "model.layers.30.self_attn.v_proj",
+    "model.layers.30.self_attn.o_proj",
+    "model.layers.30.mlp.gate_proj",
+    "model.layers.30.mlp.down_proj",
+    "model.layers.30.mlp.up_proj",
+    "model.layers.31.self_attn.q_proj",
+    "model.layers.31.self_attn.k_proj",
+    "model.layers.31.self_attn.v_proj",
+    "model.layers.31.self_attn.o_proj",
+    "model.layers.31.mlp.gate_proj",
+    "model.layers.31.mlp.down_proj",
+    "model.layers.31.mlp.up_proj"
   ],
-  "task_type": "CAUSAL_LM"
+  "task_type": null
 }
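In short, this retargets the adapter from decapoda-research/llama-7b-hf to the merged IlyaGusev/llama_7b_ru_turbo_alpaca_lora_merged base, raises the LoRA rank and scaling from r=8, lora_alpha=16 to r=32, lora_alpha=32, and replaces the two generic q_proj/v_proj names (which PEFT matches in every layer) with fully qualified paths covering all seven attention and MLP projections of the last five decoder layers (27-31). A minimal loading sketch with peft and transformers follows; "path/to/this_adapter_repo" is a placeholder, not the actual repo id:

# Minimal sketch, assuming the peft + transformers APIs; the adapter
# repo path below is a placeholder, not the actual repo id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Base model named in the updated adapter_config.json.
base_id = "IlyaGusev/llama_7b_ru_turbo_alpaca_lora_merged"

base = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype=torch.float16)
tokenizer = AutoTokenizer.from_pretrained(base_id)

# PeftModel reads adapter_config.json and injects the LoRA A/B matrices
# only into the 35 listed modules (layers 27-31, all attn + MLP projections).
model = PeftModel.from_pretrained(base, "path/to/this_adapter_repo")
model.eval()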
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3bc8f8d27f5f3ae3f1fb1dddbf9fb06882d494a609b555cba03d54aca16a8aa6
-size 16822989
+oid sha256:11610b76aba2b0e071d9f39e0dac591743771ebd412de0b75860045d2027e797
+size 49996163
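The roughly 3x growth of adapter_model.bin (16,822,989 to 49,996,163 bytes) is consistent with the config change. A back-of-the-envelope check, assuming standard LLaMA-7B shapes (hidden size 4096, MLP intermediate size 11008) and fp32 adapter weights; the remaining few tens of kilobytes are torch serialization overhead:

# Rough parameter count for the new adapter: rank-32 LoRA pairs on all
# seven projections of layers 27-31 (35 modules). Each LoRA pair adds
# r * (in_features + out_features) parameters (A: r x in, B: out x r).
r, hidden, inter = 32, 4096, 11008

attn = 4 * r * (hidden + hidden)                       # q/k/v/o_proj
mlp = 2 * r * (hidden + inter) + r * (inter + hidden)  # gate/up + down_proj
total = 5 * (attn + mlp)                               # layers 27..31

print(total, total * 4)  # 12492800 params -> 49,971,200 bytes, ~= 49,996,163

# The old file matches the same arithmetic: r=8 on q_proj/v_proj across
# all 32 layers gives 32 * 2 * 8 * 8192 = 4,194,304 params
# -> 16,777,216 bytes, ~= 16,822,989.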