imdatta0 committed
Commit 3071e51
Parent: 04ac539

Training in progress, step 13

adapter_config.json CHANGED
@@ -51,7 +51,7 @@
     40
   ],
   "loftq_config": {},
-  "lora_alpha": 8,
+  "lora_alpha": 16,
   "lora_dropout": 0,
   "megatron_config": null,
   "megatron_core": "megatron.core",
@@ -61,13 +61,13 @@
   "rank_pattern": {},
   "revision": "unsloth",
   "target_modules": [
-    "down_proj",
-    "gate_proj",
-    "q_proj",
-    "k_proj",
     "o_proj",
     "v_proj",
-    "up_proj"
+    "q_proj",
+    "up_proj",
+    "down_proj",
+    "k_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
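
The config change is twofold: lora_alpha is doubled from 8 to 16, and the target_modules list is reordered (the module set itself, all seven attention and MLP projections, is unchanged). Since LoRA scales its update by lora_alpha / r, doubling lora_alpha at a fixed rank doubles the adapter's effective scaling. A minimal sketch of the updated settings as a peft LoraConfig follows; the rank is not visible in this hunk, so the r value below is a placeholder, not the repo's actual setting.

# Sketch only: reconstructs the new adapter settings from this diff.
from peft import LoraConfig

lora_config = LoraConfig(
    r=8,  # placeholder: the rank is not shown in this hunk
    lora_alpha=16,  # raised from 8 in this commit
    lora_dropout=0.0,
    target_modules=[  # new ordering from this commit; same seven modules
        "o_proj",
        "v_proj",
        "q_proj",
        "up_proj",
        "down_proj",
        "k_proj",
        "gate_proj",
    ],
    task_type="CAUSAL_LM",
)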
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:43ff287073468fb2c296434e7be50a5e5f1208dfd0bb41e30204477cb2122763
+oid sha256:6c5fcefc886be992d30a3ab573a086937d299cb8a3d0909f9d7367cda6cb5c7a
 size 83945296
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:89fd87d8af6d2616f774850f2f449c533b5101818db86fea737ee9882810d915
+oid sha256:382d0bb02fa7ebb5ad45632e2ba9cc22d77ff97e80e913e774b925d6b8a2421d
 size 5112
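
Both binary files above are stored as Git LFS pointers: each pointer records the SHA-256 of the payload (oid) and its byte count (size). The adapter weights changed content (new oid) but not size (still 83945296 bytes), as did training_args.bin. A minimal sketch, not part of the repo, for checking a downloaded artifact against its pointer fields:

# Sketch: verify a downloaded file against the Git LFS pointer fields above.
import hashlib
from pathlib import Path

def matches_lfs_pointer(path: str, oid: str, size: int) -> bool:
    data = Path(path).read_bytes()  # fine for files this small
    return len(data) == size and hashlib.sha256(data).hexdigest() == oid

# Values from the new training_args.bin pointer in this commit:
print(matches_lfs_pointer(
    "training_args.bin",
    "382d0bb02fa7ebb5ad45632e2ba9cc22d77ff97e80e913e774b925d6b8a2421d",
    5112,
))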