eliebak (HF staff) committed
Commit d11feaf
1 Parent(s): e756100

Upload config

Files changed (1)
  1. config.json +1 -2
config.json CHANGED
@@ -2,7 +2,7 @@
   "_name_or_path": "wsd_124M_150B_edu",
   "activation_function": "gelu_new",
   "architectures": [
-    "GPT2Model"
+    "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0,
   "bos_token_id": 50256,
@@ -25,7 +25,6 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
-  "torch_dtype": "float32",
   "transformers_version": "4.41.0",
   "use_cache": true,
   "vocab_size": 50257