hdallatorre committed on
Commit
d65529d
1 Parent(s): 2744ba7

Update config.json

Files changed (1): config.json (+7, -1)
config.json CHANGED
@@ -1,6 +1,13 @@
 {
   "add_bias_fnn": false,
+  "architectures": [
+    "EsmForMaskedLM"
+  ],
   "attention_probs_dropout_prob": 0.0,
+  "auto_map": {
+    "AutoConfig": "esm_config.EsmConfig",
+    "AutoModelForMaskedLM": "modeling_esm.EsmForMaskedLM"
+  },
   "emb_layer_norm_before": false,
   "esmfold_config": null,
   "hidden_dropout_prob": 0.0,
@@ -11,7 +18,6 @@
   "layer_norm_eps": 1e-12,
   "mask_token_id": 2,
   "max_position_embeddings": 2050,
-  "model_type": "esm",
   "num_attention_heads": 16,
   "num_hidden_layers": 12,
   "pad_token_id": 1,