davda54 committed on
Commit 042ceea · verified · 1 Parent(s): 30fd0e7

Update modeling_gptbert.py

Files changed (1):
  1. modeling_gptbert.py (+8 -4)
modeling_gptbert.py CHANGED
@@ -844,7 +844,8 @@ class GptBertForCausalLM(GptBertModel):
 
 
 class GptBertForSequenceClassification(GptBertModel):
-    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab"]
+    _keys_to_ignore_on_load_missing = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
+    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
 
     def __init__(self, config: GptBertConfig, **kwargs):
         super().__init__(config, add_mlm_layer=False, **kwargs)
@@ -905,7 +906,8 @@ class GptBertForSequenceClassification(GptBertModel):
 
 
 class GptBertForTokenClassification(GptBertModel):
-    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab"]
+    _keys_to_ignore_on_load_missing = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
+    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
 
     def __init__(self, config: GptBertConfig, **kwargs):
         super().__init__(config, add_mlm_layer=False, **kwargs)
@@ -950,7 +952,8 @@ class GptBertForTokenClassification(GptBertModel):
 
 
 class GptBertForQuestionAnswering(GptBertModel):
-    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab"]
+    _keys_to_ignore_on_load_missing = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
+    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
 
     def __init__(self, config: GptBertConfig, **kwargs):
         super().__init__(config, add_mlm_layer=False, **kwargs)
@@ -1013,7 +1016,8 @@ class GptBertForQuestionAnswering(GptBertModel):
 
 
 class GptBertForMultipleChoice(GptBertModel):
-    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab"]
+    _keys_to_ignore_on_load_missing = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
+    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
 
     def __init__(self, config: GptBertConfig, **kwargs):
         super().__init__(config, add_mlm_layer=False, **kwargs)
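
Note on the change: `_keys_to_ignore_on_load_missing` and `_keys_to_ignore_on_load_unexpected` are class-level lists of regex patterns that Hugging Face Transformers' `PreTrainedModel.from_pretrained` matches against checkpoint keys, so the listed `classifier.emb2vocab` weights no longer trigger missing-key or unexpected-key warnings when these heads are loaded without the MLM layer. Below is a minimal standalone sketch of that filtering step; it is not the transformers source, and `head.dense.weight` is a made-up key used only for illustration.

    import re

    # Patterns copied from the diff above.
    _keys_to_ignore_on_load_missing = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]
    _keys_to_ignore_on_load_unexpected = ["classifier.emb2vocab.weight", "classifier.emb2vocab.bias"]

    def filter_warnable(keys, ignore_patterns):
        # Keep only the keys that do NOT match any ignore pattern;
        # these are the keys a loader would still warn about.
        return [k for k in keys if not any(re.search(p, k) for p in ignore_patterns)]

    # "head.dense.weight" is a hypothetical key for this demo.
    missing = ["classifier.emb2vocab.weight", "head.dense.weight"]
    print(filter_warnable(missing, _keys_to_ignore_on_load_missing))
    # -> ['head.dense.weight']  (the emb2vocab weight is silently ignored)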