fix: dont pass hidden_size twice to NeobertMLP
model.py CHANGED

@@ -139,7 +139,7 @@ class EncoderBlock(nn.Module):
         if XFORMERS_AVAILABLE:
             self.ffn = SwiGLU(config.hidden_size, intermediate_size, config.hidden_size, bias=False)
         else:
-            self.ffn = NeobertMLP(config.hidden_size, intermediate_size, config.hidden_size, bias=False)
+            self.ffn = NeobertMLP(config.hidden_size, intermediate_size, bias=False)

         # Layer norms
         self.attention_norm = nn.RMSNorm(config.hidden_size, config.norm_eps)
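
For context, one plausible reading of the bug this commit fixes (the NeobertMLP signature below is assumed for illustration, not taken from model.py): if NeobertMLP only accepts a hidden size, an intermediate size, and a bias flag, then the removed call passes config.hidden_size a second time as an extra positional argument, which lands on the bias parameter and collides with the bias=False keyword. A minimal sketch under that assumption:

    import torch.nn as nn

    class NeobertMLP(nn.Module):
        # Assumed signature: unlike SwiGLU, the output width is always hidden_size,
        # so it is not passed as a separate argument.
        def __init__(self, hidden_size, intermediate_size, bias=True):
            super().__init__()
            self.up = nn.Linear(hidden_size, intermediate_size, bias=bias)
            self.down = nn.Linear(intermediate_size, hidden_size, bias=bias)
            self.act = nn.GELU()

        def forward(self, x):
            return self.down(self.act(self.up(x)))

    # Old call: the second config.hidden_size binds to `bias` positionally and then
    # clashes with bias=False -> TypeError: got multiple values for argument 'bias'.
    # NeobertMLP(config.hidden_size, intermediate_size, config.hidden_size, bias=False)

    # Fixed call from this commit:
    # NeobertMLP(config.hidden_size, intermediate_size, bias=False)

Under this reading, only SwiGLU needs the output size spelled out explicitly, which is why the SwiGLU branch keeps the third config.hidden_size argument while the NeobertMLP branch drops it.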