fix: fix num_ln_in_parallel_attn attribute name typo in RWConfig (#2350)
Co-authored-by: Islam Almersawi <islam.almersawi@openinnovation.ai>
This commit is contained in:
parent
133015f408
commit
8094ecfc9e
|
@@ -94,7 +94,7 @@ class RWConfig(PretrainedConfig):
|
|||
else kwargs.pop("n_head", 8)
|
||||
)
|
||||
self.layer_norm_epsilon = layer_norm_epsilon
|
||||
self.num_ln_in_parallel_attention = num_ln_in_prallel_attention
|
||||
self.num_ln_in_parallel_attn = num_ln_in_prallel_attention
|
||||
self.initializer_range = initializer_range
|
||||
self.use_cache = use_cache
|
||||
self.hidden_dropout = hidden_dropout
|
||||
|
|
Loading…
Reference in New Issue