fix: use the base layers weight in mistral rocm (#2155)
This commit is contained in:
parent
5d97e0c4a3
commit
b966bc0d35
@@ -315,7 +315,9 @@ class MistralMLP(nn.Module):
                 dtype=hidden_states.dtype,
                 device="cuda",
             )
-            _custom_C.LLMM_Silu(self.gate_up_proj.linear.weight, hidden_states, out, 8)
+            _custom_C.LLMM_Silu(
+                self.gate_up_proj.base_layer.linear.weight, hidden_states, out, 8
+            )
             return self.down_proj(out, adapter_data)
         else:
             gate_up_states = self.gate_up_proj(hidden_states, adapter_data)
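For context, a minimal illustrative sketch of why the weight is now read through `base_layer` on the ROCm fast path. The class and attribute names below are hypothetical stand-ins, assuming the gate/up projection is wrapped in an adapter-aware layer so LoRA deltas can be applied on top of frozen base weights; the raw tensor the custom kernel needs then lives one level deeper:

```python
import torch
import torch.nn as nn


class FastLinear(nn.Module):
    """Plain linear module holding the actual weight tensor."""

    def __init__(self, weight: torch.Tensor):
        super().__init__()
        self.weight = nn.Parameter(weight, requires_grad=False)


class BaseLayer(nn.Module):
    """Thin wrapper exposing the underlying linear module as `.linear`."""

    def __init__(self, linear: FastLinear):
        super().__init__()
        self.linear = linear


class AdapterWrappedLinear(nn.Module):
    """Adapter-aware wrapper: the frozen base weights sit on `.base_layer`,
    while adapter (LoRA) contributions are applied separately in forward()."""

    def __init__(self, base_layer: BaseLayer):
        super().__init__()
        self.base_layer = base_layer


# With this wrapping, the old access path `gate_up_proj.linear.weight` no
# longer resolves to the base weight tensor; the kernel call has to go
# through `gate_up_proj.base_layer.linear.weight` instead, which is what
# this commit changes.
gate_up_proj = AdapterWrappedLinear(BaseLayer(FastLinear(torch.randn(8, 8))))
weight = gate_up_proj.base_layer.linear.weight
print(weight.shape)
```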