fix: use the base layers weight in mistral rocm (#2155)

drbh 2024-07-02 05:56:25 -04:00 committed by GitHub
parent 5d97e0c4a3
commit b966bc0d35
1 changed file with 3 additions and 1 deletion


@@ -315,7 +315,9 @@ class MistralMLP(nn.Module):
                 dtype=hidden_states.dtype,
                 device="cuda",
             )
-            _custom_C.LLMM_Silu(self.gate_up_proj.linear.weight, hidden_states, out, 8)
+            _custom_C.LLMM_Silu(
+                self.gate_up_proj.base_layer.linear.weight, hidden_states, out, 8
+            )
             return self.down_proj(out, adapter_data)
         else:
             gate_up_states = self.gate_up_proj(hidden_states, adapter_data)
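
For context, a rough sketch of why the attribute path changes. The class names below (TensorParallelColumnLinear, LoraWrapper) are hypothetical stand-ins; only the base_layer.linear.weight attribute chain comes from the diff. Once gate_up_proj is wrapped for adapter (LoRA) support, the raw nn.Linear weight no longer sits at .linear.weight, so the ROCm LLMM_Silu fast path must read it from the wrapper's base_layer instead.

    # Hypothetical sketch: only the attribute chain mirrors the diff above.
    import torch
    import torch.nn as nn

    class TensorParallelColumnLinear(nn.Module):
        """Stand-in for the unwrapped projection: weight lives at .linear.weight."""

        def __init__(self, in_features: int, out_features: int):
            super().__init__()
            self.linear = nn.Linear(in_features, out_features, bias=False)

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return self.linear(x)

    class LoraWrapper(nn.Module):
        """Stand-in for the adapter wrapper: the original layer becomes .base_layer."""

        def __init__(self, base_layer: TensorParallelColumnLinear):
            super().__init__()
            self.base_layer = base_layer

        def forward(self, x: torch.Tensor, adapter_data=None) -> torch.Tensor:
            # Adapter deltas would be applied here; omitted for brevity.
            return self.base_layer(x)

    gate_up_proj = LoraWrapper(TensorParallelColumnLinear(16, 32))

    # The old fast path read gate_up_proj.linear.weight, which does not exist
    # on the wrapper; the base layer's weight must be passed to the kernel.
    assert not hasattr(gate_up_proj, "linear")
    weight = gate_up_proj.base_layer.linear.weight
    print(weight.shape)  # torch.Size([32, 16])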