Fix Linear Layer Bias Initialization

This commit is contained in:
XxAlonexX 2025-02-04 10:38:45 +05:30
parent b5d872ead0
commit 6a30b43249

View File

@ -185,7 +185,7 @@ class Linear(nn.Module):
         else:
             self.register_parameter("scale", None)
         if bias:
-            self.bias = nn.Parameter(torch.empty(self.part_out_features))
+            self.bias = nn.Parameter(torch.empty(out_features))
         else:
             self.register_parameter("bias", None)