Fix off-by-one in HypernetworkModule layer construction

guaneec 2022-10-26 13:43:04 +08:00 committed by GitHub
parent 2f4c91894d
commit c702d4d0df

@@ -42,7 +42,7 @@ class HypernetworkModule(torch.nn.Module):
             linears.append(torch.nn.Linear(int(dim * layer_structure[i]), int(dim * layer_structure[i+1])))
             # Add an activation func except last layer
-            if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 3:
+            if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 2:
                 pass
             elif activation_func in self.activation_dict:
                 linears.append(self.activation_dict[activation_func]())
@@ -54,7 +54,7 @@ class HypernetworkModule(torch.nn.Module):
                 linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
             # Add dropout except last layer
-            if use_dropout and i < len(layer_structure) - 3:
+            if use_dropout and i < len(layer_structure) - 2:
                 linears.append(torch.nn.Dropout(p=0.3))
         self.linear = torch.nn.Sequential(*linears)