Fix merge
parent 91bb35b1e6
commit b6a8bb123b
1 changed file with 2 additions and 2 deletions
@@ -60,7 +60,7 @@ class HypernetworkModule(torch.nn.Module):
                 linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
 
             # Add dropout except last layer
-            if use_dropout and i < len(layer_structure) - 2:
+            if use_dropout and i < len(layer_structure) - 3:
                 linears.append(torch.nn.Dropout(p=0.3))
 
         self.linear = torch.nn.Sequential(*linears)
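Note: a minimal sketch of what the changed bound means, assuming (as the context lines suggest) that the construction loop runs i over range(len(layer_structure) - 1); the helper name and the example structures below are hypothetical and are used only to compare which loop indices get a Dropout layer under the old and new condition.

# Hypothetical helper: list the loop indices that would receive
# torch.nn.Dropout(p=0.3) for a given bound offset (2 = old, 3 = new).
def dropout_indices(layer_structure, offset):
    return [i for i in range(len(layer_structure) - 1)
            if i < len(layer_structure) - offset]

print(dropout_indices([1, 2, 1], 2))     # old "- 2" bound -> [0]
print(dropout_indices([1, 2, 1], 3))     # new "- 3" bound -> []
print(dropout_indices([1, 2, 2, 1], 3))  # new bound, deeper structure -> [0]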
@@ -126,7 +126,7 @@ class Hypernetwork:
     filename = None
     name = None
 
-    def __init__(self, name=None, enable_sizes=None, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False, activate_output=False)
+    def __init__(self, name=None, enable_sizes=None, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False, activate_output=False):
         self.filename = None
         self.name = name
         self.layers = {}
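The second hunk only restores the colon missing from the def line, presumably lost in the merge. For reference, a hypothetical construction call using the keyword arguments from that signature; all values below are illustrative, not defaults from the repository.

hn = Hypernetwork(
    name="example",
    enable_sizes=[768],
    layer_structure=[1, 2, 1],
    activation_func="relu",
    weight_init="Normal",
    add_layer_norm=False,
    use_dropout=True,
    activate_output=False,
)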