
Commit a8feec0

Merge branch 'master' of https://github.com/ddbourgin/numpy-ml into master

2 parents: d8ee5ce + 741954b
File tree

- numpy_ml/neural_nets/initializers/initializers.py
- numpy_ml/neural_nets/layers/layers.py

2 files changed: +13 -11 lines

numpy_ml/neural_nets/initializers/initializers.py

Lines changed: 9 additions & 7 deletions
```diff
@@ -122,13 +122,14 @@ def init_from_dict(self):
             raise ValueError("Must have `hyperparameters` key: {}".format(S))
 
         if sc and sc["id"] == "ConstantScheduler":
-            scheduler = ConstantScheduler().set_params(sc)
+            scheduler = ConstantScheduler()
         elif sc and sc["id"] == "ExponentialScheduler":
-            scheduler = ExponentialScheduler().set_params(sc)
+            scheduler = ExponentialScheduler()
         elif sc and sc["id"] == "NoamScheduler":
-            scheduler = NoamScheduler().set_params(sc)
+            scheduler = NoamScheduler()
         elif sc:
             raise NotImplementedError("{}".format(sc["id"]))
+        scheduler.set_params(sc)
         return scheduler
 
 
```

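This hunk reads like a fix for a classic chaining bug: if the schedulers' `set_params` mutates the object in place and returns `None`, as in-place setters conventionally do, then the old `ConstantScheduler().set_params(sc)` bound `None` to `scheduler` instead of the configured object. A minimal sketch of the failure mode and the fix under that assumption, using a hypothetical stub rather than the numpy-ml class itself:

```python
class ConstantSchedulerStub:
    # Hypothetical stand-in, not the numpy-ml scheduler.
    def __init__(self):
        self.hyperparameters = {"id": "ConstantScheduler", "lr": 0.01}

    def set_params(self, hparam_dict):
        # Mutates self in place; no explicit return, so the call evaluates to None.
        for k, v in hparam_dict.items():
            if k in self.hyperparameters:
                self.hyperparameters[k] = v


sc = {"id": "ConstantScheduler", "lr": 0.05}

# Old pattern (left-hand side of the diff): the chained call binds None.
scheduler = ConstantSchedulerStub().set_params(sc)
print(scheduler)  # None

# New pattern (right-hand side): construct first, then configure in place.
scheduler = ConstantSchedulerStub()
scheduler.set_params(sc)
print(scheduler.hyperparameters["lr"])  # 0.05
```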
```diff
@@ -182,15 +183,16 @@ def init_from_dict(self):
             raise ValueError("Must have `hyperparemeters` key: {}".format(O))
 
         if op and op["id"] == "SGD":
-            optimizer = SGD().set_params(op, cc)
+            optimizer = SGD()
         elif op and op["id"] == "RMSProp":
-            optimizer = RMSProp().set_params(op, cc)
+            optimizer = RMSProp()
         elif op and op["id"] == "AdaGrad":
-            optimizer = AdaGrad().set_params(op, cc)
+            optimizer = AdaGrad()
         elif op and op["id"] == "Adam":
-            optimizer = Adam().set_params(op, cc)
+            optimizer = Adam()
         elif op:
             raise NotImplementedError("{}".format(op["id"]))
+        optimizer.set_params(op, cc)
         return optimizer
 
 
```

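The optimizer hunk applies the same construct-then-configure fix, and hoisting the single `set_params(op, cc)` call below the `if`/`elif` chain means a future optimizer branch cannot forget it. For illustration only, here is a table-driven equivalent of that dispatch; the registry and stub classes are hypothetical stand-ins, not the repo's code:

```python
class _OptimizerStub:
    # Stand-in with the same set_params(hparams, cache) shape as the diff.
    def __init__(self):
        self.hyperparameters = {}
        self.cache = {}

    def set_params(self, hparam_dict, cache_dict):
        self.hyperparameters.update(hparam_dict)
        self.cache.update(cache_dict)


class SGDStub(_OptimizerStub):
    pass


class AdamStub(_OptimizerStub):
    pass


_OPTIMIZERS = {"SGD": SGDStub, "Adam": AdamStub}  # hypothetical registry


def init_optimizer_from_dict(op, cc):
    if op["id"] not in _OPTIMIZERS:
        raise NotImplementedError("{}".format(op["id"]))
    optimizer = _OPTIMIZERS[op["id"]]()
    optimizer.set_params(op, cc)  # single shared call, as in the new code
    return optimizer


opt = init_optimizer_from_dict({"id": "Adam", "lr": 1e-3}, {})
print(type(opt).__name__, opt.hyperparameters["lr"])  # Adam 0.001
```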
numpy_ml/neural_nets/layers/layers.py

Lines changed: 4 additions & 4 deletions
```diff
@@ -119,12 +119,12 @@ def set_params(self, summary_dict):
             if k in self.hyperparameters:
                 if k == "act_fn":
                     layer.act_fn = ActivationInitializer(v)()
-                if k == "optimizer":
+                elif k == "optimizer":
                     layer.optimizer = OptimizerInitializer(sd[k])()
-                if k not in ["wrappers", "optimizer"]:
-                    setattr(layer, k, v)
-                if k == "wrappers":
+                elif k == "wrappers":
                     layer = init_wrappers(layer, sd[k])
+                elif k not in ["wrappers", "optimizer"]:
+                    setattr(layer, k, v)
         return layer
 
     def summary(self):
```

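The `layers.py` hunk fixes an independent-`if` bug: for `k == "act_fn"` the old code first built the activation object, then also matched `k not in ["wrappers", "optimizer"]` and clobbered it with the raw string via `setattr`. A minimal repro of that shadowing, with plain stand-ins rather than the numpy-ml layer and `ActivationInitializer`:

```python
class LayerStub:
    pass


def make_act_fn(v):
    # Stand-in for ActivationInitializer(v)(): returns a callable activation.
    return lambda x: max(0.0, x)


layer, k, v = LayerStub(), "act_fn", "ReLU"

# Old behavior: independent ifs, so both blocks run for k == "act_fn".
if k == "act_fn":
    layer.act_fn = make_act_fn(v)
if k not in ["wrappers", "optimizer"]:
    setattr(layer, k, v)  # clobbers the callable with the raw string
print(layer.act_fn)  # 'ReLU' -- a string, no longer callable

# New behavior: elif makes the branches mutually exclusive.
if k == "act_fn":
    layer.act_fn = make_act_fn(v)
elif k not in ["wrappers", "optimizer"]:
    setattr(layer, k, v)
print(layer.act_fn(-2.0), layer.act_fn(3.0))  # 0.0 3.0
```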