Skip to content

Commit 591b935

Browse files
All activation functions are set to tanh, except for the output layer of the generator network (relu)
1 parent 370f03f commit 591b935

File tree

1 file changed

+8
-3
lines changed

1 file changed

+8
-3
lines changed

script.js

Lines changed: 8 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -49,7 +49,7 @@ const generator = {
4949
);
5050
}
5151
),
52-
"optimizer": tf.train.adam(0.0001)
52+
"optimizer": tf.train.adam(0.001)
5353
};
5454

5555
if (logData) {
@@ -59,7 +59,12 @@ if (logData) {
5959
generator.model.add(tf.layers.dense({units: numParameters, inputShape: [numParameters]}));
6060
for (var i = 0; i < numLayers; i ++) {
6161
const layerSize = Math.round(imageVolume / (2 ** ((numLayers - 1) - i)));
62-
generator.model.add(tf.layers.dense({units: layerSize, activation: "sigmoid"}));
62+
if (i == numLayers - 1) {
63+
generator.model.add(tf.layers.dense({units: layerSize, activation: "relu"}));
64+
}
65+
else {
66+
generator.model.add(tf.layers.dense({units: layerSize, activation: "tanh"}));
67+
}
6368
if (logData) {
6469
console.log(layerSize);
6570
}
@@ -90,7 +95,7 @@ if (logData) {
9095
discriminator.model.add(tf.layers.dense({units: imageVolume, inputShape: [imageVolume]}));
9196
for (var i = 0; i < numLayers; i ++) {
9297
const layerSize = Math.round(imageVolume / (2 ** (i + 1)));
93-
discriminator.model.add(tf.layers.dense({units: layerSize, activation: "sigmoid"}));
98+
discriminator.model.add(tf.layers.dense({units: layerSize, activation: "relu"}));
9499
if (logData) {
95100
console.log(layerSize);
96101
}

0 commit comments

Comments (0)