text
stringlengths
0
4.99k
leaky_re_lu_21 (LeakyReLU) (None, None, 64) 0 add_8[0][0]
__________________________________________________________________________________________________
weight_normalization_21 (Weight (None, None, 32) 32865 leaky_re_lu_21[0][0]
__________________________________________________________________________________________________
leaky_re_lu_22 (LeakyReLU) (None, None, 32) 0 weight_normalization_21[0][0]
__________________________________________________________________________________________________
weight_normalization_22 (Weight (None, None, 32) 3137 leaky_re_lu_22[0][0]
__________________________________________________________________________________________________
leaky_re_lu_23 (LeakyReLU) (None, None, 32) 0 weight_normalization_22[0][0]
__________________________________________________________________________________________________
weight_normalization_23 (Weight (None, None, 32) 3137 leaky_re_lu_23[0][0]
__________________________________________________________________________________________________
add_9 (Add) (None, None, 32) 0 weight_normalization_23[0][0]
leaky_re_lu_22[0][0]
__________________________________________________________________________________________________
leaky_re_lu_24 (LeakyReLU) (None, None, 32) 0 add_9[0][0]
__________________________________________________________________________________________________
weight_normalization_24 (Weight (None, None, 32) 3137 leaky_re_lu_24[0][0]
__________________________________________________________________________________________________
leaky_re_lu_25 (LeakyReLU) (None, None, 32) 0 weight_normalization_24[0][0]
__________________________________________________________________________________________________
weight_normalization_25 (Weight (None, None, 32) 3137 leaky_re_lu_25[0][0]
__________________________________________________________________________________________________
add_10 (Add) (None, None, 32) 0 add_9[0][0]
weight_normalization_25[0][0]
__________________________________________________________________________________________________
leaky_re_lu_26 (LeakyReLU) (None, None, 32) 0 add_10[0][0]
__________________________________________________________________________________________________
weight_normalization_26 (Weight (None, None, 32) 3137 leaky_re_lu_26[0][0]
__________________________________________________________________________________________________
leaky_re_lu_27 (LeakyReLU) (None, None, 32) 0 weight_normalization_26[0][0]
__________________________________________________________________________________________________
weight_normalization_27 (Weight (None, None, 32) 3137 leaky_re_lu_27[0][0]
__________________________________________________________________________________________________
add_11 (Add) (None, None, 32) 0 weight_normalization_27[0][0]
add_10[0][0]
__________________________________________________________________________________________________
leaky_re_lu_28 (LeakyReLU) (None, None, 32) 0 add_11[0][0]
__________________________________________________________________________________________________
weight_normalization_28 (Weight (None, None, 1) 452 leaky_re_lu_28[0][0]
==================================================================================================
Total params: 4,646,912
Trainable params: 4,646,658
Non-trainable params: 254
__________________________________________________________________________________________________
Create the discriminator
def create_discriminator(input_shape):
    """Build the multi-scale discriminator.

    Three copies of ``discriminator_block`` examine the waveform at
    progressively coarser time resolutions: the raw input, and the input
    after one and two rounds of average pooling. The model returns the
    three blocks' feature maps as a list.

    Args:
        input_shape: Shape of the audio input, excluding the batch axis
            (dynamic length is allowed, e.g. ``(None, 1)``).

    Returns:
        A ``keras.Model`` mapping the input to a list of three outputs,
        one per resolution scale.
    """
    signal = keras.Input(input_shape)

    # Collect the input at each scale: full resolution plus two
    # successively average-pooled (downsampled) versions.
    scales = [signal]
    for _ in range(2):
        scales.append(layers.AveragePooling1D()(scales[-1]))

    # One independent discriminator block per scale.
    feature_maps = [discriminator_block(scale) for scale in scales]
    return keras.Model(signal, feature_maps)
# We use a dynamic input shape for the discriminator
# This is done because the input shape for the generator is unknown
# Instantiate the multi-scale discriminator with a variable-length,
# single-channel waveform input (time axis is None), then print its
# layer-by-layer summary (the table that follows in this file).
discriminator = create_discriminator((None, 1))
discriminator.summary()
Model: "model_1"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_2 (InputLayer) [(None, None, 1)] 0
__________________________________________________________________________________________________
average_pooling1d (AveragePooli (None, None, 1) 0 input_2[0][0]
__________________________________________________________________________________________________
average_pooling1d_1 (AveragePoo (None, None, 1) 0 average_pooling1d[0][0]
__________________________________________________________________________________________________
weight_normalization_29 (Weight (None, None, 16) 273 input_2[0][0]
__________________________________________________________________________________________________
weight_normalization_36 (Weight (None, None, 16) 273 average_pooling1d[0][0]
__________________________________________________________________________________________________
weight_normalization_43 (Weight (None, None, 16) 273 average_pooling1d_1[0][0]
__________________________________________________________________________________________________
leaky_re_lu_29 (LeakyReLU) (None, None, 16) 0 weight_normalization_29[0][0]
__________________________________________________________________________________________________
leaky_re_lu_35 (LeakyReLU) (None, None, 16) 0 weight_normalization_36[0][0]
__________________________________________________________________________________________________
leaky_re_lu_41 (LeakyReLU) (None, None, 16) 0 weight_normalization_43[0][0]
__________________________________________________________________________________________________
weight_normalization_30 (Weight (None, None, 64) 10625 leaky_re_lu_29[0][0]
__________________________________________________________________________________________________
weight_normalization_37 (Weight (None, None, 64) 10625 leaky_re_lu_35[0][0]
__________________________________________________________________________________________________
weight_normalization_44 (Weight (None, None, 64) 10625 leaky_re_lu_41[0][0]
__________________________________________________________________________________________________
leaky_re_lu_30 (LeakyReLU) (None, None, 64) 0 weight_normalization_30[0][0]
__________________________________________________________________________________________________
leaky_re_lu_36 (LeakyReLU) (None, None, 64) 0 weight_normalization_37[0][0]
__________________________________________________________________________________________________
leaky_re_lu_42 (LeakyReLU) (None, None, 64) 0 weight_normalization_44[0][0]
__________________________________________________________________________________________________
weight_normalization_31 (Weight (None, None, 256) 42497 leaky_re_lu_30[0][0]
__________________________________________________________________________________________________
weight_normalization_38 (Weight (None, None, 256) 42497 leaky_re_lu_36[0][0]
__________________________________________________________________________________________________
weight_normalization_45 (Weight (None, None, 256) 42497 leaky_re_lu_42[0][0]