Skip to content
Snippets Groups Projects
Commit e510fa1f authored by JGlombitza's avatar JGlombitza
Browse files

remove pydot dependency

parent b29c3bb7
No related branches found
No related tags found
No related merge requests found
File added
......@@ -80,9 +80,9 @@ def build_critic():
return critic
generator = build_generator(latent_size)
plot_model(generator, to_file=log_dir + '/generator.png', show_shapes=True)
#plot_model(generator, to_file=log_dir + '/generator.png', show_shapes=True)
critic = build_critic()
plot_model(critic, to_file=log_dir + '/critic.png', show_shapes=True)
#plot_model(critic, to_file=log_dir + '/critic.png', show_shapes=True)
# make trainings model for generator
utils.make_trainable(critic, False)
......@@ -93,7 +93,7 @@ generator_out = generator(generator_in)
critic_out = critic(generator_out)
generator_training = Model(inputs=generator_in, outputs=critic_out)
generator_training.compile(optimizer=Adam(0.0001, beta_1=0.5, beta_2=0.9, decay=0.0), loss=[utils.wasserstein_loss])
plot_model(generator_training, to_file=log_dir + '/generator_training.png', show_shapes=True)
#plot_model(generator_training, to_file=log_dir + '/generator_training.png', show_shapes=True)
# make trainings model for critic
utils.make_trainable(critic, True)
......@@ -110,7 +110,7 @@ critic_training = Model(inputs=[generator_in_critic_training, shower_in_critic_t
gradient_penalty = partial(utils.gradient_penalty_loss, averaged_batch=averaged_batch, penalty_weight=GRADIENT_PENALTY_WEIGHT)
gradient_penalty.__name__ = 'gradient_penalty'
critic_training.compile(optimizer=Adam(0.0001, beta_1=0.5, beta_2=0.9, decay=0.0), loss=[utils.wasserstein_loss, utils.wasserstein_loss, gradient_penalty])
plot_model(critic_training, to_file=log_dir + '/critic_training.png', show_shapes=True)
#plot_model(critic_training, to_file=log_dir + '/critic_training.png', show_shapes=True)
# For Wassersteinloss
positive_y = np.ones(BATCH_SIZE)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment