
Commit 36a7283

add CoulombGAN / gradient histogram example
1 parent f38bc77

2 files changed: +27 -1


config.py (+1, -1)
@@ -1,7 +1,7 @@
 from models import *


-model_zoo = ['DCGAN', 'LSGAN', 'WGAN', 'WGAN-GP', 'EBGAN', 'BEGAN', 'DRAGAN']
+model_zoo = ['DCGAN', 'LSGAN', 'WGAN', 'WGAN-GP', 'EBGAN', 'BEGAN', 'DRAGAN', 'CoulombGAN']


 def get_model(mtype, name, training):
     model = None
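
The diff cuts off inside get_model(), so for orientation only: below is a minimal sketch of how a model_zoo registry like this is typically wired to get_model(). The getattr dispatch, the hyphen-to-underscore mapping, and the constructor signature cls(name=..., training=...) are assumptions for illustration, not the repository's actual code.

import models  # hypothetical: the repo's models.py

model_zoo = ['DCGAN', 'LSGAN', 'WGAN', 'WGAN-GP', 'EBGAN', 'BEGAN', 'DRAGAN', 'CoulombGAN']

def get_model(mtype, name, training):
    # Hypothetical dispatch: look the zoo name up as a class in models.py.
    # 'WGAN-GP' cannot be a literal Python class name because of the hyphen,
    # so the replace() below is one plausible mapping, purely illustrative.
    model = None
    if mtype in model_zoo:
        cls = getattr(models, mtype.replace('-', '_'), None)
        if cls is not None:
            model = cls(name=name, training=training)
    if model is None:
        raise ValueError('unknown model type: %s' % mtype)
    return model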

utils.py (+26)
@@ -142,3 +142,29 @@ def merge(images, size):
         return img
     else:
         raise ValueError('in merge(images,size) images parameter must have dimensions: HxW or HxWx3 or HxWx4')
+
+
+'''Sugar for gradient histograms
+# D_train_op = tf.train.AdamOptimizer(learning_rate=self.D_lr, beta1=self.beta1, beta2=self.beta2).\
+#     minimize(D_loss, var_list=D_vars)
+D_opt = tf.train.AdamOptimizer(learning_rate=self.D_lr, beta1=self.beta1, beta2=self.beta2)
+D_grads = tf.gradients(D_loss, D_vars)
+D_grads_and_vars = list(zip(D_grads, D_vars))
+D_train_op = D_opt.apply_gradients(grads_and_vars=D_grads_and_vars)
+
+# G_train_op = tf.train.AdamOptimizer(learning_rate=self.G_lr, beta1=self.beta1, beta2=self.beta2).\
+#     minimize(G_loss, var_list=G_vars, global_step=global_step)
+G_opt = tf.train.AdamOptimizer(learning_rate=self.G_lr, beta1=self.beta1, beta2=self.beta2)
+G_grads = tf.gradients(G_loss, G_vars)
+G_grads_and_vars = list(zip(G_grads, G_vars))
+G_train_op = G_opt.apply_gradients(grads_and_vars=G_grads_and_vars, global_step=global_step)
+
+
+for var in tf.trainable_variables():
+    tf.summary.histogram(var.op.name, var)
+
+for grad, var in D_grads_and_vars:
+    tf.summary.histogram('D/' + var.name + '/gradient', grad)
+for grad, var in G_grads_and_vars:
+    tf.summary.histogram('G/' + var.name + '/gradient', grad)
+'''
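
The block above is kept as a commented-out docstring: it splits minimize() into explicit gradient computation and application so the gradient tensors can be histogrammed, but it never merges or writes the summaries. Below is a minimal, self-contained sketch of the full pattern using the same TF1-style APIs; the toy linear "discriminator", the learning rate, and the './logs' directory are stand-ins for this illustration, not part of this commit.

import tensorflow as tf

# Toy stand-in for a discriminator: a single linear layer.
x = tf.placeholder(tf.float32, [None, 1], name='x')
y = tf.placeholder(tf.float32, [None, 1], name='y')
W = tf.get_variable('W', shape=[1, 1])
b = tf.get_variable('b', shape=[1])
loss = tf.reduce_mean(tf.square(tf.matmul(x, W) + b - y))
D_vars = [W, b]

global_step = tf.train.get_or_create_global_step()

# Same split as in the snippet above: compute gradients explicitly
# instead of calling minimize(), so the tensors are available to summarize.
opt = tf.train.AdamOptimizer(learning_rate=1e-3)
grads = tf.gradients(loss, D_vars)
grads_and_vars = list(zip(grads, D_vars))
train_op = opt.apply_gradients(grads_and_vars, global_step=global_step)

# Histogram each variable and its gradient. var.op.name drops the ':0'
# suffix of var.name, which TF would otherwise sanitize out of the tag.
for grad, var in grads_and_vars:
    tf.summary.histogram(var.op.name, var)
    if grad is not None:
        tf.summary.histogram(var.op.name + '/gradient', grad)

summary_op = tf.summary.merge_all()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    writer = tf.summary.FileWriter('./logs', sess.graph)
    for step in range(100):
        _, summ = sess.run([train_op, summary_op],
                           feed_dict={x: [[1.0]], y: [[2.0]]})
        writer.add_summary(summ, global_step=step)
    writer.close()

With this running, tensorboard --logdir ./logs shows the weight and gradient histograms under the Distributions and Histograms tabs.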
