yunjey

name scope added

Showing 1 changed file with 2 additions and 0 deletions
...@@ -125,6 +125,7 @@ class DTN(object):
125 125   f_vars = [var for var in t_vars if 'content_extractor' in var.name]
126 126
127 127   # train op
    128 + with tf.name_scope('source_train_op'):
128 129   self.d_train_op_src = slim.learning.create_train_op(self.d_loss_src, self.d_optimizer_src, variables_to_train=d_vars)
129 130   self.g_train_op_src = slim.learning.create_train_op(self.g_loss_src, self.g_optimizer_src, variables_to_train=g_vars)
130 131   self.f_train_op_src = slim.learning.create_train_op(self.f_loss_src, self.f_optimizer_src, variables_to_train=f_vars)
...@@ -158,6 +159,7 @@ class DTN(object):
158 159   self.g_optimizer_trg = tf.train.AdamOptimizer(self.learning_rate)
159 160
160 161   # train op
    162 + with tf.name_scope('target_train_op'):
161 163   self.d_train_op_trg = slim.learning.create_train_op(self.d_loss_trg, self.d_optimizer_trg, variables_to_train=d_vars)
162 164   self.g_train_op_trg = slim.learning.create_train_op(self.g_loss_trg, self.g_optimizer_trg, variables_to_train=g_vars)
163 165
......