김성주

fixed batch loop

@@ -114,7 +114,7 @@ with tf.Session() as sess:
     loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter()
 
     ### train part
-    for i in trange(args.train_batch_num):
+    for i in trange(args.train_batch_num+1):
         _, __y_pred, __y_true, __loss, __global_step, __lr = sess.run(
             [train_op, y_pred, y_true, loss, global_step, learning_rate],
             feed_dict={is_training: True})
@@ -1766,7 +1766,7 @@
 "    loss_total, loss_xy, loss_wh, loss_conf, loss_class = AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter(), AverageMeter()\n",
 "\n",
 "    ### train part\n",
-"    for i in trange(train_batch_num):\n",
+"    for i in trange(train_batch_num+1):\n",
 "        _, __y_pred, __y_true, __loss, __global_step, __lr = sess.run(\n",
 "            [train_op, y_pred, y_true, loss, global_step, learning_rate],\n",
 "            feed_dict={is_training: True})\n",
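
Both hunks make the same change: the training loop now runs `train_batch_num + 1` iterations instead of `train_batch_num`, once in the training script and once in the notebook copy of the same loop. Below is a minimal sketch of the likely motivation, assuming `train_batch_num` is obtained by floor-dividing the dataset size by the batch size; the names `train_img_cnt` and `batch_size` are hypothetical and not taken from this diff. Floor division drops the final partial batch, and one extra iteration picks it up.

```python
# Hypothetical sketch (names not from this repo): why iterating train_batch_num + 1
# times can be needed when the batch count was computed with floor division.
import math

train_img_cnt = 1000          # hypothetical number of training images
batch_size = 32               # hypothetical batch size

floor_batches = train_img_cnt // batch_size                  # 31: the last 8 images are dropped
ceil_batches = int(math.ceil(train_img_cnt / batch_size))    # 32: every image is covered

# When the image count is NOT an exact multiple of the batch size,
# floor_batches + 1 equals ceil_batches, so the "+1" covers the final partial batch.
assert floor_batches + 1 == ceil_batches

# Caveat: if the count IS an exact multiple, the extra iteration re-runs one batch
# (or relies on the input pipeline repeating), so ceiling division is the cleaner fix.
```

If that assumption about how `train_batch_num` is computed holds, defining it with ceiling division would remove the off-by-one at the source; the `+1` in the loop gives the same coverage whenever the dataset size is not an exact multiple of the batch size.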