layers.py
# Requires TensorFlow 1.x: this file uses tf.contrib and tf.variable_scope,
# both of which were removed in TensorFlow 2.
import tensorflow as tf
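
# Both layer helpers below contain a commented-out call to instance_norm(),
# which is not defined in this file. The following is a minimal sketch of
# such a helper, assuming the per-channel instance normalization commonly
# used in CycleGAN-style models; it is not part of the original file.
def instance_norm(x, name="instance_norm"):
    """Normalize each feature map over its spatial dimensions (NHWC input)."""
    with tf.variable_scope(name):
        epsilon = 1e-5
        # Per-sample, per-channel mean and variance over height and width.
        mean, var = tf.nn.moments(x, [1, 2], keep_dims=True)
        scale = tf.get_variable(
            "scale", [x.get_shape()[-1]],
            initializer=tf.truncated_normal_initializer(mean=1.0, stddev=0.02))
        offset = tf.get_variable(
            "offset", [x.get_shape()[-1]],
            initializer=tf.constant_initializer(0.0))
        return scale * (x - mean) / tf.sqrt(var + epsilon) + offset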
def general_conv2d(inputconv, o_d=64, f_h=7, f_w=7, s_h=1, s_w=1, padding="VALID",
                   name="conv2d", do_norm=True, do_relu=True, relufactor=0):
    """Convolution -> optional batch norm -> optional (leaky) ReLU."""
    with tf.variable_scope(name):
        # Pass both kernel dimensions and both strides; the original passed
        # only f_w and s_w, silently ignoring f_h and s_h.
        conv = tf.contrib.layers.conv2d(inputconv, o_d, [f_h, f_w], [s_h, s_w],
                                        padding, activation_fn=None)
        if do_norm:
            # conv = instance_norm(conv)
            conv = tf.contrib.layers.batch_norm(conv, decay=0.9,
                                                updates_collections=None,
                                                epsilon=1e-5, scale=True,
                                                scope="batch_norm")
        if do_relu:
            if relufactor == 0:
                conv = tf.nn.relu(conv, "relu")
            else:
                conv = tf.nn.leaky_relu(conv, relufactor, "lrelu")
        return conv
def general_deconv2d(inputconv, outshape, o_d=64, f_h=7, f_w=7, s_h=1, s_w=1,
                     padding="VALID", name="deconv2d", do_norm=True,
                     do_relu=True, relufactor=0):
    """Transposed convolution -> optional batch norm -> optional (leaky) ReLU.

    Note: `outshape` is unused here; tf.contrib.layers.conv2d_transpose infers
    the output shape itself. The parameter is kept for call-site compatibility.
    """
    with tf.variable_scope(name):
        conv = tf.contrib.layers.conv2d_transpose(inputconv, o_d, [f_h, f_w],
                                                  [s_h, s_w], padding,
                                                  activation_fn=None)
        if do_norm:
            # conv = instance_norm(conv)
            conv = tf.contrib.layers.batch_norm(conv, decay=0.9,
                                                updates_collections=None,
                                                epsilon=1e-5, scale=True,
                                                scope="batch_norm")
        if do_relu:
            if relufactor == 0:
                conv = tf.nn.relu(conv, "relu")
            else:
                conv = tf.nn.leaky_relu(conv, relufactor, "lrelu")
        return conv
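
# Illustrative usage sketch, not part of the original file: run a 256x256 RGB
# placeholder through one conv layer and one deconv layer. Shapes and names
# here are assumptions for demonstration only (TF1 NHWC layout).
if __name__ == "__main__":
    inp = tf.placeholder(tf.float32, [1, 256, 256, 3], name="input")
    c1 = general_conv2d(inp, o_d=64, f_h=7, f_w=7, s_h=1, s_w=1,
                        padding="SAME", name="c1")
    d1 = general_deconv2d(c1, [1, 256, 256, 64], o_d=32, f_h=3, f_w=3,
                          s_h=2, s_w=2, padding="SAME", name="d1")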