# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
Args:
x: The 4D input vector.
out_channels: Number of features in the output layer.
name: The variable scope name for the block.
downsample: If True, downsample the spatial size the input tensor by
a factor of 2 on each side. If False, the spatial size of the
input tensor is unchanged.
act: The activation function used in the block.
Returns:
A `Tensor` representing the output of the operation.
"""
with tf.compat.v1.variable_scope(name):
input_channels = x.shape.as_list()[-1]
x_0 = x
x = act(x)
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv1')
x = act(x)
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv2')
if downsample:
x = dsample(x)
if downsample or input_channels != out_channels:
x_0 = ops.snconv2d(x_0, out_channels, 1, 1, 1, 1, name='sn_conv3')
if downsample:
x_0 = dsample(x_0)
return x_0 + x
update_collection = self._get_update_collection(is_training)
with tf.variable_scope("g_resblock_"+str(idx), reuse=tf.AUTO_REUSE):
h = self._cbn(x, y, is_training, scope='g_resblock_cbn_1')
h = tf.nn.relu(h)
if resize:
h = upscale(h, 2)
h = snconv2d(h, n_ch, name='g_resblock_conv_1', update_collection=update_collection)
h = self._cbn(h, y, is_training, scope='g_resblock_cbn_2')
h = tf.nn.relu(h)
h = snconv2d(h, n_ch, name='g_resblock_conv_2', update_collection=update_collection)
if resize:
sc = upscale(x, 2)
else:
sc = x
sc = snconv2d(sc, n_ch, k_h=1, k_w=1, name='g_resblock_conv_sc', update_collection=update_collection)
return h + sc
Returns:
A `Tensor` representing the output of the operation.
"""
with tf.compat.v1.variable_scope(name):
labels_onehot = tf.one_hot(labels, num_classes)
x_0 = x
x = tf.nn.relu(tfgan.tpu.batch_norm(x, training, labels_onehot,
name='cbn_0'))
x = usample(x)
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, training, 'snconv1')
x = tf.nn.relu(tfgan.tpu.batch_norm(x, training, labels_onehot,
name='cbn_1'))
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, training, 'snconv2')
x_0 = usample(x_0)
x_0 = ops.snconv2d(x_0, out_channels, 1, 1, 1, 1, training, 'snconv3')
return x_0 + x
'g_block4',
training) # 64
act5 = block(
act4,
target_class,
gf_dim,
num_classes,
'g_block5',
training) # 128
act5 = tf.nn.relu(
tfgan.tpu.batch_norm(
act5,
training,
conditional_class_labels=None,
name='g_bn'))
act6 = ops.snconv2d(act5, 3, 3, 3, 1, 1, training, 'g_snconv_last')
out = tf.nn.tanh(act6)
return out, attn_map
Args:
x: The 4D input vector.
out_channels: Number of features in the output layer.
name: The variable scope name for the block.
act: The activation function used in the block.
Returns:
A `Tensor` representing the output of the operation.
"""
with tf.compat.v1.variable_scope(name):
x_0 = x
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv1')
x = act(x)
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv2')
x = dsample(x)
x_0 = dsample(x_0)
x_0 = ops.snconv2d(x_0, out_channels, 1, 1, 1, 1, name='sn_conv3')
return x + x_0
def _g_residual_block(self, x, y, n_ch, idx, is_training, resize=True):
    """Generator residual block with conditional BN and spectrally-normalized convs.

    Main path: CBN -> ReLU -> (optional 2x upsample) -> 3x3 snconv, applied
    twice. Shortcut path: (optional 2x upsample) -> 1x1 snconv projection, so
    the channel count always matches `n_ch`.

    Args:
        x: 4D input tensor (presumably NHWC -- TODO confirm against snconv2d).
        y: Conditioning tensor (e.g. class embedding) fed to conditional BN.
        n_ch: Number of output channels for every conv in the block.
        idx: Index used to build a unique variable-scope name for the block.
        is_training: Training-mode flag; selects the spectral-norm update
            collection and is forwarded to conditional BN.
        resize: If True, upsample spatial dims by 2 on both paths.

    Returns:
        A tensor: the sum of the main path and the projected shortcut.
    """
    update_collection = self._get_update_collection(is_training)
    # tf.compat.v1 API for consistency with the other blocks in this file;
    # bare tf.variable_scope / tf.AUTO_REUSE were removed in TF2.
    with tf.compat.v1.variable_scope(
            "g_resblock_" + str(idx), reuse=tf.compat.v1.AUTO_REUSE):
        # Main path.
        h = self._cbn(x, y, is_training, scope='g_resblock_cbn_1')
        h = tf.nn.relu(h)
        if resize:
            h = upscale(h, 2)
        h = snconv2d(h, n_ch, name='g_resblock_conv_1',
                     update_collection=update_collection)
        h = self._cbn(h, y, is_training, scope='g_resblock_cbn_2')
        h = tf.nn.relu(h)
        h = snconv2d(h, n_ch, name='g_resblock_conv_2',
                     update_collection=update_collection)
        # Shortcut path: match spatial size, then project channels with a
        # 1x1 conv (applied unconditionally, matching the original block).
        sc = upscale(x, 2) if resize else x
        sc = snconv2d(sc, n_ch, k_h=1, k_w=1, name='g_resblock_conv_sc',
                      update_collection=update_collection)
        return h + sc
"""Builds optimized residual blocks for downsampling.
Compared with block, optimized_block always downsamples the spatial resolution
by a factor of 2 on each side.
Args:
x: The 4D input vector.
out_channels: Number of features in the output layer.
name: The variable scope name for the block.
act: The activation function used in the block.
Returns:
A `Tensor` representing the output of the operation.
"""
with tf.compat.v1.variable_scope(name):
x_0 = x
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv1')
x = act(x)
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv2')
x = dsample(x)
x_0 = dsample(x_0)
x_0 = ops.snconv2d(x_0, out_channels, 1, 1, 1, 1, name='sn_conv3')
return x + x_0
input tensor is unchanged.
act: The activation function used in the block.
Returns:
A `Tensor` representing the output of the operation.
"""
with tf.compat.v1.variable_scope(name):
input_channels = x.shape.as_list()[-1]
x_0 = x
x = act(x)
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv1')
x = act(x)
x = ops.snconv2d(x, out_channels, 3, 3, 1, 1, name='sn_conv2')
if downsample:
x = dsample(x)
if downsample or input_channels != out_channels:
x_0 = ops.snconv2d(x_0, out_channels, 1, 1, 1, 1, name='sn_conv3')
if downsample:
x_0 = dsample(x_0)
return x_0 + x
def _g_residual_block(self, x, y, n_ch, idx, is_training, resize=True):
    """Generator residual block with conditional BN and spectrally-normalized convs.

    Main path: CBN -> ReLU -> (optional 2x upsample) -> 3x3 snconv, applied
    twice. Shortcut path: (optional 2x upsample) -> 1x1 snconv projection, so
    the channel count always matches `n_ch`.

    Args:
        x: 4D input tensor (presumably NHWC -- TODO confirm against snconv2d).
        y: Conditioning tensor (e.g. class embedding) fed to conditional BN.
        n_ch: Number of output channels for every conv in the block.
        idx: Index used to build a unique variable-scope name for the block.
        is_training: Training-mode flag; selects the spectral-norm update
            collection and is forwarded to conditional BN.
        resize: If True, upsample spatial dims by 2 on both paths.

    Returns:
        A tensor: the sum of the main path and the projected shortcut.
    """
    update_collection = self._get_update_collection(is_training)
    # tf.compat.v1 API for consistency with the other blocks in this file;
    # bare tf.variable_scope / tf.AUTO_REUSE were removed in TF2.
    with tf.compat.v1.variable_scope(
            "g_resblock_" + str(idx), reuse=tf.compat.v1.AUTO_REUSE):
        # Main path.
        h = self._cbn(x, y, is_training, scope='g_resblock_cbn_1')
        h = tf.nn.relu(h)
        if resize:
            h = upscale(h, 2)
        h = snconv2d(h, n_ch, name='g_resblock_conv_1',
                     update_collection=update_collection)
        h = self._cbn(h, y, is_training, scope='g_resblock_cbn_2')
        h = tf.nn.relu(h)
        h = snconv2d(h, n_ch, name='g_resblock_conv_2',
                     update_collection=update_collection)
        # Shortcut path: match spatial size, then project channels with a
        # 1x1 conv (applied unconditionally, matching the original block).
        sc = upscale(x, 2) if resize else x
        sc = snconv2d(sc, n_ch, k_h=1, k_w=1, name='g_resblock_conv_sc',
                      update_collection=update_collection)
        return h + sc