discriminator.py
import tensorflow as tf

import config  # project configuration module (referenced below); assumed to provide dis_inter_layer_dim and init_kernel


class Discriminator(object):

    def __init__(self):
        # Width of the intermediate feature-matching layer and the kernel
        # initializer are read from the project config module.
        self.dis_inter_layer_dim = config.dis_inter_layer_dim
        self.init_kernel = config.init_kernel

    def discriminator_network(self, x_inp, is_training=False, getter=None, reuse=False):
        """Discriminator architecture in TensorFlow.

        Discriminates between real data and generated data.

        Args:
            x_inp (tensor): input data for the discriminator,
                shape [batch_size x sequence_length x hidden_size]
            is_training (bool): whether dropout is active
            getter: custom getter passed to the variable scope (custom_getter)
            reuse (bool): sharing variables or not

        Returns:
            logits (tensor): last activation layer of the discriminator (shape 1)
            intermediate_layer (tensor): intermediate layer for feature matching
        """
        # Flatten everything but the batch dimension so the input can be fed
        # to fully connected layers; works with a dynamic batch size.
        x_inp = tf.layers.flatten(x_inp)

        with tf.variable_scope('discriminator', reuse=reuse, custom_getter=getter):
            name_net = 'layer_1'
            with tf.variable_scope(name_net):
                net = tf.layers.dense(x_inp,
                                      units=256,
                                      kernel_initializer=self.init_kernel,
                                      name='fc')
                net = leakyReLu(net)
                net = tf.layers.dropout(net, rate=0.2, name='dropout',
                                        training=is_training)

            name_net = 'layer_2'
            with tf.variable_scope(name_net):
                net = tf.layers.dense(net,
                                      units=128,
                                      kernel_initializer=self.init_kernel,
                                      name='fc')
                net = leakyReLu(net)
                net = tf.layers.dropout(net, rate=0.2, name='dropout',
                                        training=is_training)

            name_net = 'layer_3'
            with tf.variable_scope(name_net):
                net = tf.layers.dense(net,
                                      units=self.dis_inter_layer_dim,
                                      kernel_initializer=self.init_kernel,
                                      name='fc')
                net = leakyReLu(net)
                net = tf.layers.dropout(net,
                                        rate=0.2,
                                        name='dropout',
                                        training=is_training)

            # Kept for feature matching between real and generated samples.
            intermediate_layer = net

            name_net = 'layer_4'
            with tf.variable_scope(name_net):
                net = tf.layers.dense(net,
                                      units=1,
                                      kernel_initializer=self.init_kernel,
                                      name='fc')
                # Drop the trailing singleton dimension: [batch_size, 1] -> [batch_size].
                net = tf.squeeze(net, axis=-1)

        return net, intermediate_layer


def leakyReLu(x, alpha=0.1, name=None):
    """Leaky ReLU activation; equivalent to tf.nn.leaky_relu(x, alpha)."""
    if name:
        with tf.variable_scope(name):
            return _leakyReLu_impl(x, alpha)
    else:
        return _leakyReLu_impl(x, alpha)


def _leakyReLu_impl(x, alpha):
    # relu(x) - alpha * relu(-x) == x for x > 0, alpha * x otherwise
    return tf.nn.relu(x) - (alpha * tf.nn.relu(-x))
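

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): builds the
# discriminator graph in TF 1.x graph mode and runs one forward pass on
# random data. It assumes the project's `config` module exposes
# `dis_inter_layer_dim` and `init_kernel`; the input shape
# [batch_size, sequence_length, hidden_size] and the concrete sizes below
# are illustrative only, taken from the docstring above.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import numpy as np

    batch_size, seq_len, hidden_size = 32, 10, 64
    x_pl = tf.placeholder(tf.float32, shape=[None, seq_len, hidden_size], name='x_inp')

    discriminator = Discriminator()
    logits, feats = discriminator.discriminator_network(x_pl, is_training=True)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        out_logits, out_feats = sess.run(
            [logits, feats],
            feed_dict={x_pl: np.random.randn(batch_size, seq_len, hidden_size)})
        # logits: one score per example; feats: dis_inter_layer_dim features
        # that can be used for feature matching.
        print(out_logits.shape, out_feats.shape)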