metrics.py
import tensorflow as tf

flags = tf.app.flags
FLAGS = flags.FLAGS

def masked_softmax_cross_entropy(preds, labels, mask):
    """Softmax cross-entropy loss with masking."""
    loss = tf.nn.softmax_cross_entropy_with_logits(logits=preds, labels=labels)
    mask = tf.cast(mask, dtype=tf.float32)
    # Rescale the mask so that averaging over all entries is equivalent to
    # averaging over the masked-in entries only.
    mask /= tf.reduce_mean(mask)
    loss *= mask
    return tf.reduce_mean(loss)

def masked_accuracy(preds, labels, mask):
    """Accuracy with masking."""
    correct_prediction = tf.equal(tf.argmax(preds, 1), tf.argmax(labels, 1))
    accuracy_all = tf.cast(correct_prediction, tf.float32)
    mask = tf.cast(mask, dtype=tf.float32)
    mask /= tf.reduce_mean(mask)
    accuracy_all *= mask
    return tf.reduce_mean(accuracy_all)
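
# Note on the mask rescaling used throughout this module (added comment, not
# original code): with mask = [1, 1, 0, 0], tf.reduce_mean(mask) = 0.5, so the
# rescaled mask is [2, 2, 0, 0]. Then
#     tf.reduce_mean(loss * mask) = (2*l0 + 2*l1 + 0 + 0) / 4 = (l0 + l1) / 2,
# i.e. exactly the mean of the loss over the masked-in entries.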

def masked_sigmoid_cross_entropy(preds, labels, mask):
    """Sigmoid cross-entropy loss with masking."""
    loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=preds, labels=labels)
    mask = tf.cast(mask, dtype=tf.float32)
    mask /= tf.reduce_mean(mask)
    loss *= mask
    return tf.reduce_mean(loss)

def mask_mse_loss(preds, labels, mask):
    """Squared-error (L2) loss with masking."""
    mask = tf.cast(mask, dtype=tf.float32)
    mask /= tf.reduce_mean(mask)
    labels *= mask
    preds *= mask
    # tf.nn.l2_loss returns sum((labels - preds) ** 2) / 2, i.e. a summed
    # rather than averaged squared error.
    loss = tf.nn.l2_loss(tf.subtract(labels, preds))
    return loss
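
# Added sketch (not in the original module): if a true *mean* squared error is
# preferred over the summed L2 above, a masked variant could look like the
# hypothetical helper below. It assumes `mask` broadcasts against
# `labels - preds`, the same assumption mask_mse_loss makes.
def mask_mse_loss_mean(preds, labels, mask):
    """Masked mean-squared-error loss (illustrative sketch)."""
    mask = tf.cast(mask, dtype=tf.float32)
    mask /= tf.reduce_mean(mask)
    return tf.reduce_mean(tf.square(labels - preds) * mask)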

def mask_classification_loss(output_all, output_att, train_mask, attention_mask, labels):
    """Masked sigmoid cross-entropy loss over per-class attention scores."""
    label_num = int(train_mask.shape[0])
    # Sum the attended feature rows selected by the sparse attention_mask.
    attend_features = tf.nn.embedding_lookup_sparse(
        output_att, attention_mask, None, combiner='sum')
    # The first label_num rows of output_all act as per-class classifiers.
    classifiers = tf.slice(output_all, [0, 0], [label_num, -1])
    if FLAGS.use_normalization:
        # attend_features = tf.nn.l2_normalize(attend_features, dim=-1)
        classifiers = tf.nn.l2_normalize(classifiers, dim=-1)
    # Per-class score: dot product between each classifier and its features.
    raw_score_per_action = tf.reduce_sum(
        tf.multiply(classifiers, attend_features), axis=1)
    labels = tf.one_hot(labels, label_num)
    loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=raw_score_per_action, labels=labels)
    mask = tf.cast(train_mask, dtype=tf.float32)
    mask /= tf.reduce_mean(mask)
    loss *= mask
    return tf.reduce_mean(loss), classifiers, attend_features
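
# Added illustration (hypothetical helper, not part of the original module):
# tf.nn.embedding_lookup_sparse with combiner='sum' turns each row of the
# sparse id tensor into the sum of the rows of `params` it indexes, which is
# how attend_features is produced above.
def _embedding_lookup_sparse_demo():
    """Returns [[2., 1.]]: the sum of rows 0 and 2 of output_att."""
    output_att = tf.constant([[1., 0.], [0., 1.], [1., 1.]])
    attention_mask = tf.SparseTensor(indices=[[0, 0], [0, 1]],
                                     values=tf.constant([0, 2], tf.int64),
                                     dense_shape=[1, 2])
    return tf.nn.embedding_lookup_sparse(output_att, attention_mask, None,
                                         combiner='sum')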

def mask_classification_accuracy(output_all, output_att, train_mask, attention_mask, labels):
    """Top-1 accuracy for the masked attention-based classifier."""
    label_num = int(train_mask.shape[0])
    attend_features = tf.nn.embedding_lookup_sparse(
        output_att, attention_mask, None, combiner='sum')
    classifiers = tf.slice(output_all, [0, 0], [label_num, -1])
    if FLAGS.use_normalization:
        # attend_features = tf.nn.l2_normalize(attend_features, dim=-1)
        classifiers = tf.nn.l2_normalize(classifiers, dim=-1)
    raw_score_per_action = tf.multiply(classifiers, attend_features)
    raw_score_per_action = tf.reduce_sum(raw_score_per_action, axis=1)
    raw_score_per_action = tf.nn.sigmoid(raw_score_per_action)
    # Zero out the scores of masked-off classes before taking the top-1.
    mask = tf.cast(train_mask, dtype=tf.float32)
    raw_score_per_action *= mask
    top1 = tf.argmax(raw_score_per_action, 0)
    flag = tf.equal(top1, tf.cast(labels, dtype=tf.int64))
    # 1 if the top-1 prediction matches the label, else 0.
    acc = tf.cond(flag, fn_true, fn_false)
    return (acc, top1, raw_score_per_action)

def mask_classification_softmax_loss(output_all, output_att, train_mask, attention_mask, labels):
    """Masked softmax cross-entropy loss, computed per batch element."""
    label_num = int(train_mask.shape[0])
    classifiers = tf.slice(output_all, [0, 0], [label_num, -1])
    # In the initial experiments we found that skipping the normalization
    # works better.
    if FLAGS.use_normalization:
        # attend_features = tf.nn.l2_normalize(attend_features, dim=-1)
        classifiers = tf.nn.l2_normalize(classifiers, dim=-1)
    output_att = tf.reshape(output_att, [FLAGS.batch_size, -1, FLAGS.output_dim])
    losses = []
    for i in range(FLAGS.batch_size):
        output_att_one = output_att[i, :, :]
        label = labels[i]
        attend_features = tf.nn.embedding_lookup_sparse(
            output_att_one, attention_mask, None, combiner='sum')
        raw_score_per_action = tf.reduce_sum(
            tf.multiply(classifiers, attend_features), axis=1)
        label = tf.one_hot(label, label_num)
        mask = tf.cast(train_mask, dtype=tf.float32)
        loss = tf.nn.softmax_cross_entropy_with_logits(logits=raw_score_per_action, labels=label)
        mask /= tf.reduce_mean(mask)
        loss *= mask
        losses.append(loss)
    loss = tf.add_n(losses)
    # Note: attend_features below is the last batch element's features.
    return tf.reduce_mean(loss), classifiers, attend_features

def mask_classification_softmax_accuracy(output_all, output_att, train_mask, attention_mask, labels):
    """Per-example top-1 accuracy for the softmax attention classifier."""
    label_num = int(train_mask.shape[0])
    classifiers = tf.slice(output_all, [0, 0], [label_num, -1])
    if FLAGS.use_normalization:
        # attend_features = tf.nn.l2_normalize(attend_features, dim=-1)
        classifiers = tf.nn.l2_normalize(classifiers, dim=-1)
    output_att = tf.reshape(output_att, [FLAGS.batch_size, -1, FLAGS.output_dim])
    accs = []
    top1s = []
    scores = []
    for i in range(FLAGS.batch_size):
        output_att_one = output_att[i, :, :]
        label = labels[i]
        attend_features = tf.nn.embedding_lookup_sparse(
            output_att_one, attention_mask, None, combiner='sum')
        raw_score_per_action = tf.reduce_sum(
            tf.multiply(classifiers, attend_features), axis=1)
        mask = tf.cast(train_mask, dtype=tf.float32)
        raw_score_per_action = tf.nn.softmax(raw_score_per_action)
        # Zero out masked-off classes before taking the top-1 prediction.
        raw_score_per_action *= mask
        top1 = tf.argmax(raw_score_per_action, 0)
        flag = tf.equal(top1, tf.cast(label, dtype=tf.int64))
        acc = tf.cond(flag, fn_true, fn_false)
        accs.append(acc)
        top1s.append(top1)
        scores.append(raw_score_per_action)
    return (accs, top1s, scores)

def fn_true():
    """Branch helper for tf.cond: accuracy 1 (correct prediction)."""
    return tf.constant(1)


def fn_false():
    """Branch helper for tf.cond: accuracy 0 (wrong prediction)."""
    return tf.constant(0)
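
# ---------------------------------------------------------------------------
# Minimal usage sketch (added; not part of the original module). It wires the
# two basic masked metrics into a tiny graph and evaluates them on toy data,
# masking out the last two of four examples. All shapes and values here are
# illustrative assumptions.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import numpy as np

    preds_ph = tf.placeholder(tf.float32, [4, 3])
    labels_ph = tf.placeholder(tf.float32, [4, 3])
    mask_ph = tf.placeholder(tf.int32, [4])

    loss_op = masked_softmax_cross_entropy(preds_ph, labels_ph, mask_ph)
    acc_op = masked_accuracy(preds_ph, labels_ph, mask_ph)

    with tf.Session() as sess:
        loss_val, acc_val = sess.run(
            [loss_op, acc_op],
            feed_dict={
                preds_ph: np.random.randn(4, 3),
                labels_ph: np.eye(3)[[0, 1, 2, 0]],
                mask_ph: [1, 1, 0, 0],  # only the first two examples count
            })
        print('masked loss: %.4f, masked accuracy: %.4f' % (loss_val, acc_val))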