model_blocks.py
from tensorflow.keras import layers
from tensorflow.keras import backend as K


def conv_block(x, filter_size, size, dropout, num, batch_norm=False):
    """Double-convolution block: two same-padded (filter_size x filter_size)
    convolutions with ReLU activations, optional batch normalization after
    each convolution, and optional dropout at the end."""
    conv = layers.Conv2D(size, (filter_size, filter_size), padding="same")(x)
    if batch_norm:
        conv = layers.BatchNormalization(axis=3)(conv)
    conv = layers.Activation("relu")(conv)

    conv = layers.Conv2D(size, (filter_size, filter_size), padding="same",
                         name="conv" + str(num))(conv)
    if batch_norm:
        conv = layers.BatchNormalization(axis=3)(conv)
    conv = layers.Activation("relu")(conv)

    if dropout > 0:
        conv = layers.Dropout(dropout)(conv)
    return conv
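

# A minimal usage sketch (commented out; the input shape and filter counts
# below are illustrative assumptions, not values from this project):
#
#   inputs = layers.Input((128, 128, 3))
#   c1 = conv_block(inputs, filter_size=3, size=64, dropout=0.1, num=1,
#                   batch_norm=True)
#   p1 = layers.MaxPooling2D((2, 2))(c1)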


def reg_conv_block(x, filter_size, size, dropout, num, batch_norm=False):
    """Same as conv_block, but with L1 weight regularization on both
    convolution kernels."""
    conv = layers.Conv2D(size, (filter_size, filter_size),
                         kernel_regularizer='l1', padding="same")(x)
    if batch_norm:
        conv = layers.BatchNormalization(axis=3)(conv)
    conv = layers.Activation("relu")(conv)

    conv = layers.Conv2D(size, (filter_size, filter_size),
                         kernel_regularizer='l1', padding="same",
                         name="conv" + str(num))(conv)
    if batch_norm:
        conv = layers.BatchNormalization(axis=3)(conv)
    conv = layers.Activation("relu")(conv)

    if dropout > 0:
        conv = layers.Dropout(dropout)(conv)
    return conv
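

# Note: the string shortcut kernel_regularizer='l1' applies Keras' default L1
# factor of 0.01. If an explicit rate is wanted, a hedged alternative would be
# the following (the 1e-4 value is an assumption for illustration):
#
#   from tensorflow.keras import regularizers
#   layers.Conv2D(64, (3, 3), kernel_regularizer=regularizers.l1(1e-4),
#                 padding="same")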


def gating_signal(inp, out_size, batch_norm=False):
    """Resize the down-layer feature map to the same channel dimension as the
    up-layer feature map using a 1x1 convolution.

    :return: the gating feature map with the same channel dimension as the
        up-layer feature map
    """
    x = layers.Conv2D(out_size, (1, 1), padding='same')(inp)
    if batch_norm:
        x = layers.BatchNormalization()(x)
    x = layers.Activation('relu')(x)
    return x
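

# For instance (shapes assumed for illustration): a decoder-side feature map
# of shape (None, 32, 32, 128) passed through gating_signal(..., out_size=64)
# yields (None, 32, 32, 64). The 1x1 convolution changes only the channel
# count, never the spatial size.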


def attention_block(x, gating, inter_shape):
    """Additive attention gate: weights the skip-connection features `x` by a
    sigmoid attention map computed from `x` and the gating signal."""
    shape_x = K.int_shape(x)
    shape_g = K.int_shape(gating)

    # Bring the skip connection x to the same spatial shape as the gating signal.
    theta_x = layers.Conv2D(inter_shape, (2, 2), strides=(2, 2),
                            padding='same')(x)
    shape_theta_x = K.int_shape(theta_x)

    # Bring the gating signal to inter_shape channels, then upsample it to
    # match theta_x spatially.
    phi_g = layers.Conv2D(inter_shape, (1, 1), padding='same')(gating)
    upsample_g = layers.Conv2DTranspose(
        inter_shape, (3, 3),
        strides=(shape_theta_x[1] // shape_g[1], shape_theta_x[2] // shape_g[2]),
        padding='same')(phi_g)

    # Additive attention: add, ReLU, then collapse to a single-channel
    # sigmoid attention map.
    concat_xg = layers.add([upsample_g, theta_x])
    act_xg = layers.Activation('relu')(concat_xg)
    psi = layers.Conv2D(1, (1, 1), padding='same')(act_xg)
    sigmoid_xg = layers.Activation('sigmoid')(psi)
    shape_sigmoid = K.int_shape(sigmoid_xg)

    # Upsample the attention map back to x's spatial size and broadcast it
    # across x's channels before weighting.
    upsample_psi = layers.UpSampling2D(
        size=(shape_x[1] // shape_sigmoid[1],
              shape_x[2] // shape_sigmoid[2]))(sigmoid_xg)
    upsample_psi = repeat_elem(upsample_psi, shape_x[3])

    y = layers.multiply([upsample_psi, x])
    result = layers.Conv2D(shape_x[3], (1, 1), padding='same')(y)
    result_bn = layers.BatchNormalization()(result)
    return result_bn
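

# A sketch of how these blocks are typically composed into one attention-gated
# decoder step (commented out; `skip` and `deeper` are hypothetical tensors
# from an encoder/decoder pair, not names defined in this file):
#
#   gating = gating_signal(deeper, out_size=64, batch_norm=True)
#   att = attention_block(skip, gating, inter_shape=64)
#   up = layers.UpSampling2D(size=(2, 2))(deeper)
#   merged = layers.concatenate([up, att], axis=3)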


def repeat_elem(tensor, rep):
    """Repeat the elements of a tensor along the channel axis (axis=3) by a
    factor of `rep`. For example, a tensor of shape (None, 256, 256, 3) with
    rep=2 becomes (None, 256, 256, 6)."""
    return layers.Lambda(lambda x, repnum: K.repeat_elements(x, repnum, axis=3),
                         arguments={'repnum': rep})(tensor)
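

# A minimal smoke test, assuming TensorFlow 2.x is installed. It wires the
# blocks above into a toy attention-gated step and prints the layer shapes;
# every size below is an illustrative assumption, not a value from the
# original project.
if __name__ == "__main__":
    from tensorflow.keras import models

    inputs = layers.Input((64, 64, 3))
    c1 = conv_block(inputs, filter_size=3, size=32, dropout=0.0, num=1)
    p1 = layers.MaxPooling2D((2, 2))(c1)
    c2 = conv_block(p1, filter_size=3, size=64, dropout=0.0, num=2)

    g = gating_signal(c2, 32, batch_norm=True)
    att = attention_block(c1, g, 32)  # expected shape: (None, 64, 64, 32)

    models.Model(inputs, att).summary()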