simple_convnet.py
# Prefer CuPy for GPU acceleration; fall back to NumPy if CuPy is not installed.
try:
    import cupy as np
    is_cupy_available = True
except ImportError:
    import numpy as np
    is_cupy_available = False

from diffusion.layers import Conv2D
from diffusion.activations import LeakyReLU

class SimpleConvNet:
    """Toy fully convolutional network; just an example."""

    def __init__(self, channels_num=32):
        channels = [channels_num, channels_num * 2, channels_num * 4, channels_num * 8]
        # Symmetric stack of 7x7 convolutions: the channel count grows from 1
        # up to channels[3], then shrinks back down, and a final 3x3 convolution
        # maps back to a single output channel. The padding keeps the spatial
        # resolution unchanged throughout.
        self.layers = [
            Conv2D(channels_num=1, kernels_num=channels[0], kernel_shape=(7, 7), padding=(3, 3)),
            LeakyReLU(),
            Conv2D(channels_num=channels[0], kernels_num=channels[1], kernel_shape=(7, 7), padding=(3, 3)),
            LeakyReLU(),
            Conv2D(channels_num=channels[1], kernels_num=channels[2], kernel_shape=(7, 7), padding=(3, 3)),
            LeakyReLU(),
            Conv2D(channels_num=channels[2], kernels_num=channels[3], kernel_shape=(7, 7), padding=(3, 3)),
            LeakyReLU(),
            Conv2D(channels_num=channels[3], kernels_num=channels[2], kernel_shape=(7, 7), padding=(3, 3)),
            LeakyReLU(),
            Conv2D(channels_num=channels[2], kernels_num=channels[1], kernel_shape=(7, 7), padding=(3, 3)),
            LeakyReLU(),
            Conv2D(channels_num=channels[1], kernels_num=channels[0], kernel_shape=(7, 7), padding=(3, 3)),
            LeakyReLU(),
            Conv2D(channels_num=channels[0], kernels_num=1, kernel_shape=(3, 3), padding=(1, 1)),
        ]

    def forward(self, x, t=None, training=True):
        # t and training are accepted but unused here; presumably kept so this
        # example matches the call signature of the other models in the repo.
        x = np.asarray(x)
        for layer in self.layers:
            x = layer.forward(x)
        return x

    def backward(self, error):
        # Propagate the output error back through the layers in reverse order.
        error = np.asarray(error)
        for layer in reversed(self.layers):
            error = layer.backward(error)
        return error

    def update_weights(self):
        # Apply the optimizer step to every trainable layer; layer_num presumably
        # keys per-layer optimizer state, counted from the output side.
        for i, layer in enumerate(reversed(self.layers)):
            if hasattr(layer, 'update_weights'):
                layer.update_weights(layer_num=i + 1)

    def set_optimizer(self, optimizer):
        # Share a single optimizer instance across all layers that support one.
        self.optimizer = optimizer
        for layer in self.layers:
            if hasattr(layer, 'set_optimizer'):
                layer.set_optimizer(self.optimizer)
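

# A minimal usage sketch (not part of the original file): the input layout is
# assumed to be NCHW, i.e. (batch, channels, height, width), which is an
# assumption about diffusion.layers.Conv2D, not a verified API. A full training
# step would be set_optimizer(...) -> forward -> backward -> update_weights.
if __name__ == "__main__":
    model = SimpleConvNet(channels_num=32)

    x = np.random.randn(4, 1, 28, 28)  # batch of 4 single-channel 28x28 images
    out = model.forward(x)             # same-padding, so output stays (4, 1, 28, 28)
    error = model.backward(out - x)    # toy reconstruction-style error signal
    print(out.shape, error.shape)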