main.cc
// Builds and trains a small convolutional network on 32x32x3 images
// (10 output classes) using the layer classes provided by this repository.

#include <cstdio>
#include <cstdlib>
#include <iostream>

#include "conv_layer.h"
#include "data.h"
#include "fully_connected_layer.h"
#include "input_layer.h"
#include "layer.h"
#include "max_pooling_layer.h"
#include "net.h"
#include "util.h"
#include "softmax_loss_layer.h"
#include "relu_layer.h"
#include "average_pooling_layer.h"
#include "activation.h"
#include "filler.h"

using namespace std;
using namespace con;

// Weight initializers: zero-mean Gaussians with increasing standard deviation,
// plus a constant (zero) filler for the biases.
auto gaussianFiller1 = GaussianFiller(0, 0.0001);
auto gaussianFiller2 = GaussianFiller(0, 0.01);
auto gaussianFiller3 = GaussianFiller(0, 0.1);
auto constantFiller = ConstantFiller(0);

const int batch = 100;

vector<Layer*> layers;
vector<Sample> trainData, testData;

// Training hyperparameters.
Real lr = 0.001;
Real momentum = 0.9;
Real weightDecay = 0.004;

// Assembles the network: three convolutional blocks (convolution, pooling,
// ReLU) followed by two fully connected layers and a softmax loss, then
// prints each layer's name and output dimensions.
void getLayers(vector<Layer*> *layers) {
  layers->push_back(new InputLayer("input", batch, 32, 32, 3));

  layers->push_back(new ConvolutionalLayer("conv1", 32, 5, 1, 2, layers->back(), &gaussianFiller1, &constantFiller));
  layers->push_back(new MaxPoolingLayer("pool1", 3, 2, layers->back()));
  layers->push_back(new ReluLayer("relu1", layers->back()));

  layers->push_back(new ConvolutionalLayer("conv2", 32, 5, 1, 2, layers->back(), &gaussianFiller2, &constantFiller));
  layers->push_back(new ReluLayer("relu2", layers->back()));
  layers->push_back(new AveragePoolingLayer("pool2", 3, 2, layers->back()));

  layers->push_back(new ConvolutionalLayer("conv3", 64, 5, 1, 2, layers->back(), &gaussianFiller2, &constantFiller));
  layers->push_back(new ReluLayer("relu3", layers->back()));
  layers->push_back(new AveragePoolingLayer("pool3", 3, 2, layers->back()));

  layers->push_back(new FullyConnectedLayer("fc1", 64, layers->back(), &gaussianFiller3, &constantFiller));
  layers->push_back(new FullyConnectedLayer("fc2", 10, layers->back(), &gaussianFiller3, &constantFiller));
  layers->push_back(new SoftmaxLossLayer("softmax", layers->back()));

  for (size_t i = 0; i < layers->size(); i++) {
    cout << layers->at(i)->name << " " << layers->at(i)->depth << " "
         << layers->at(i)->width << " " << layers->at(i)->height << endl;
  }
}

int main() {
  getLayers(&layers);

  readTrain(&trainData);
  readTest(&testData);

  train(batch, layers, trainData, testData, lr, momentum, weightDecay);

  return 0;
}