-
Notifications
You must be signed in to change notification settings - Fork 0
/
NeuralNet.h
260 lines (236 loc) · 10.1 KB
/
NeuralNet.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
/*
*
* Fast Artificial Neural Network (fann) C# Wrapper
* Copyright (C) 2010 created by james (at) jamesbates.net
*
* On LinkedIn here http://uk.linkedin.com/in/alexanderjamesbates
*
* This wrapper is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This wrapper is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
#pragma once
#pragma managed (push,off)
#include "doublefann.h"
#include "fann_cpp.h"
#pragma managed (pop)
#include "ProxyImpl.h"
#include "Connection.h"
#include "TrainingData.h"
#using <mscorlib.dll>
#include <vcclr.h>
using namespace System;
using namespace System::Collections::Generic;
using namespace System::Runtime::InteropServices;
namespace FANN
{
namespace Net
{
// Managed mirror of FANN's native fann_errorfunc_enum (error function used
// during training). Each enumerator is pinned explicitly to its native FANN
// constant: the original pinned only the first value and let the rest track
// the native enum by implicit increment, which silently breaks if the native
// enum is ever reordered or extended.
public enum class ErrorFunction {
    Linear = FANN_ERRORFUNC_LINEAR,  // plain linear error
    TanH = FANN_ERRORFUNC_TANH       // tanh-shaped error
};
// Managed mirror of FANN's native fann_stopfunc_enum (criterion that ends
// training). Every enumerator is pinned to its native constant explicitly;
// the original relied on implicit sequential values matching the native
// enum's order, which would drift silently if that order ever changed.
public enum class StopFunction
{
    MSE = FANN_STOPFUNC_MSE,  // stop on mean-squared-error threshold
    Bit = FANN_STOPFUNC_BIT   // stop on number of bit-fails (see BitFailLimit)
};
// Managed mirror of FANN's native fann_train_enum (weight-update algorithm).
// Each enumerator is pinned explicitly to its native constant; the original
// pinned only Incremental and let Batch/RProp/QuickProp track the native
// enum by implicit increment — fragile against any native-side reordering.
public enum class TrainingAlgorithm {
    Incremental = FANN_TRAIN_INCREMENTAL,  // update weights after every pattern
    Batch = FANN_TRAIN_BATCH,              // update weights once per epoch
    RProp = FANN_TRAIN_RPROP,              // resilient backpropagation
    QuickProp = FANN_TRAIN_QUICKPROP       // quickprop
};
// Managed mirror of FANN's native fann_activationfunc_enum. Every
// enumerator is pinned explicitly to its native constant. The original
// pinned only Linear and derived the remaining FIFTEEN values by implicit
// increment — the riskiest of this header's enums, since any insertion or
// reordering in the native enum would silently remap every managed value
// after the change point.
public enum class ActivationFunction {
    Linear = FANN_LINEAR,
    Threshold = FANN_THRESHOLD,
    ThresholdSymmetric = FANN_THRESHOLD_SYMMETRIC,
    Sigmoid = FANN_SIGMOID,
    SigmoidStepWise = FANN_SIGMOID_STEPWISE,
    SigmoidSymmetric = FANN_SIGMOID_SYMMETRIC,
    SigmoidSymmetricStepWise = FANN_SIGMOID_SYMMETRIC_STEPWISE,
    Gaussian = FANN_GAUSSIAN,
    GaussianSymmetric = FANN_GAUSSIAN_SYMMETRIC,
    GaussianStepwise = FANN_GAUSSIAN_STEPWISE,
    Elliot = FANN_ELLIOT,
    ElliotSymmetric = FANN_ELLIOT_SYMMETRIC,
    LinearPiece = FANN_LINEAR_PIECE,
    LinearPieceSymmetric = FANN_LINEAR_PIECE_SYMMETRIC,
    SinSymmetric = FANN_SIN_SYMMETRIC,
    CosSymmetric = FANN_COS_SYMMETRIC
};
// Managed mirror of FANN's native fann_nettype_enum (network topology kind).
// Both enumerators are pinned explicitly to their native constants instead
// of relying on implicit increment tracking the native enum's order.
public enum class NetworkType
{
    Layer = FANN_NETTYPE_LAYER,      // each layer connects only to the next
    ShortCut = FANN_NETTYPE_SHORTCUT // layers also connect to all later layers
};
// Managed (C++/CLI) facade over the native FANN::neural_net class.
// This header contains declarations only; the implementations (presumably
// thin forwards to the native object held by the ProxyImpl<FANN::neural_net>
// base) live in the accompanying .cpp, which is not visible here. Method
// names are PascalCase transliterations of the corresponding fann_cpp.h
// members.
public ref class NeuralNet : ProxyImpl<FANN::neural_net>
{
public:
// Training-progress callback signature surfaced to managed clients via the
// Callback event below. NOTE(review): the meaning of the int return value
// is not visible in this header — presumably the native FANN convention
// (negative return cancels training); confirm against the implementation.
delegate int CallbackType (NeuralNet^ net, TrainingData^ train,
unsigned int maxEpochs, unsigned int epochsBetweenReports,
float desiredError, unsigned int epochs);
NeuralNet(void);
virtual ~NeuralNet(void);
// --- Network construction -------------------------------------------------
// The varargs creation overloads are intentionally disabled (C++/CLI does
// not interop cleanly with C varargs); the Array variants replace them.
//bool Createstandard(unsigned int numLayers, ...);
bool CreateStandardArray(array<unsigned int,1>^ layers);
//bool Createsparse(float connection_rate, unsigned int numLayers, ...);
bool CreateSparseArray(float connectionRate,array<unsigned int,1>^ layers);
//bool Createshortcut(unsigned int numLayers, ...);
bool CreateShortcutArray(array<unsigned int,1>^ layers);
// --- Execution and weight initialization ----------------------------------
array<fann_type, 1> ^ Run(array<fann_type, 1> ^input);
void RandomizeWeights(fann_type minWeight, fann_type maxWeight);
void InitWeights(TrainingData^ data);
void PrintConnections();
// --- Persistence -----------------------------------------------------------
bool CreateFromFile(System::String^ configurationFile);
bool Save(System::String^ configurationFile);
int SaveToFixed(System::String^ configurationFile);
// --- Training and testing --------------------------------------------------
// NOTE(review): Train/Test take raw fann_type* rather than managed arrays,
// unlike Run — callers must pin or marshal their buffers themselves, and
// the required buffer lengths (network input/output sizes) are implicit.
void Train(fann_type *input, fann_type *desiredOutput);
float TrainEpoch(TrainingData^ data);
void TrainOnData(TrainingData^ data, unsigned int maxEpochs,
unsigned int epochs_between_reports, float desired_error);
void TrainOnFile(System::String^ filename, unsigned int maxEpochs,
unsigned int epochs_between_reports, float desired_error);
// NOTE(review): ownership/lifetime of the returned pointer is not stated —
// presumably it aliases a native internal buffer; do not free it. Confirm.
fann_type * Test(fann_type *input, fann_type *desiredOutput);
float TestData(TrainingData^ data);
// Mean-squared error accumulated since the last reset.
float GetMSE();
void ResetMSE();
// Native-style callback registration is superseded by the Callback event.
//void Setcallback(callback_type callback, void *user_data);
void PrintParameters();
// --- Training parameters (getter/setter pairs mirroring fann_cpp.h) -------
TrainingAlgorithm GetTrainingAlgorithm();
void SetTrainingAlgorithm(TrainingAlgorithm trainingAlgorithm);
float GetLearningRate();
void SetLearningRate(float learningRate);
ActivationFunction GetActivationFunction(int layer, int neuron);
void SetActivationFunction(ActivationFunction activationFunction, int layer, int neuron);
void SetActivationFunctionLayer(ActivationFunction activationFunction, int layer);
void SetActivationFunctionHidden(ActivationFunction activationFunction);
void SetActivationFunctionOutput(ActivationFunction activationFunction);
fann_type GetActivationSteepness(int layer, int neuron);
void SetActivationSteepness(fann_type steepness, int layer, int neuron);
void SetActivationSteepnessLayer(fann_type steepness, int layer);
void SetActivationSteepnessHidden(fann_type steepness);
void SetActivationSteepnessOutput(fann_type steepness);
ErrorFunction GetTrainErrorFunction();
void SetTrainErrorFunction(ErrorFunction trainErrorFunction);
float GetQuickPropDecay();
void SetQuickPropDecay(float quickPropDecay);
float GetQuickPropMu();
void SetQuickPropMu(float quickPropMu);
float GetRPropIncreaseFactor();
void SetRPropIncreaseFactor(float rPropIncreaseFactor);
float GetRPropDecreaseFactor();
void SetRPropDecreaseFactor(float rPropDecreaseFactor);
float GetRPropDeltaMin();
void SetRPropDeltaMin(float rpropDeltaMin);
float GetRPropDeltaMax();
void SetRPropDeltaMax(float rpropDeltaMax);
// --- Topology queries -------------------------------------------------------
unsigned int GetNumInput();
unsigned int GetNumOutput();
unsigned int GetTotalNeurons();
unsigned int GetTotalConnections();
#ifdef FIXEDFANN
unsigned int Getdecimal_point();
unsigned int Getmultiplier();
#endif
NetworkType GetNetworkType();
float GetConnectionRate();
unsigned int GetNumLayers();
// NOTE(review): these two take raw out-buffers the caller must size to
// GetNumLayers() — presumably mirroring fann_get_layer_array; confirm.
void GetLayerArray(unsigned int *layers);
void GetBiasArray(unsigned int *bias);
void GetConnectionArray(array<Connection^,1>^ % connections);
void SetWeightArray(array<FANN::Net::Connection^,1>^ connections);
void SetWeight(unsigned int from_neuron, unsigned int toNeuron, fann_type weight);
float GetLearningMomentum();
void SetLearningMomentum(float learning_momentum);
StopFunction GetTrainStopFunction();
void SetTrainStopFunction(StopFunction trainStopFunction);
fann_type GetBitFailLimit();
void SetBitFailLimit(fann_type bitFailLimit);
unsigned int GetBitFail();
// --- Cascade-correlation training ------------------------------------------
void CascadeTrainOnData(TrainingData^ data, unsigned int maxNeurons,
unsigned int neuronsBetweenReports, float desiredError);
void CascadeTrainOnFile(System::String^ filename, unsigned int maxNeurons,
unsigned int neurons_between_reports, float desiredError);
float GetCascadeOutputChangeFraction();
void SetCascadeOutputChangeFraction(float cascadeOutputChangeFraction);
unsigned int GetCascadeOutputStagnationEpochs();
void SetCascadeOutputStagnationEpochs(unsigned int cascadeOutputStagnationEpochs);
float GetCascadeCandidateChangeFraction();
void SetCascadeCandidateChangeFraction(float cascadecandidateChangeFraction);
unsigned int GetCascadeCandidateStagnationEpochs();
void SetCascadeCandidateStagnationEpochs(unsigned int cascadecandidateStagnationEpochs);
fann_type GetCascadeWeightMultiplier();
void SetCascadeWeightMultiplier(fann_type cascadeWeightMultiplier);
fann_type GetCascadeCandidateLimit();
void SetCascadeCandidateLimit(fann_type cascadeCandidateLimit);
unsigned int GetCascadeMaxOutEpochs();
void SetCascadeMaxOutEpochs(unsigned int cascadeMaxOutEpochs);
unsigned int GetCascadeMaxCandEpochs();
void SetCascadeMaxCandEpochs(unsigned int cascadeMaxCandEpochs);
unsigned int GetCascadeNumCandidates();
unsigned int GetCascadeActivationFunctionsCount();
// NOTE(review): raw pointers to managed enum values crossing this boundary —
// lifetime/ownership of the returned array is not visible here; verify in
// the implementation before relying on it.
ActivationFunction * GetCascadeActivationFunctions();
void SetCascadeActivationFunctions(ActivationFunction *cascadeActivationFunctions,
unsigned int cascadeActivationFunctionsCount);
unsigned int GetCascadeActivationSteepnessesCount();
fann_type *GetCascadeActivationSteepnesses();
void SetCascadeActivationSteepnesses(fann_type *CascadeActivationSteepnesses,
unsigned int CascadeActivationSteepnessesCount);
unsigned int GetCascadeNumCandidateGroups();
void SetCascadeNumCandidateGroups(unsigned int CascadeNumcandidate_groups);
// --- Input/output scaling ---------------------------------------------------
void ScaleTrain(TrainingData^ data);
void DescaleTrain(TrainingData^ data);
bool SetInputScalingParams(TrainingData^ data, float new_inputMin, float new_input_max);
bool SetOutputScalingParams(TrainingData^ data, float newOutputMin, float newOutput_max);
bool SetScalingParams(TrainingData^ data,
float new_inputMin, float new_input_max, float newOutputMin, float newOutput_max);
bool ClearScalingParams();
void ScaleInput(array<fann_type, 1> ^ inputVector);
void ScaleOutput(array<fann_type, 1> ^ output_vector);
void DescaleInput(array<fann_type, 1> ^ input_vector);
void DescaleOutput(array<fann_type, 1> ^ output_vector);
// --- Error reporting --------------------------------------------------------
void SetErrorLog(FILE *log_file);
unsigned int GetErrNo();
void ResetErrNo();
void ResetErrStr();
System::String^ GetErrStr();
void PrintError();
public:
// Managed event wrapping FANN's training callback; add/remove/raise bodies
// are defined out-of-line in the implementation file.
event CallbackType^ Callback
{
void add( CallbackType^ handler );
void remove( CallbackType^ handler );
int raise(NeuralNet^ net, TrainingData^ train,
unsigned int maxEpochs, unsigned int epochsBetweenReports,
float desiredError, unsigned int epochs);
}
internal:
// Wrap an existing native net; Instance() presumably consults m_Instances
// so one native pointer maps to one managed wrapper — confirm in the .cpp.
NeuralNet(FANN::neural_net* net);
static NeuralNet^ Instance(FANN::neural_net* net);
CallbackType^ callbackHandler;
// GCHandle keeps the managed callback delegate alive while native code
// holds the function pointer obtained from it.
GCHandle gch;
// Native-compatible callback thunk; Cdecl matches the FANN C calling
// convention expected on the unmanaged side.
[UnmanagedFunctionPointer(CallingConvention::Cdecl)]
delegate int InternalCallbackType (neural_net &net, training_data &train,
unsigned int max_epochs, unsigned int epochs_between_reports,
float desired_error, unsigned int epochs, void *user_data);
int InternalCallback(neural_net &net, training_data &train,
unsigned int max_epochs, unsigned int epochs_between_reports,
float desired_error, unsigned int epochs, void *user_data);
private:
// Registry of live wrappers. NOTE(review): the key type is unsigned int —
// if the key is derived from the native pointer this truncates on 64-bit
// builds and could collide; verify how Instance() computes the key.
static Dictionary<unsigned int,NeuralNet^>^ m_Instances = gcnew Dictionary<unsigned int,NeuralNet^>();
// Delegate instance kept referenced so its marshalled thunk stays valid.
InternalCallbackType^ m_internalCallback;
};
}
}