Neural Networks: A 3-Layer BP Network Packaged as a VC Dynamic-Link Library
Neural networks are a large field; this article discusses the error back-propagation network (BP network). The basic principle is to adjust the network's many free parameters until they reach convergent values. The process of driving the parameters to convergence is the training process; after training, the network is essentially stable.
Network operation process:
A simple single-layer BP network has N0 inputs X and, correspondingly, N0 free parameters w (weights).
1. Forward propagation
The inputs X and the weights W are combined in a linear weighted sum to give the induced local field v; v is then passed through an activation function to produce the network output. The activation function is generally required to be a sigmoid nonlinearity. One widely used sigmoid is the logistic function, with expression
logistic(v) = 1 / (1 + exp(-a*v))
where a is the slope of the function. This yields the output value of the network.
The hyperbolic tangent function is also commonly used; choose whichever suits the task.
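To make this concrete, here is a minimal sketch in C++ of this forward step for a single neuron (the names logistic and neuronOutput are illustrative only and are not part of the DLL presented later):

#include <cmath>

// logistic (sigmoid) activation: 1 / (1 + exp(-a*v))
double logistic(double v, double a)
{
    return 1.0 / (1.0 + exp(-a * v));
}

// forward step of a single neuron: linear summation, then the activation
double neuronOutput(const double* x, const double* w, int count, double a)
{
    double v = 0;
    for (int i = 0; i < count; i++)
        v += w[i] * x[i]; // induced local field v
    return logistic(v, a);
}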
2. Error back-propagation
The error is computed as e = d - o, where d is the desired (target) network output and o is the network's actual output.
First compute the local gradient of the output node:
s = e * f'(v)
where f is the activation function and f' denotes its derivative.
The weight adjustment value is Delta = n * s * y, where:
n is the learning rate, typically taking a value of 0.01 ~ 0.9;
s is the local gradient computed above;
y is the output of the preceding node (the input signal carried by this weight).
Then perform the weight adjustment: w = w + Delta.
After many such iterations the weights converge, achieving the purpose of training.
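Putting steps 1 and 2 together, a minimal sketch of one weight-update step for a single output neuron might look like the following (assuming the logistic activation above, whose derivative is f'(v) = a*o*(1-o); the function name updateOutputNeuron and the parameter eta are illustrative, not part of the DLL below):

// one delta-rule update for an output neuron with logistic activation;
// d: desired output, o: actual output, a: function slope, eta: learning rate,
// y: the inputs feeding this neuron, w: its weights (count of them)
void updateOutputNeuron(double* w, const double* y, int count,
                        double d, double o, double a, double eta)
{
    double e = d - o;               // error
    double s = e * a * o * (1 - o); // local gradient s = e * f'(v)
    for (int i = 0; i < count; i++)
        w[i] += eta * s * y[i];     // w = w + Delta, Delta = eta * s * y
}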
The training process is like educating a child: by continually teaching, the child gradually learns what is right and what is wrong. In this sense the neural network belongs to the field of artificial intelligence.
The essence of training is to make the network's free parameters W converge to a desired level. The adjustment of each free parameter is computed from the error, so as training proceeds the error becomes smaller and smaller.
In essence, the network approximates a highly nonlinear mapping over a high-dimensional feature space.
A BP network is not limited to a single layer; multiple layers can be configured, although the error calculation then becomes more complicated, as sketched below.
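For the hidden layer of a multi-layer network, the extra step is (in the same notation as above): a hidden node has no directly observable error, so its local gradient is assembled from the gradients of the nodes it feeds,

s_hidden = f'(v) * sum_k ( s_k * w_k )

where s_k is the local gradient of output node k and w_k is the weight connecting this hidden node to output node k; the update rule Delta = n * s * y then applies unchanged. The Tran function in the code below computes exactly this step.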
Below is the DLL of a 3-layer BP network that I built with VC.NET: one input layer, one hidden layer, and one output layer, where the number of neurons in each layer can be adjusted as needed. The network is fully connected and uses the logistic function as the activation function; the function slope is fixed at 1, and the learning rate is freely adjustable.
//-------------- VC.NET DLL
//-------------- the actual code starts below
#include <windows.h>
#include <fstream>
#include <cmath>

int N0, N1, N2;  //------ number of nodes in each layer
double *A1, *A2; //------ weights
double *x;       //------ input
double a;        //------ logistic function slope parameter
double *O1, *O2; //------ output of each neuron
double *S1, *S2; //------ local gradients
double n;        //------ learning rate
//---------- network parameter initialization
extern "C" __declspec(dllexport)
BOOL InitNetwork(int n0, int n1, int n2, double nn = 0.8)
{
    N0 = n0;
    N1 = n1;
    N2 = n2;
    A1 = new double[(N1 + 1) * (N0 + 1)];
    A2 = new double[N2 * (N1 + 1)];
    x  = new double[N0 + 1];
    O1 = new double[N1 + 1];
    O2 = new double[N2];
    S1 = new double[N1 + 1];
    S2 = new double[N2];
    //---------- weight initialization
    for (int i = 0; i < (N1 + 1) * (N0 + 1); i++)
    {
        A1[i] = 1;
    }
    for (int i = 0; i < N2 * (N1 + 1); i++)
    {
        A2[i] = 1;
    }
    a = 1;  //---- logistic function slope parameter
    n = nn; //---- learning rate
    return TRUE;
}
//------------ release network memory
extern "C" __declspec(dllexport)
BOOL ReleaseNetwork()
{
    delete[] A1;
    delete[] A2;
    delete[] x;
    delete[] O1;
    delete[] O2;
    delete[] S1;
    delete[] S2;
    return TRUE;
}
//----------------- shared forward computation
BOOL CommonWork()
{
    int i, j;
    //-------- feed the inputs through to the hidden layer
    double v = 0;
    for (i = 0; i < N1; i++)
    {
        v = 0;
        for (j = 0; j < N0 + 1; j++)
        {
            v += A1[i * (N0 + 1) + j] * x[j]; // linear summation
        }
        // logistic function (sigmoid)
        O1[i] = 1.0 / (1.0 + exp(-a * v));
    }
    O1[N1] = 1; //-------- hidden layer fixed bias = 1
    //------------ compute the output layer output
    for (i = 0; i < N2; i++)
    {
        v = 0;
        for (j = 0; j < N1 + 1; j++)
        {
            v += A2[i * (N1 + 1) + j] * O1[j]; // linear summation
        }
        // logistic function (sigmoid)
        O2[i] = 1.0 / (1.0 + exp(-a * v));
    }
    return TRUE;
}
//--------------- network classification: the input range is [0, 1], the output is the
//--------------- quantized output of the N2 output neurons
extern "C" __declspec(dllexport)
BOOL Work(double* input, int* output)
{
    int i;
    for (i = 0; i < N0; i++)
    {
        x[i] = input[i];
    }
    x[N0] = 1; //------- input layer fixed bias
    CommonWork();
    //---- quantize the output layer neurons to 0 or 1
    for (i = 0; i < N2; i++)
    {
        if (O2[i] < 0.5)
            output[i] = 0;
        else
            output[i] = 1;
    }
    return TRUE;
}
//------------ training
extern "C" __declspec(dllexport)
BOOL Tran(double* input, int* expected)
{
    int i = 0, j = 0;
    for (i = 0; i < N0; i++)
    {
        x[i] = input[i];
    }
    x[N0] = 1; //------- input layer fixed bias
    CommonWork();
    //------------ output layer adjustment
    //---------- compute the output layer local gradients
    for (i = 0; i < N2; i++)
    {
        S2[i] = a * ((double)expected[i] - O2[i]) * O2[i] * (1 - O2[i]);
    }
    //------------ adjust the output layer weights
    for (i = 0; i < N2; i++)
    {
        for (j = 0; j < N1 + 1; j++)
        {
            A2[i * (N1 + 1) + j] += n * S2[i] * O1[j];
        }
    }
    //---------------- hidden layer adjustment
    //---------- compute the hidden layer local gradients, back-propagating
    //---------- the output gradients through the output layer weights A2
    for (i = 0; i < N1; i++)
    {
        double ek = 0;
        for (j = 0; j < N2; j++)
        {
            ek += S2[j] * A2[j * (N1 + 1) + i];
        }
        S1[i] = a * O1[i] * (1 - O1[i]) * ek;
    }
    //------------ adjust the hidden layer weights
    for (i = 0; i < N1; i++)
    {
        for (j = 0; j < N0 + 1; j++)
        {
            A1[i * (N0 + 1) + j] += n * S1[i] * x[j];
        }
    }
    return TRUE;
}
//------------ save the weights to a file
extern "C" __declspec(dllexport)
BOOL SaveNetwork(char* filepath)
{
    std::fstream f;
    f.open(filepath, std::ios_base::out | std::ios_base::trunc);
    int i;
    for (i = 0; i < (N1 + 1) * (N0 + 1); i++)
    {
        f << A1[i] << " ";
    }
    for (i = 0; i < N2 * (N1 + 1); i++)
    {
        f << A2[i] << " ";
    }
    f.close();
    return TRUE;
}
//------------ load the weights from a file
extern "C" __declspec(dllexport)
BOOL LoadNetwork(char* filepath)
{
    std::fstream f;
    f.open(filepath, std::ios_base::in);
    int i;
    for (i = 0; i < (N1 + 1) * (N0 + 1); i++)
    {
        f >> A1[i];
    }
    for (i = 0; i < N2 * (N1 + 1); i++)
    {
        f >> A2[i];
    }
    f.close();
    return TRUE;
}
//------------------------------------------------------------------
Attached is a usage sample: 8 input signals, 4 hidden-layer neurons, 1 output neuron.
//----------------------- VC.NET console program
#include <windows.h>
#include <iostream>

typedef BOOL (*PInitNetwork)(int n0, int n1, int n2, double nn);
typedef BOOL (*PReleaseNetwork)();
typedef BOOL (*PWork)(double* input, int* output);
typedef BOOL (*PTran)(double* input, int* expected);
typedef BOOL (*PSaveNetwork)(char* filepath);
typedef BOOL (*PLoadNetwork)(char* filepath);

PInitNetwork InitNetwork = NULL;
PReleaseNetwork ReleaseNetwork = NULL;
PWork Work = NULL;
PTran Tran = NULL;
PSaveNetwork SaveNetwork = NULL;
PLoadNetwork LoadNetwork = NULL;

int main(int argc, char* argv[])
{
    int i = 0, j = 0, k = 0;
    HMODULE h = ::LoadLibrary("f:\\projects\\bpnetwork3\\release\\bpnetwork3.dll");
    if (h == NULL)
    {
        std::cout << "load dll error\n";
        system("pause");
        return -1;
    }
    InitNetwork = (PInitNetwork)::GetProcAddress(h, "InitNetwork");
    ReleaseNetwork = (PReleaseNetwork)::GetProcAddress(h, "ReleaseNetwork");
    Work = (PWork)::GetProcAddress(h, "Work");
    Tran = (PTran)::GetProcAddress(h, "Tran");
    SaveNetwork = (PSaveNetwork)::GetProcAddress(h, "SaveNetwork");
    LoadNetwork = (PLoadNetwork)::GetProcAddress(h, "LoadNetwork");
    if (InitNetwork == NULL || ReleaseNetwork == NULL || Work == NULL ||
        Tran == NULL || SaveNetwork == NULL || LoadNetwork == NULL)
    {
        std::cout << "get function address error\n";
        ::FreeLibrary(h);
        system("pause");
        return -1;
    }
    InitNetwork(8, 4, 1, 0.8); //----------- initialize: 8 inputs, 4 hidden neurons, 1 output, learning rate 0.8
    double x[8]; // 8 inputs
    int y[1];    // 1 output
    //------------- before training
    for (i = 0; i < 10; i++)
    {
        //----------- convert i to an 8-bit binary number as the input
        for (k = 0; k < 8; k++)
        {
            x[k] = (double)((i >> k) & 1);
        }
        Work(x, y); // feed it to the network for classification
        std::cout << "input " << i << "\toutput " << y[0] << "\n"; // display output
    }
    std::cout << "\n\n\n\n";
    //---------------- run 1000 rounds of training; the goal is for the trained network to
    //---------------- tell whether the number is odd or even: odd outputs 1, even outputs 0
    int expected = 0; // expected value
    for (i = 0; i < 1000; i++)
    {
        for (j = 0; j < 10; j++)
        {
            // convert j to an 8-bit binary number as the 8 network inputs
            for (k = 0; k < 8; k++)
            {
                x[k] = (double)((j >> k) & 1);
            }
            expected = j % 2;   // expected result
            Tran(x, &expected); // train
        }
    }
    //------------- training complete; check the training results below
    for (i = 0; i < 10; i++)
    {
        //----------- convert i to an 8-bit binary number as the input
        for (k = 0; k < 8; k++)
        {
            x[k] = (double)((i >> k) & 1);
        }
        Work(x, y); // feed it to the network for classification
        std::cout << "input " << i << "\toutput " << y[0] << "\n"; // display network output
    }
    ReleaseNetwork();
    ::FreeLibrary(h);
    system("pause");
    return 0;
}
//--------------------------------
After actually running it, the program learns whether a number between 0 and 9 is odd or even. The experiment was successful. The program can be downloaded at http://www.Lingch.Net/myProducts/bpNetwork.rar
The structure of the network is shown in the accompanying figure. Neural networks have been widely applied in pattern recognition and pattern classification.