Fore syntax: DeltasMinimum: Double;
Fore.NET syntax: DeltasMinimum: double;
The DeltasMinimum property sets the minimum value of the accuracy parameter delta.
Delta is the error value used to adjust synapse weights. If the delta values for all weights fall below DeltasMinimum, the learning process finishes.
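As a quick illustration, the sketch below shows only how the stopping criteria are configured: training runs for at most Epoch iterations and finishes earlier once every weight delta drops below DeltasMinimum. This is a minimal sketch, assuming data is assigned to the network as in the full example that follows; the procedure name SetStoppingCriteria is hypothetical.
Sub SetStoppingCriteria;
Var
NN: SmBackPropagation;
res: Integer;
Begin
NN := New SmBackPropagation.Create;
// Assign NN.Dependent and NN.Explanatories here, as in the full example below
NN.Epoch := 400; // upper limit on the number of learning iterations
NN.DeltasMinimum := 0.001; // finish learning once all weight deltas fall below this value
res := NN.Execute;
Debug.WriteLine(res); // 0 indicates successful execution
End Sub SetStoppingCriteria;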
To execute the example below, add links to the MathFin and Stat system assemblies.
Sub UserBackP;
Var
NN: SmBackPropagation;
y, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10: Array[15] Of Double;
x11, x12, x13, x14, x15, x16, x17, x18, x19, x20: Array[15] Of Double;
x21, x22, x23, x24, x25, x26, x27, x28, x29, x30: Array[15] Of Double;
Ex: ISlSeries;
res, i, j: Integer;
masI, PerformanceMatrix: Array Of Double;
CatList, CategoriesList, KfoldIntervals: Array Of Integer;
s: String;
CrossValidation: ICrossValidation;
CrossValPerf: ICrossValidationPerformanceScores;
Begin
NN := New SmBackPropagation.Create;
// Generate source data. 15 objects with 30 attributes
For i := 0 To 14 Do
x1[i] := Math.RandBetween(-5, 5); x16[i] := Math.RandBetween(-5, 5);
x2[i] := Math.RandBetween(-5, 5); x17[i] := Math.RandBetween(-5, 5);
x3[i] := Math.RandBetween(-5, 5); x18[i] := Math.RandBetween(-5, 5);
x4[i] := Math.RandBetween(-5, 5); x19[i] := Math.RandBetween(-5, 5);
x5[i] := Math.RandBetween(-5, 5); x20[i] := Math.RandBetween(-5, 5);
x6[i] := Math.RandBetween(-5, 5); x21[i] := Math.RandBetween(-5, 5);
x7[i] := Math.RandBetween(-5, 5); x22[i] := Math.RandBetween(-5, 5);
x8[i] := Math.RandBetween(-5, 5); x23[i] := Math.RandBetween(-5, 5);
x9[i] := Math.RandBetween(-5, 5); x24[i] := Math.RandBetween(-5, 5);
x10[i] := Math.RandBetween(-5, 5); x25[i] := Math.RandBetween(-5, 5);
x11[i] := Math.RandBetween(-5, 5); x26[i] := Math.RandBetween(-5, 5);
x12[i] := Math.RandBetween(-5, 5); x27[i] := Math.RandBetween(-5, 5);
x13[i] := Math.RandBetween(-5, 5); x28[i] := Math.RandBetween(-5, 5);
x14[i] := Math.RandBetween(-5, 5); x29[i] := Math.RandBetween(-5, 5);
x15[i] := Math.RandBetween(-5, 5); x30[i] := Math.RandBetween(-5, 5);
End For;
// Explained series values
y[0] := 1; y[5] := 6; y[10] := -1;
y[1] := 2; y[6] := 4; y[11] := -1;
y[2] := 5; y[7] := 8; y[12] := -1;
y[3] := 4; y[8] := 8; y[13] := -1;
y[4] := 5; y[9] := 7; y[14] := -1;
// Set explained series
NN.Dependent.Value := y;
// Set explanatory series
Ex := NN.Explanatories;
Ex.Add.Value := x1; Ex.Add.Value := x11; Ex.Add.Value := x21;
Ex.Add.Value := x2; Ex.Add.Value := x12; Ex.Add.Value := x22;
Ex.Add.Value := x3; Ex.Add.Value := x13; Ex.Add.Value := x23;
Ex.Add.Value := x4; Ex.Add.Value := x14; Ex.Add.Value := x24;
Ex.Add.Value := x5; Ex.Add.Value := x15; Ex.Add.Value := x25;
Ex.Add.Value := x6; Ex.Add.Value := x16; Ex.Add.Value := x26;
Ex.Add.Value := x7; Ex.Add.Value := x17; Ex.Add.Value := x27;
Ex.Add.Value := x8; Ex.Add.Value := x18; Ex.Add.Value := x28;
Ex.Add.Value := x9; Ex.Add.Value := x19; Ex.Add.Value := x29;
Ex.Add.Value := x10; Ex.Add.Value := x20; Ex.Add.Value := x30;
// Number of neurons in the internal layer
NN.Neurons := 22;
// Number of iterations
NN.Epoch := 400;
// Minimum delta value
NN.DeltasMinimum := 0.001;
// Set cross-validation parameters
CrossValidation := NN.CrossValidation;
CrossValidation.SamplingType := CrossValidationSamplingType.Kfold;
CrossValidation.NumberOfFolds := 8;
// Perform calculation and display results
res := NN.Execute;
If res = 0 Then
Debug.WriteLine(" == Pattern substitution == ");
Debug.WriteLine("Before After ");
masI := NN.FilledDependent.Value;
For i := 0 To y.Length - 1 Do
If y[i] <> -1 Then
s := y[i].ToString;
Else
s := "-";
End If;
s := s + " " + masI[i].ToString;
Debug.WriteLine(s);
End For;
// Display list of categories
CatList := NN.CategoriesList;
If CatList.Length > 0 Then
Debug.WriteLine(" == List of categories == "); Debug.Indent;
For i := 0 To CatList.Length - 1 Do
Debug.WriteLine(CatList[i]);
End For;
Debug.Unindent;
End If;
// Display summary classification results
Debug.WriteLine(" === Summary classification results ===");
Debug.Indent;
s := "";
For i := 0 To NN.ClassificationSummary.GetUpperBound(1) Do
For j := 0 To NN.ClassificationSummary.GetUpperBound(2) Do
s := s + NN.ClassificationSummary[i, j].ToString + " ";
End For;
Debug.WriteLine(s);
s := "";
End For;
Debug.Unindent;
// Display cross-validation results
NN.ExecuteValidation;
CrossValPerf := NN.PerformanceScores;
Debug.WriteLine(" === Cross-validation results === ");
Debug.Indent;
Debug.WriteLine("Analyzed attribute: " + CrossValPerf.ClassificatorName);
Debug.Write("Number of factors affecting the analyzed attribute: ");
Debug.WriteLine(CrossValPerf.FactorsNumber);
Debug.WriteLine("Number of observations: " + CrossValPerf.ObservationsNumber.ToString);
Debug.WriteLine("Number of repetitions: " + CrossValidation.NumberOfRandomTests.ToString);
Debug.WriteLine("Classification accuracy: " + CrossValPerf.ClassificationAccuracy.ToString);
Debug.WriteLine("Categories:");
Debug.Indent;
CategoriesList := CrossValPerf.CategoriesList;
For i := 0 To CategoriesList.Length - 1 Do
Debug.WriteLine(CategoriesList[i]);
End For;
Debug.Unindent;
Debug.WriteLine("Fold intervals:");
Debug.Indent;
KfoldIntervals := CrossValPerf.KfoldIntervals;
For i := 0 To KfoldIntervals.Length - 1 Do
Debug.WriteLine(KfoldIntervals[i]);
End For;
Debug.Unindent;
Debug.WriteLine("Correct classification:");
Debug.Indent;
PerformanceMatrix := CrossValPerf.PerformanceMatrix;
For i := 0 To PerformanceMatrix.GetUpperBound(1) - 1 Do
For j := 0 To PerformanceMatrix.GetUpperBound(2) - 1 Do
Debug.Write(PerformanceMatrix[i, j].ToString + #9);
End For;
Debug.WriteLine("");
End For;
Debug.Unindent;
Debug.Unindent;
End If;
End Sub UserBackP;
After executing the example, the specified data is classified by means of a back-propagation network; the console window displays the pattern substitution results, the list of categories, the summary classification results, and the cross-validation results.
The requirements and results of the Fore.NET example execution match those of the Fore example.
Imports Prognoz.Platform.Interop.MathFin;
Imports Prognoz.Platform.Interop.Stat;
…
Public Shared Sub UserBackP();
Var
NN: SmBackPropagation;
y, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10: Array[15] Of Double;
x11, x12, x13, x14, x15, x16, x17, x18, x19, x20: Array[15] Of Double;
x21, x22, x23, x24, x25, x26, x27, x28, x29, x30: Array[15] Of Double;
Ex: ISlSeries;
res, i, j: Integer;
CatList, masI, PerformanceMatrix, CategoriesList, KfoldIntervals: System.Array;
m: Prognoz.Platform.Interop.MathFin.MathClass;
s: string;
CrossValidation: ICrossValidation;
CrossValPerf: ICrossValidationPerformanceScores;
Begin
NN := New SmBackPropagation.Create();
// Generate source data. 15 objects with 30 attributes
m := New Prognoz.Platform.Interop.MathFin.MathClass.Create();
For i := 0 To 14 Do
x1[i] := m.RandBetween(-5, 5); x16[i] := m.RandBetween(-5, 5);
x2[i] := m.RandBetween(-5, 5); x17[i] := m.RandBetween(-5, 5);
x3[i] := m.RandBetween(-5, 5); x18[i] := m.RandBetween(-5, 5);
x4[i] := m.RandBetween(-5, 5); x19[i] := m.RandBetween(-5, 5);
x5[i] := m.RandBetween(-5, 5); x20[i] := m.RandBetween(-5, 5);
x6[i] := m.RandBetween(-5, 5); x21[i] := m.RandBetween(-5, 5);
x7[i] := m.RandBetween(-5, 5); x22[i] := m.RandBetween(-5, 5);
x8[i] := m.RandBetween(-5, 5); x23[i] := m.RandBetween(-5, 5);
x9[i] := m.RandBetween(-5, 5); x24[i] := m.RandBetween(-5, 5);
x10[i] := m.RandBetween(-5, 5); x25[i] := m.RandBetween(-5, 5);
x11[i] := m.RandBetween(-5, 5); x26[i] := m.RandBetween(-5, 5);
x12[i] := m.RandBetween(-5, 5); x27[i] := m.RandBetween(-5, 5);
x13[i] := m.RandBetween(-5, 5); x28[i] := m.RandBetween(-5, 5);
x14[i] := m.RandBetween(-5, 5); x29[i] := m.RandBetween(-5, 5);
x15[i] := m.RandBetween(-5, 5); x30[i] := m.RandBetween(-5, 5);
End For;
// Explained series values
y[0] := 1; y[5] := 6; y[10] := -1;
y[1] := 2; y[6] := 4; y[11] := -1;
y[2] := 5; y[7] := 8; y[12] := -1;
y[3] := 4; y[8] := 8; y[13] := -1;
y[4] := 5; y[9] := 7; y[14] := -1;
// Set explained series
NN.Dependent.Value := y;
// Set explanatory series
Ex := NN.Explanatories;
Ex.Add().Value := x1; Ex.Add().Value := x11; Ex.Add().Value := x21;
Ex.Add().Value := x2; Ex.Add().Value := x12; Ex.Add().Value := x22;
Ex.Add().Value := x3; Ex.Add().Value := x13; Ex.Add().Value := x23;
Ex.Add().Value := x4; Ex.Add().Value := x14; Ex.Add().Value := x24;
Ex.Add().Value := x5; Ex.Add().Value := x15; Ex.Add().Value := x25;
Ex.Add().Value := x6; Ex.Add().Value := x16; Ex.Add().Value := x26;
Ex.Add().Value := x7; Ex.Add().Value := x17; Ex.Add().Value := x27;
Ex.Add().Value := x8; Ex.Add().Value := x18; Ex.Add().Value := x28;
Ex.Add().Value := x9; Ex.Add().Value := x19; Ex.Add().Value := x29;
Ex.Add().Value := x10; Ex.Add().Value := x20; Ex.Add().Value := x30;
// Number of neurons in the internal layer
NN.Neurons := 22;
// Number of iterations
NN.Epoch := 400;
// Minimum delta value
NN.DeltasMinimum := 0.001;
// Set cross-validation parameters
CrossValidation := NN.CrossValidation;
CrossValidation.SamplingType := CrossValidationSamplingType.cvstKfold;
CrossValidation.NumberOfFolds := 4;
// Perform calculation and display results
res := NN.Execute();
If res = 0 Then
System.Diagnostics.Debug.WriteLine(" == Pattern substitution == ");
System.Diagnostics.Debug.WriteLine("Before After ");
masI := NN.FilledDependent.Value;
For i := 0 To y.Length - 1 Do
If y[i] <> -1 Then
s := y[i].ToString();
Else
s := "-";
End If;
s := s + " " + masI[i].ToString();
System.Diagnostics.Debug.WriteLine(s);
End For;
// Display list of categories
CatList := NN.CategoriesList;
If CatList.Length > 0 Then
System.Diagnostics.Debug.WriteLine(" == List of categories == ");
System.Diagnostics.Debug.Indent();
For i := 0 To CatList.Length - 1 Do
System.Diagnostics.Debug.WriteLine(CatList[i]);
End For;
System.Diagnostics.Debug.Unindent();
End If;
// Display summary classification results
System.Diagnostics.Debug.WriteLine(" === Summary classification results ===");
System.Diagnostics.Debug.Indent();
s := "";
For i := 0 To NN.ClassificationSummary.GetUpperBound(0) Do
For j := 0 To NN.ClassificationSummary.GetUpperBound(1) Do
s := s + NN.ClassificationSummary.GetValue(i, j).ToString() + " ";
End For;
System.Diagnostics.Debug.WriteLine(s);
s := "";
End For;
System.Diagnostics.Debug.Unindent();
// Display cross-validation results
NN.ExecuteValidation();
CrossValPerf := NN.PerformanceScores;
System.Diagnostics.Debug.WriteLine(" === Cross-validation results === ");
System.Diagnostics.Debug.Indent();
System.Diagnostics.Debug.WriteLine("Analyzed attribute: " + CrossValPerf.ClassificatorName);
System.Diagnostics.Debug.Write("Number of factors affecting the analyzed attribute: ");
System.Diagnostics.Debug.WriteLine(CrossValPerf.FactorsNumber);
System.Diagnostics.Debug.WriteLine("Number of observations: " + CrossValPerf.ObservationsNumber.ToString());
System.Diagnostics.Debug.WriteLine("Number of repetitions: " + CrossValidation.NumberOfRandomTests.ToString());
System.Diagnostics.Debug.WriteLine("Classification accuracy: " + CrossValPerf.ClassificationAccuracy.ToString());
System.Diagnostics.Debug.WriteLine("Categories:");
System.Diagnostics.Debug.Indent();
CategoriesList := CrossValPerf.CategoriesList;
For i := 0 To CategoriesList.Length - 1 Do
System.Diagnostics.Debug.WriteLine(CategoriesList[i]);
End For;
System.Diagnostics.Debug.Unindent();
System.Diagnostics.Debug.WriteLine("Fold intervals:");
System.Diagnostics.Debug.Indent();
KfoldIntervals := CrossValPerf.KfoldIntervals;
For i := 0 To KfoldIntervals.Length - 1 Do
System.Diagnostics.Debug.WriteLine(KfoldIntervals[i]);
End For;
System.Diagnostics.Debug.Unindent();
System.Diagnostics.Debug.WriteLine("Correct classification:");
System.Diagnostics.Debug.Indent();
PerformanceMatrix := CrossValPerf.PerformanceMatrix;
For i := 0 To PerformanceMatrix.GetUpperBound(0) - 1 Do
For j := 0 To PerformanceMatrix.GetUpperBound(1) - 1 Do
System.Diagnostics.Debug.Write(PerformanceMatrix[i, j].ToString() + char.ConvertFromUtf32(9));
End For;
System.Diagnostics.Debug.WriteLine("");
End For;
System.Diagnostics.Debug.Unindent();
System.Diagnostics.Debug.Unindent();
End If;
End Sub UserBackP;