One-Ring-to-rule-them-all v2.0.4
Table des procédures 1 - AssemblyInfo.vb 2 - clsMatrixHP.vb 2.1 - Default Public Property Item# 2.2 - Private Function Abs_ 2.3 - Private Function Average_# 2.4 - Private Shared Function DeltaMult 2.5 - Private Shared Function MatdoubleMult 2.6 - Private Shared Function MatdoubleSum 2.7 - Private Shared Function MatMult 2.8 - Private Shared Function MatSum 2.9 - Private Shared Function Transpose_ 2.10 - Public Function AddColumn 2.11 - Public Function GetColumn 2.12 - Public Function GetRow 2.13 - Public Function GetValue# 2.14 - Public Function Slice 2.15 - Public Function Sumatory 2.16 - Public Function Threshold 2.17 - Public Function ToArray 2.18 - Public Function ToArrayOfSingle 2.19 - Public Function ToStringWithFormat$ 2.20 - Public Overrides Function ToString$ 2.21 - Public Property ItemUIntSng! 2.22 - Public Property matrixP 2.23 - Public ReadOnly Property Abs 2.24 - Public ReadOnly Property Average# 2.25 - Public ReadOnly Property c% 2.26 - Public ReadOnly Property isDefined 2.27 - Public ReadOnly Property r% 2.28 - Public ReadOnly Property Size 2.29 - Public ReadOnly Property T 2.30 - Public Shared Function Dot 2.31 - Public Shared Function FromArraySingle 2.32 - Public Shared Function Identy 2.33 - Public Shared Function Map 2.34 - Public Shared Function MultiplyAddAndMap 2.35 - Public Shared Function MultiplyAndMap 2.36 - Public Shared Function Ones 2.37 - Public Shared Function Pow 2.38 - Public Shared Function Randomize 2.39 - Public Shared Function SubtractFromArraySingle 2.40 - Public Shared Function Sumatory 2.41 - Public Shared Function TransposeAndMultiply1 2.42 - Public Shared Function TransposeAndMultiply2 2.43 - Public Shared Function Zeros 2.44 - Public Shared Sub MatrixLoop 2.45 - Public Sub New 2.46 - Public Sub New 2.47 - Public Sub New 2.48 - Public Sub New 2.49 - Public Sub Randomize 3 - clsMatrixMN.vb 3.1 - Default Public Property Item# 3.2 - Private Function Abs_ 3.3 - Private Function Average_# 3.4 - Private Function IClone 3.5 - 
Private Overloads Sub Multiply 3.6 - Private Shared Function Transpose_ 3.7 - Private Sub Constructor 3.8 - Public Function AddColumn 3.9 - Public Function Clone 3.10 - Public Function GetColumn 3.11 - Public Function GetRow 3.12 - Public Function GetValue# 3.13 - Public Function Slice 3.14 - Public Function Sumatory 3.15 - Public Function Threshold 3.16 - Public Function ToArray 3.17 - Public Function ToArrayOfSingle 3.18 - Public Function ToStringWithFormat$ 3.19 - Public Overrides Function ToString$ 3.20 - Public Property ItemUIntSng! 3.21 - Public Property matrixP 3.22 - Public ReadOnly Property Abs 3.23 - Public ReadOnly Property Average# 3.24 - Public ReadOnly Property c% 3.25 - Public ReadOnly Property isDefined 3.26 - Public ReadOnly Property r% 3.27 - Public ReadOnly Property Size 3.28 - Public ReadOnly Property T 3.29 - Public Shared Function FromArraySingle 3.30 - Public Shared Function Map 3.31 - Public Shared Function MultiplyAddAndMap 3.32 - Public Shared Function MultiplyAndMap 3.33 - Public Shared Function Ones 3.34 - Public Shared Function Randomize 3.35 - Public Shared Function SubtractFromArraySingle 3.36 - Public Shared Function Sumatory 3.37 - Public Shared Function TransposeAndMultiply1 3.38 - Public Shared Function TransposeAndMultiply2 3.39 - Public Shared Function Zeros 3.40 - Public Shared Sub MatrixLoop 3.41 - Public Sub Map 3.42 - Public Sub New 3.43 - Public Sub New 3.44 - Public Sub New 3.45 - Public Sub New 3.46 - Public Sub New 3.47 - Public Sub Randomize 4 - clsMatrixNL.vb 4.1 - Default Public Property Item# 4.2 - Private Function Abs_ 4.3 - Private Function Average_# 4.4 - Private Function IClone 4.5 - Private Overloads Shared Function Multiply 4.6 - Private Overloads Shared Function Subtract 4.7 - Private Overloads Sub Multiply 4.8 - Private Overloads Sub Multiply 4.9 - Private Overloads Sub Subtract 4.10 - Private Overloads Sub Subtract 4.11 - Private Shared Function Transpose_ 4.12 - Private Sub Add 4.13 - Public Function 
AddColumn 4.14 - Public Function Clone 4.15 - Public Function GetColumn 4.16 - Public Function GetRow 4.17 - Public Function GetValue# 4.18 - Public Function Slice 4.19 - Public Function Sumatory 4.20 - Public Function Threshold 4.21 - Public Function ToArray 4.22 - Public Function ToArrayOfSingle 4.23 - Public Function ToStringWithFormat$ 4.24 - Public Overrides Function ToString$ 4.25 - Public Property ItemUIntSng! 4.26 - Public Property matrixP 4.27 - Public ReadOnly Property Abs 4.28 - Public ReadOnly Property Average# 4.29 - Public ReadOnly Property c% 4.30 - Public ReadOnly Property isDefined 4.31 - Public ReadOnly Property r% 4.32 - Public ReadOnly Property Size 4.33 - Public ReadOnly Property T 4.34 - Public Shared Function FromArraySingle 4.35 - Public Shared Function Map 4.36 - Public Shared Function MultiplyAddAndMap 4.37 - Public Shared Function MultiplyAndMap 4.38 - Public Shared Function Ones 4.39 - Public Shared Function Randomize 4.40 - Public Shared Function SubtractFromArraySingle 4.41 - Public Shared Function Sumatory 4.42 - Public Shared Function TransposeAndMultiply1 4.43 - Public Shared Function TransposeAndMultiply2 4.44 - Public Shared Function Zeros 4.45 - Public Shared Sub MatrixLoop 4.46 - Public Sub Map 4.47 - Public Sub New 4.48 - Public Sub New 4.49 - Public Sub New 4.50 - Public Sub New 4.51 - Public Sub Randomize 4.52 - Sub(i 4.53 - Sub(j 5 - clsMLPGeneric.vb 5.1 - Protected Sub PrintSuccess 5.2 - Public Function ShowThisIteration 5.3 - Public MustOverride Function GetMLPType$ 5.4 - Public MustOverride Sub InitializeWeights 5.5 - Public MustOverride Sub Randomize 5.6 - Public MustOverride Sub TestOneSample 5.7 - Public MustOverride Sub TrainOneSample 5.8 - Public Overridable Function ComputeAverageError# 5.9 - Public Overridable Function ComputeAverageErrorFromLastError# 5.10 - Public Overridable Function ComputeAverageErrorOneSample# 5.11 - Public Overridable Function ComputeAverageSignedErrorFromLastError# 5.12 - Public Overridable 
Function GetActivationFunctionType 5.13 - Public Overridable Function GetWeight# 5.14 - Public Overridable Function GetWeightSingle! 5.15 - Public Overridable Function ShowWeights$ 5.16 - Public Overridable Sub CloseTrainingSession 5.17 - Public Overridable Sub ComputeError 5.18 - Public Overridable Sub ComputeErrorOneSample 5.19 - Public Overridable Sub ComputeSuccess 5.20 - Public Overridable Sub InitializeStruct 5.21 - Public Overridable Sub PrintOutput 5.22 - Public Overridable Sub PrintWeights 5.23 - Public Overridable Sub SetActivationFunction 5.24 - Public Overridable Sub SetActivationFunctionOptimized 5.25 - Public Overridable Sub SetWeight 5.26 - Public Overridable Sub SetWeightSingle 5.27 - Public Overridable Sub TestAllSamples 5.28 - Public Overridable Sub TestOneSampleAndComputeError 5.29 - Public Overridable Sub TrainSemiStochastic 5.30 - Public Overridable Sub TrainStochastic 5.31 - Public Overridable Sub TrainSystematic 5.32 - Public Sub InitializeTraining 5.33 - Public Sub PrintParameters 5.34 - Public Sub PrintSuccessPrediction 5.35 - Public Sub ShowMessage 5.36 - Public Sub TestAllSamples 5.37 - Public Sub TestAllSamples 5.38 - Public Sub TestAllSamples 5.39 - Public Sub Train 5.40 - Public Sub Train 5.41 - Public Sub Train 5.42 - Public Sub TrainAllSamples 6 - clsMLPGenericVec.vb 6.1 - Public MustOverride Sub TrainVector 6.2 - Public Overridable Sub SetOuput1D 6.3 - Public Overridable Sub TrainVectorBatch 6.4 - Public Overridable Sub TrainVectorBatch 6.5 - Public Overrides Sub PrintOutput 6.6 - Public Overrides Sub TrainSemiStochastic 6.7 - Public Overrides Sub TrainStochastic 6.8 - Public Overrides Sub TrainSystematic 7 - clsMLPHelper.vb 7.1 - Public Shared Function ArrayToString$ 7.2 - Public Shared Function ArrayToString$ 7.3 - Public Shared Function Compare 7.4 - Public Shared Function CompareArray 7.5 - Public Shared Function CompareArray1D 7.6 - Public Shared Function CompareArray1DSingle 7.7 - Public Shared Function 
Convert1DArrayOfDoubleToSingle 7.8 - Public Shared Function Convert1DArrayOfSingleToDouble 7.9 - Public Shared Function Convert2DArrayOfSingleToDouble 7.10 - Public Shared Function GetColumn 7.11 - Public Shared Function GetVector 7.12 - Public Shared Function ReadEnumDescription$ 7.13 - Public Shared Function Swap2DArray 7.14 - Public Shared Function Transform2DArrayDoubleToArraySingle 7.15 - Public Shared Function Transform2DArrayDoubleToArraySingle2 7.16 - Public Shared Function Transform2DArrayDoubleToJaggedArraySingle 7.17 - Public Shared Function Transform2DArrayToJaggedArray 7.18 - Public Shared Function Transform2DArrayToJaggedArraySingle 7.19 - Public Shared Function TransformArrayTo2DArray 7.20 - Public Shared Sub Fill2DArrayOfDouble 7.21 - Public Shared Sub Fill2DArrayOfDoubleByArray 7.22 - Public Shared Sub Fill2DArrayOfDoubleByArrayOfSingle 7.23 - Public Shared Sub Fill2DArrayOfDoubleByArrayOfSingle2 7.24 - Public Shared Sub Fill2DArrayOfSingle 7.25 - Public Shared Sub Fill2DArrayOfSingle2 8 - clsRndExtension.vb 8.1 - Public Function NextDouble# 8.2 - Public Function NextDoubleGreaterThanZero# 8.3 - Public Function NextFloat! 8.4 - Public Function NextFloat! 8.5 - Public Function NextFloat! 9 - mod1XOR.vb 10 - mod2XOR.vb 11 - mod3XOR.vb 12 - modIrisFlowerInputCorrected.vb 13 - modIrisFlowerInputOriginal.vb 14 - modIrisFlowerTargetAnalog.vb 15 - modIrisFlowerTargetAnalogOriginal.vb 16 - modIrisFlowerTargetLogical.vb 17 - modSunspot.vb 18 - clsMLPAccord.vb 18.1 - Public Overrides Function GetActivationFunctionType 18.2 - Public Overrides Function GetMLPType$ 18.3 - Public Overrides Function GetWeight# 18.4 - Public Overrides Function GetWeightSingle! 
18.5 - Public Overrides Sub InitializeStruct 18.6 - Public Overrides Sub InitializeWeights 18.7 - Public Overrides Sub Randomize 18.8 - Public Overrides Sub RoundWeights 18.9 - Public Overrides Sub SetActivationFunction 18.10 - Public Overrides Sub SetActivationFunctionOptimized 18.11 - Public Overrides Sub SetOuput1D 18.12 - Public Overrides Sub SetWeight 18.13 - Public Overrides Sub SetWeightSingle 18.14 - Public Overrides Sub TestOneSample 18.15 - Public Overrides Sub TrainOneSample 18.16 - Public Overrides Sub TrainVector 18.17 - Public Overrides Sub TrainVectorOneIteration 19 - modMLPAccordTest.vb 19.1 - Public Sub AccordMLPXorTest 19.2 - Sub MainAccordMLP 20 - clsMLPBrightWire.vb 20.1 - Private Sub BuildGraph 20.2 - Private Sub BuildGraph 20.3 - Private Sub BuildSamples 20.4 - Private Sub BuildSamples 20.5 - Private Sub TestOneSampleStatic 20.6 - Private Sub TestOneSampleStaticTest 20.7 - Public Overrides Function GetActivationFunctionType 20.8 - Public Overrides Function GetMLPType$ 20.9 - Public Overrides Function GetWeight# 20.10 - Public Overrides Function GetWeightSingle! 
20.11 - Public Overrides Function ShowWeights$ 20.12 - Public Overrides Sub InitializeStruct 20.13 - Public Overrides Sub InitializeWeights 20.14 - Public Overrides Sub Randomize 20.15 - Public Overrides Sub RoundWeights 20.16 - Public Overrides Sub SetActivationFunction 20.17 - Public Overrides Sub SetOuput1D 20.18 - Public Overrides Sub SetOuput1D 20.19 - Public Overrides Sub SetWeight 20.20 - Public Overrides Sub SetWeightSingle 20.21 - Public Overrides Sub TestAllSamples 20.22 - Public Overrides Sub TestAllSamples 20.23 - Public Overrides Sub TestOneSample 20.24 - Public Overrides Sub TestOneSample 20.25 - Public Overrides Sub TestOneSampleAndComputeError 20.26 - Public Overrides Sub TrainOneSample 20.27 - Public Overrides Sub TrainOneSample 20.28 - Public Overrides Sub TrainSystematic 20.29 - Public Overrides Sub TrainVector 20.30 - Public Overrides Sub TrainVector 20.31 - Public Overrides Sub TrainVectorBatch 20.32 - Public Overrides Sub TrainVectorBatch 20.33 - Public Overrides Sub TrainVectorOneIteration 20.34 - Public Overrides Sub TrainVectorOneIteration 20.35 - Public Sub GetWeights 20.36 - Public Sub GetWeights 20.37 - Public Sub ReDimWeights 20.38 - Public Sub SetWeights 20.39 - Public Sub SetWeights 20.40 - Public Sub TestOneSampleAndComputeErrorTest 20.41 - Public Sub TestOneSampleByIndex 20.42 - Public Sub TestOneSampleByIndex 21 - modMLPBrightWireTest.vb 21.1 - Public Sub BrightWireMLPXorTest 21.2 - Sub MainBrightWireMLP 22 - clsMLPClassic.vb 22.1 - Private Sub ForwardPropogateSignal 22.2 - Private Sub GetOutputSignal 22.3 - Private Sub SetInputSignal 22.4 - Public Function ComputeOutputError! 22.5 - Public Overrides Function GetActivationFunctionType 22.6 - Public Overrides Function GetMLPType$ 22.7 - Public Overrides Function GetWeight# 22.8 - Public Overrides Function GetWeightSingle! 
22.9 - Public Overrides Sub InitializeStruct 22.10 - Public Overrides Sub InitializeWeights 22.11 - Public Overrides Sub Randomize 22.12 - Public Overrides Sub RoundWeights 22.13 - Public Overrides Sub SetWeight 22.14 - Public Overrides Sub SetWeightSingle 22.15 - Public Overrides Sub TestOneSample 22.16 - Public Overrides Sub TestOneSample 22.17 - Public Overrides Sub TrainOneSample 22.18 - Public Sub BackwardPropagateError 22.19 - Public Sub ComputeGradientAndAdjustWeights 23 - modMLPClassicTest.vb 23.1 - Public Sub ClassicMLPXorTest 23.2 - Sub MainClassicMLP 24 - clsMLPEncog.vb 24.1 - Public Overrides Function GetActivationFunctionType 24.2 - Public Overrides Function GetMLPType$ 24.3 - Public Overrides Function GetWeight# 24.4 - Public Overrides Function GetWeightSingle! 24.5 - Public Overrides Sub CloseTrainingSession 24.6 - Public Overrides Sub InitializeStruct 24.7 - Public Overrides Sub InitializeWeights 24.8 - Public Overrides Sub Randomize 24.9 - Public Overrides Sub RoundWeights 24.10 - Public Overrides Sub SetActivationFunction 24.11 - Public Overrides Sub SetOuput1D 24.12 - Public Overrides Sub SetWeight 24.13 - Public Overrides Sub SetWeightSingle 24.14 - Public Overrides Sub TestOneSample 24.15 - Public Overrides Sub TrainOneSample 24.16 - Public Overrides Sub TrainSemiStochastic 24.17 - Public Overrides Sub TrainStochastic 24.18 - Public Overrides Sub TrainSystematic 24.19 - Public Overrides Sub TrainVector 24.20 - Public Overrides Sub TrainVectorOneIteration 25 - modMLPEncogTest.vb 25.1 - Public Sub EncogMLPXorTest 25.2 - Sub MainEncogMLP 26 - clsMLPKeras.vb 26.1 - Public Overrides Function GetActivationFunctionType 26.2 - Public Overrides Function GetMLPType$ 26.3 - Public Overrides Function ShowWeights$ 26.4 - Public Overrides Sub InitializeStruct 26.5 - Public Overrides Sub InitializeWeights 26.6 - Public Overrides Sub Randomize 26.7 - Public Overrides Sub SetActivationFunction 26.8 - Public Overrides Sub SetOuput1D 26.9 - Public Overrides Sub 
TestOneSample 26.10 - Public Overrides Sub TrainOneSample 26.11 - Public Overrides Sub TrainSemiStochastic 26.12 - Public Overrides Sub TrainStochastic 26.13 - Public Overrides Sub TrainSystematic 26.14 - Public Overrides Sub TrainVector 26.15 - Public Overrides Sub TrainVectorBatch 26.16 - Public Overrides Sub TrainVectorOneIteration 26.17 - Public Sub ModelEvaluate 26.18 - Public Sub SetOuput1DOneSample 27 - modMLPKerasTest.vb 27.1 - Public Sub KerasMLPXorTest 27.2 - Sub MainKerasMLP 28 - clsMatrixMLP.vb 28.1 - Private Sub BackwardPropagateError 28.2 - Private Sub ComputeErrorOneSampleSpecific 28.3 - Private Sub ForwardPropogateSignal 28.4 - Private Sub GetWeights 28.5 - Public Overrides Function GetActivationFunctionType 28.6 - Public Overrides Function GetMLPType$ 28.7 - Public Overrides Function GetWeight# 28.8 - Public Overrides Function GetWeightSingle! 28.9 - Public Overrides Function ShowWeights$ 28.10 - Public Overrides Sub InitializeStruct 28.11 - Public Overrides Sub InitializeWeights 28.12 - Public Overrides Sub Randomize 28.13 - Public Overrides Sub SetWeight 28.14 - Public Overrides Sub SetWeightSingle 28.15 - Public Overrides Sub TestOneSample 28.16 - Public Overrides Sub TrainOneSample 28.17 - Public Overrides Sub TrainSystematic 28.18 - Public Sub BackwardPropagateErrorComputeGradientAndAdjustWeights 29 - clsTrainData.vb 29.1 - Public Function GetInputs 29.2 - Public Function GetOutputs 29.3 - Public Sub Create 29.4 - Public Sub New 30 - modMatrixMLPTest.vb 30.1 - Public Sub MatrixMLPTest 30.2 - Public Sub MatrixMLPXorTest 30.3 - Sub MainMatrixMLP 31 - clsVecMatrixMLP.vb 31.1 - Private Sub BackwardPropagateErrorInternal 31.2 - Private Sub ComputeGradientAndAdjustWeights 31.3 - Private Sub ForwardPropagateSignalInternal 31.4 - Private Sub SetInputOneSample 31.5 - Private Sub SetTargetOneSample 31.6 - Public Overrides Function GetActivationFunctionType 31.7 - Public Overrides Function GetMLPType$ 31.8 - Public Overrides Function GetWeight# 31.9 - 
Public Overrides Function GetWeightSingle! 31.10 - Public Overrides Function ShowWeights$ 31.11 - Public Overrides Sub InitializeStruct 31.12 - Public Overrides Sub InitializeWeights 31.13 - Public Overrides Sub PrintOutput 31.14 - Public Overrides Sub Randomize 31.15 - Public Overrides Sub SetOuput1D 31.16 - Public Overrides Sub SetWeight 31.17 - Public Overrides Sub SetWeightSingle 31.18 - Public Overrides Sub TestOneSample 31.19 - Public Overrides Sub TrainOneSample 31.20 - Public Overrides Sub TrainVector 31.21 - Public Overrides Sub TrainVectorOneIteration 31.22 - Public Property input 31.23 - Public Property target 31.24 - Public Sub BackwardPropagateError 31.25 - Public Sub ComputeErrorInternal 31.26 - Public Sub ForwardPropagateSignal 31.27 - Public Sub SetLastError 32 - modVecMatrixMLPTest.vb 32.1 - Public Sub VectorizedMatrixMLPTest 32.2 - Public Sub VectorizedMatrixMLPXorTest 32.3 - Sub MainVectorizedMatrixMLP 33 - clsMLPNeuralNet.vb 33.1 - Private Sub BuildGraph 33.2 - Private Sub Forward 33.3 - Private Sub TrainNetwork 33.4 - Public Overrides Function GetActivationFunctionType 33.5 - Public Overrides Function GetMLPType$ 33.6 - Public Overrides Function GetWeight# 33.7 - Public Overrides Function GetWeightSingle! 
33.8 - Public Overrides Function ShowWeights$ 33.9 - Public Overrides Function ShowWeights$ 33.10 - Public Overrides Sub InitializeStruct 33.11 - Public Overrides Sub InitializeWeights 33.12 - Public Overrides Sub InitializeWeights 33.13 - Public Overrides Sub Randomize 33.14 - Public Overrides Sub RoundWeights 33.15 - Public Overrides Sub SetActivationFunction 33.16 - Public Overrides Sub SetOuput1D 33.17 - Public Overrides Sub SetWeight 33.18 - Public Overrides Sub SetWeightSingle 33.19 - Public Overrides Sub TestOneSample 33.20 - Public Overrides Sub TrainOneSample 33.21 - Public Overrides Sub TrainSemiStochastic 33.22 - Public Overrides Sub TrainStochastic 33.23 - Public Overrides Sub TrainSystematic 33.24 - Public Overrides Sub TrainSystematic 33.25 - Public Overrides Sub TrainVector 33.26 - Public Overrides Sub TrainVector 33.27 - Public Overrides Sub TrainVectorBatch 33.28 - Public Overrides Sub TrainVectorBatch 33.29 - Public Overrides Sub TrainVectorBatch 33.30 - Public Overrides Sub TrainVectorOneIteration 33.31 - Public Overrides Sub TrainVectorOneIteration 33.32 - Public Sub GetWeights 33.33 - Public Sub GetWeights 33.34 - Public Sub ReDimWeights 33.35 - Public Sub SetOuput1DOneSample 33.36 - Public Sub SetWeights 33.37 - Public Sub SetWeights 34 - modMLPNeuralNetTest.vb 34.1 - Public Sub NeuralNetMLPXorTest 34.2 - Sub MainNeuralNetMLP 35 - AdjustableSigmoid.vb 35.1 - Public Overrides Function AbstractedDerivative# 35.2 - Public Overrides Function Derivative# 35.3 - Public Overrides Function Evaluate# 35.4 - Public Property Alpha# 35.5 - Public Sub New 36 - BaseActivation.vb 36.1 - Public MustOverride Function Evaluate# 36.2 - Public Property Center# 36.3 - Public ReadOnly Property OutputRange 36.4 - Public Sub New 37 - ELU.vb 37.1 - Public Overrides Function AbstractedDerivative# 37.2 - Public Overrides Function Derivative# 37.3 - Public Overrides Function Evaluate# 37.4 - Public Property Alpha# 37.5 - Public Sub New 38 - HyperbolicTangent.vb 38.1 - 
Public Overrides Function AbstractedDerivative# 38.2 - Public Overrides Function Derivative# 38.3 - Public Overrides Function Evaluate# 38.4 - Public Property Alpha# 38.5 - Public Sub New 39 - Linear.vb 39.1 - Public Overrides Function AbstractedDerivative# 39.2 - Public Overrides Function Derivative# 39.3 - Public Overrides Function Evaluate# 39.4 - Public Property Slope# 39.5 - Public Sub New 39.6 - Public Sub New 40 - Sigmoid.vb 40.1 - Public Overrides Function AbstractedDerivative# 40.2 - Public Overrides Function Derivative# 40.3 - Public Overrides Function Evaluate# 40.4 - Public Property Alpha# 40.5 - Public Sub New 41 - Testing.vb 41.1 - Public Property Input 42 - Training.vb 42.1 - Public Property Input 42.2 - Public Sub New 43 - BaseLayer.vb 43.1 - Public Property Size% 43.2 - Public Sub ConnectBias 43.3 - Public Sub ConnectBias2 43.4 - Public Sub ConnectBiasInit 43.5 - Public Sub ConnectChild 43.6 - Public Sub ConnectChildInit 43.7 - Public Sub ConnectParent 43.8 - Public Sub ConnectParent2 43.9 - Public Sub Init 43.10 - Public Sub InitBias 43.11 - Public Sub InitChild 43.12 - Public Sub RestoreWeightsWithBias 44 - HiddenLayer.vb 44.1 - Public Sub New 45 - InputLayer.vb 45.1 - Public Sub New 45.2 - Public Sub SetInput 46 - OutputLayer.vb 46.1 - Public Function CalculateAbsError# 46.2 - Public Function CalculateSignedError# 46.3 - Public Function CalculateSquaredError# 46.4 - Public Function ExtractOutputs 46.5 - Public Sub AssignErrors 46.6 - Public Sub New 47 - MLPOOPTests.vb 47.1 - Public Sub OOPMLPXorTest 47.2 - Sub MainOOPMLP 48 - MultilayerPerceptron.vb 48.1 - Private Function SetInputAndTargetOneSample 48.2 - Private Sub BackwardPropogateErrorComputeGradientAndAdjustWeights 48.3 - Private Sub ForwardPropogateSignal 48.4 - Private Sub SetInputOneSample 48.5 - Private Sub SetOutput 48.6 - Private Sub WeightInitStruct 48.7 - Public Overrides Function GetActivationFunctionType 48.8 - Public Overrides Function GetMLPType$ 48.9 - Public Overrides 
Function GetWeight# 48.10 - Public Overrides Function GetWeightSingle! 48.11 - Public Overrides Sub InitializeStruct 48.12 - Public Overrides Sub InitializeWeights 48.13 - Public Overrides Sub Randomize 48.14 - Public Overrides Sub SetWeight 48.15 - Public Overrides Sub SetWeightSingle 48.16 - Public Overrides Sub TestOneSample 48.17 - Public Overrides Sub TrainOneSample 48.18 - Public Property TotalSquaredError# 48.19 - Public Sub New 48.20 - Public Sub SetOuput1D 48.21 - Public Sub TrainOneIteration 48.22 - Public Sub TrainOneSampleOOP 49 - Neuron.vb 49.1 - Public Overrides Function ToString$ 49.2 - Public Property NumericalFormat$ 50 - NeuronEnum.vb 51 - Weight.vb 51.1 - Public Property Value# 52 - BaseRandom.vb 52.1 - Public MustOverride Function Generate# 52.2 - Public Property Range 53 - Standard.vb 53.1 - Public Overrides Function Generate# 53.2 - Public Sub New 54 - Range.vb 54.1 - Public Property Minimum# 54.2 - Public Sub New 55 - clsMLPRProp.vb 55.1 - Private Function MakeAllData 55.2 - Private Shared Function CalculateGradTermsForLast 55.3 - Private Shared Function CalculateGradTermsForNonLast 55.4 - Private Shared Function MaxIndex% 55.5 - Private Shared Sub ShowData 55.6 - Private Shared Sub ShowVector 55.7 - Private Sub InitializeGradients 55.8 - Private Sub InitializeThreads 55.9 - Private Sub InitializeThreads2 55.10 - Private Sub InitializeWeightsNguyenWidrow 55.11 - Private Sub MakeTrainTest 55.12 - Protected Function ComputeGraduate 55.13 - Protected Function ComputeGraduateMultiThread 55.14 - Protected Function ComputeGraduateSingleThread 55.15 - Protected MustOverride Function ActivateFunction 55.16 - Protected Overrides Function ActivateFunction 55.17 - Protected Overrides Function CalculateGradTerms 55.18 - Protected Shared Function HyperTan# 55.19 - Protected Shared Function MakeVector 55.20 - Protected Shared Function Softmax 55.21 - Protected Shared Sub ZeroOut 55.22 - Protected Shared Sub ZeroOut 55.23 - Protected Sub 
InitializeWeightsNguyenWidrow 55.24 - Protected Sub New 55.25 - Protected Sub UpdateWeigtsAndBiases 55.26 - Public Function Accuracy 55.27 - Public Function ComputeOutputs 55.28 - Public Function GetWeights 55.29 - Public Function GetWeightsCount% 55.30 - Public Function RootMeanSquaredError 55.31 - Public Function RootMeanSquaredErrorMultiThread 55.32 - Public Function RootMeanSquaredErrorSingleThread 55.33 - Public Function TrainRPROPAndTest 55.34 - Public Overrides Function GetActivationFunctionType 55.35 - Public Overrides Function GetMLPType$ 55.36 - Public Overrides Function GetWeight# 55.37 - Public Overrides Function GetWeightSingle! 55.38 - Public Overrides Function ShowWeights$ 55.39 - Public Overrides Sub InitializeStruct 55.40 - Public Overrides Sub InitializeWeights 55.41 - Public Overrides Sub Randomize 55.42 - Public Overrides Sub SetActivationFunction 55.43 - Public Overrides Sub SetActivationFunctionOptimized 55.44 - Public Overrides Sub SetOuput1D 55.45 - Public Overrides Sub SetWeight 55.46 - Public Overrides Sub SetWeightSingle 55.47 - Public Overrides Sub TestOneSample 55.48 - Public Overrides Sub TrainOneSample 55.49 - Public Overrides Sub TrainSemiStochastic 55.50 - Public Overrides Sub TrainStochastic 55.51 - Public Overrides Sub TrainSystematic 55.52 - Public Overrides Sub TrainVector 55.53 - Public Overrides Sub TrainVectorBatch 55.54 - Public Overrides Sub TrainVectorOneIteration 55.55 - Public Sub ComputeGraduateInThread 55.56 - Public Sub ComputeRMSEInThread 55.57 - Public Sub ConsoleDemo 55.58 - Public Sub New 55.59 - Public Sub Save 55.60 - Public Sub SetTrainData 55.61 - Public Sub SetWeights 55.62 - Public Sub ShowMessage 55.63 - Public Sub TestRPROP 55.64 - Public Sub TrainRPROP 56 - modMLPRPropTest.vb 56.1 - Public Sub RPropMLPXorTest 56.2 - Sub MainRPropMLP 57 - clsActivation.vb 57.1 - Public Shared Function Forward 57.2 - Public Shared Function Forward 57.3 - Public Shared Function Forward 57.4 - Public Shared Sub Backward 57.5 
- Public Shared Sub Backward 57.6 - Public Shared Sub Backward 58 - clsMeanSquaredError.vb 58.1 - Public Overrides Function Forward 59 - clsMLPTensor.vb 59.1 - Private Sub AddLayerWithActivationFunction 59.2 - Private Sub ComputeErrorInternal 59.3 - Private Sub ForwardPropogateSignal 59.4 - Private Sub InitializeSequential 59.5 - Private Sub SetInputOneSample 59.6 - Private Sub SetOuput1DOneSample 59.7 - Private Sub SetOuputAllSamples 59.8 - Private Sub SetTargetOneSample 59.9 - Public Overrides Function GetActivationFunctionType 59.10 - Public Overrides Function GetMLPType$ 59.11 - Public Overrides Function ShowWeights$ 59.12 - Public Overrides Sub InitializeStruct 59.13 - Public Overrides Sub InitializeWeights 59.14 - Public Overrides Sub PrintOutput 59.15 - Public Overrides Sub Randomize 59.16 - Public Overrides Sub SetOuput1D 59.17 - Public Overrides Sub TestOneSample 59.18 - Public Overrides Sub TrainOneSample 59.19 - Public Overrides Sub TrainVector 59.20 - Public Overrides Sub TrainVectorOneIteration 59.21 - Public Sub BackwardPropagateError 59.22 - Public Sub InitializeGradient 59.23 - Public Sub SetInputAllSamples 59.24 - Public Sub SetTargetAllSamples 59.25 - Public Sub WeightInitLayerLinear 60 - clsStochasticGradientDescent.vb 60.1 - Public Function ParametersToString$ 60.2 - Public Property Parameters 60.3 - Public ReadOnly Property LearningRate! 60.4 - Public ReadOnly Property WeightAdjustment! 
60.5 - Public Sub New 60.6 - Public Sub Step_ 61 - clsTensor.vb 61.1 - Matrix.MatrixLoop(Sub(i 61.2 - Private Function allChildrenGradsAccountedFor 61.3 - Private Function CheckCreators 61.4 - Private Sub AdditionTensorOperation 61.5 - Private Sub CheckArgumentsThrow 61.6 - Private Sub CheckCreatorsThrow 61.7 - Private Sub ExpandTensorOperation 61.8 - Private Sub MatrixMultiplicationTensorOperation 61.9 - Private Sub MultiplicationTensorOperation 61.10 - Private Sub NegationTensorOperation 61.11 - Private Sub SubstractionTensorOperation 61.12 - Private Sub SumatoryTensorOperation 61.13 - Private Sub TransposeTensorOperation 61.14 - Public Overrides Function ToString$ 61.15 - Public Property Childrens 61.16 - Public Property Data 61.17 - Public Property Gradient 61.18 - Public ReadOnly Property Arguments 61.19 - Public ReadOnly Property AutoGrad 61.20 - Public ReadOnly Property CreationOperation 61.21 - Public ReadOnly Property Creators 61.22 - Public ReadOnly Property Id% 61.23 - Public Shared Function Add 61.24 - Public Shared Function Expand 61.25 - Public Shared Function MatMult 61.26 - Public Shared Function Mul 61.27 - Public Shared Function Neg 61.28 - Public Shared Function Substract 61.29 - Public Shared Function Sum 61.30 - Public Shared Function Transp 61.31 - Public Sub Backward 61.32 - Public Sub New 62 - clsELULayer.vb 62.1 - Public Overrides Function Forward 62.2 - Public ReadOnly Property Center! 62.3 - Public Sub New 63 - clsHyperbolicTangentLayer.vb 63.1 - Public Overrides Function Forward 63.2 - Public ReadOnly Property Center! 
63.3 - Public Sub New 64 - clsLayer.vb 64.1 - Public Overridable Function Forward 64.2 - Public Overridable Function Forward 64.3 - Public Overridable Property Parameters 64.4 - Public Sub New 65 - clsLinear.vb 65.1 - Public Overrides Function Forward 65.2 - Public Sub New 65.3 - Public Sub New 65.4 - Public Sub New 65.5 - Public Sub New 66 - clsSequential.vb 66.1 - Public Function GetParameters 66.2 - Public Function ParametersToString$ 66.3 - Public Overrides Function Forward 66.4 - Public Overrides Property Parameters 66.5 - Public ReadOnly Property Layers 66.6 - Public Sub New 66.7 - Public Sub New 67 - clsSigmoidLayer.vb 67.1 - Public Overrides Function Forward 67.2 - Public ReadOnly Property Center! 67.3 - Public Sub New 68 - modMLPTensorTest.vb 68.1 - Public Sub SimpleTest 68.2 - Public Sub TensorMLPXorTest 68.3 - Sub MainTensorMLP 69 - clsMLPTensorFlow.vb 69.1 - Private Function ConvertNDArrayToArrayOfDouble 69.2 - Private Function makeGraph 69.3 - Private Sub InitializeTensorFlow 69.4 - Private Sub InitializeTensorFlow 69.5 - Private Sub ReadWeights 69.6 - Private Sub ReadWeights 69.7 - Public Overrides Function GetActivationFunctionType 69.8 - Public Overrides Function GetMLPType$ 69.9 - Public Overrides Function ShowWeights$ 69.10 - Public Overrides Sub CloseTrainingSession 69.11 - Public Overrides Sub InitializeStruct 69.12 - Public Overrides Sub InitializeWeights 69.13 - Public Overrides Sub PrintOutput 69.14 - Public Overrides Sub Randomize 69.15 - Public Overrides Sub SetActivationFunction 69.16 - Public Overrides Sub SetOuput1D 69.17 - Public Overrides Sub TestOneSample 69.18 - Public Overrides Sub TestOneSample 69.19 - Public Overrides Sub TrainOneSample 69.20 - Public Overrides Sub TrainSemiStochastic 69.21 - Public Overrides Sub TrainStochastic 69.22 - Public Overrides Sub TrainSystematic 69.23 - Public Overrides Sub TrainVector 69.24 - Public Overrides Sub TrainVectorOneIteration 70 - modMLPTensorFlowTest.vb 70.1 - Public Sub 
TensorFlowMLPIrisFlowerAnalogTest 70.2 - Public Sub TensorFlowMLPXORTest 70.3 - Sub MainTensorFlowMLP 71 - modActivation.vb 71.1 - Function DoesDerivativeDependOnOriginalFunction 71.2 - Function DoesDerivativeDependOnOriginalFunction 71.3 - Function DoesDerivativeDependOnOriginalFunction 71.4 - Function DoesDerivativeDependOnOriginalFunction 71.5 - Function DoesDerivativeDependOnOriginalFunction 71.6 - Function DoesDerivativeDependOnOriginalFunction 71.7 - Function DoesDerivativeDependOnOriginalFunction 71.8 - Function DoesDerivativeDependOnOriginalFunction 71.9 - Function DoesDerivativeDependOnOriginalFunction 71.10 - Function DoesDerivativeDependOnOriginalFunction 71.11 - Function DoesDerivativeDependOnOriginalFunction 71.12 - Function IsNonLinear 71.13 - Private Function Softplus# 71.14 - Private Shared Function CommonActivation# 71.15 - Public Function Activation# 71.16 - Public Function Activation# 71.17 - Public Function Activation# 71.18 - Public Function Activation# 71.19 - Public Function Activation# 71.20 - Public Function Activation# 71.21 - Public Function Activation# 71.22 - Public Function Activation# 71.23 - Public Function Activation# 71.24 - Public Function Activation# 71.25 - Public Function Activation# 71.26 - Public Function Derivative# 71.27 - Public Function Derivative# 71.28 - Public Function Derivative# 71.29 - Public Function Derivative# 71.30 - Public Function Derivative# 71.31 - Public Function Derivative# 71.32 - Public Function Derivative# 71.33 - Public Function Derivative# 71.34 - Public Function Derivative# 71.35 - Public Function Derivative# 71.36 - Public Function Derivative# 71.37 - Public Function DerivativeFromOriginalFunction# 71.38 - Public Function DerivativeFromOriginalFunction# 71.39 - Public Function DerivativeFromOriginalFunction# 71.40 - Public Function DerivativeFromOriginalFunction# 71.41 - Public Function DerivativeFromOriginalFunction# 71.42 - Public Function DerivativeFromOriginalFunction# 71.43 - Public Function 
DerivativeFromOriginalFunction# 71.44 - Public Function DerivativeFromOriginalFunction# 71.45 - Public Function DerivativeFromOriginalFunction# 71.46 - Public Function DerivativeFromOriginalFunction# 71.47 - Public Function DerivativeFromOriginalFunction# 71.48 - Public Function IsNonLinear 71.49 - Public Function IsNonLinear 71.50 - Public Function IsNonLinear 71.51 - Public Function IsNonLinear 71.52 - Public Function IsNonLinear 71.53 - Public Function IsNonLinear 71.54 - Public Function IsNonLinear 71.55 - Public Function IsNonLinear 71.56 - Public Function IsNonLinear 71.57 - Public Function IsNonLinear 71.58 - Public Shared Function CommonDerivative# 71.59 - Public Shared Function CommonDerivativeFromOriginalFunction# 72 - modMLPHelper.vb 72.1 - Public Function isConsoleApp 72.2 - Public Function removeNegativeSignFromZero$ 72.3 - Public Function ReplaceCommaByDot$ 73 - modMLPTest.vb 73.1 - Public Sub Init2XOR 73.2 - Public Sub Init3XOR 73.3 - Public Sub InitIrisFlowerAnalog4Layers 73.4 - Public Sub InitIrisFlowerLogical 73.5 - Public Sub InitIrisFlowerLogical4Layers 73.6 - Public Sub InitSunspot1 73.7 - Public Sub InitSunspot2 73.8 - Public Sub InitXOR 73.9 - Public Sub MLPGenericIrisFlowerTest 73.10 - Public Sub MLPGenericIrisFlowerTestAnalog 73.11 - Public Sub MLPGenericSunspotTest 74 - modTests.vb 74.1 - Private Sub ApplicationMenu 74.2 - Private Sub IrisFlowerTestAnalog 74.3 - Private Sub IrisFlowerTestLogical 74.4 - Private Sub XORTest 74.5 - Public Sub MLPMenu 74.6 - Public Sub NextTest 74.7 - Public Sub SunspotTest 74.8 - Public Sub WaitForKeyToContinue 74.9 - Public Sub WaitForKeyToQuit 74.10 - Public Sub WaitForKeyToStart 74.11 - Sub Main AssemblyInfo.vb Imports System.Reflection Imports System.Runtime.InteropServices <Assembly: AssemblyTitle("One-Ring-to-rule-them-all")> <Assembly: AssemblyDescription( "Functional tests for Multi-Layer Perceptron implementations")> <Assembly: AssemblyCompany("ORS Production")> <Assembly: 
AssemblyProduct("One-Ring-to-rule-them-all")>
<Assembly: AssemblyCopyright("Copyright © 2021 ORS Production")>
<Assembly: AssemblyTrademark("One-Ring-to-rule-them-all")>
<Assembly: ComVisible(False)>

' The following GUID is the ID of the typelib if this project is exposed to COM
<Assembly: Guid("0fcfbf59-38ad-4e08-ac72-0fee3624bc83")>

<Assembly: AssemblyVersion("2.0.4")>

clsMatrixHP.vb

' From https://github.com/HectorPulido/Vectorized-multilayer-neural-network : C# -> VB .NET conversion
' and for Axis, AxisZero and Item (this function) from:
' https://github.com/HectorPulido/Machine-learning-Framework-Csharp : C# -> VB .NET conversion

Imports System.Text ' StringBuilder
Imports System.Threading.Tasks ' Parallel.For

Namespace Utility

    ' Relative timings of the three Matrix implementations in this project:
    ' Hector Pulido's implementation : 12 sec
    ' Nikos Labiris' implementation : 7.4 sec : 1.5 times faster
    ' MathNet's implementation : 8.2 sec

#Const Implementation = 0 ' 0 : Off, 1 : On

    ' From https://github.com/HectorPulido/Machine-learning-Framework-Csharp
    Public Enum Axis
        horizontal
        vertical
    End Enum

    Public Enum AxisZero
        horizontal = 0
        vertical = 1
        none = -1
    End Enum

#If Implementation Then

    Public Class Matrix

        ' Backing store: 2D array of Double (# is the Double type character)
        Private data#(,)

#Region "Properties"

        ' From https://github.com/HectorPulido/Machine-learning-Framework-Csharp (double this[int i, int j])
        Default Public Property Item#(r%, c%)
            Get
                Return Me.data(r, c)
            End Get
            Set(value#)
                Me.data(r, c) = value
            End Set
        End Property

        ' API should conform to the CLS [Common Language Specification]
        ' Specifically, unsigned types should not be part of the class's public interface,
        ' as users will be forced to implement any combinations like this
        Public Property ItemUIntSng!(r As UInteger, c As UInteger)
            Get
                Return CSng(Me.data(CType(r, Integer), CType(c, Integer)))
            End Get
            Set(value!)
                Me.data(CType(r, Integer), CType(c, Integer)) = value
            End Set
        End Property

        ' Raw access to the underlying 2D array (the getter returns a defensive copy)
        Public Property matrixP As Double(,)
            Get
                Return CType(Me.data.Clone(), Double(,))
            End Get
            Set(value As Double(,))
                Me.data = value
            End Set
        End Property

        Public ReadOnly Property isDefined As Boolean
            Get
                Return Not IsNothing(Me.data)
            End Get
        End Property

        ''' <summary>
        ''' Rows
        ''' </summary>
        Public ReadOnly Property r%
            Get
                Return Me.data.GetLength(0)
            End Get
        End Property

        ''' <summary>
        ''' Columns
        ''' </summary>
        Public ReadOnly Property c%
            Get
                Return Me.data.GetLength(1)
            End Get
        End Property

        ''' <summary>
        ''' Transpose
        ''' </summary>
        Public ReadOnly Property T As Matrix
            Get
                Return Transpose_(Me)
            End Get
        End Property

        ' 1x2 matrix {{rows, columns}}
        Public ReadOnly Property Size As Matrix
            Get
                Return (New Double(,) {{Me.r, Me.c}})
            End Get
        End Property

        Public ReadOnly Property Abs As Matrix
            Get
                Return Abs_()
            End Get
        End Property

        Public ReadOnly Property Average#
            Get
                Return Average_()
            End Get
        End Property

#End Region

#Region "Constructors"

        Public Sub New()
            ' Constructor used by Tensor
        End Sub

        Public Sub New(rows%, columns%)
            Me.data = New Double(rows - 1, columns - 1) {}
        End Sub

        Public Sub New(doubleArray#(,))
            Me.data = doubleArray
        End Sub

        Public Sub New(singleArray!(,))
            Dim rows = singleArray.GetLength(0)
            Dim columns = singleArray.GetLength(1)
            Me.data = New Double(rows - 1, columns - 1) {}
            For i = 0 To rows - 1
                For j = 0 To columns - 1
                    Me.data(i, j) = singleArray(i, j)
                Next
            Next
        End Sub

        ' From clsMatrix: https://github.com/PatriceDargenton/Matrix-MultiLayerPerceptron
        ''' <summary>
        ''' Create a matrix object from an array of Single
        ''' </summary>
        Public Shared Function FromArraySingle(inputs!()) As Matrix
            Dim m As New Matrix(inputs.Length, 1)
            For i = 0 To inputs.Length - 1
                m.data(i, 0) = inputs(i)
            Next
            Return m
        End Function

#End Region

#Region "Operators"

        ' Implicit conversion operator #(,) -> Matrix
        Public Shared Widening Operator CType(doubleArray#(,)) As Matrix
            Return New Matrix(doubleArray)
        End Operator

        ' Implicit conversion operator Matrix -> #(,)
        Public Shared Widening Operator CType(matrix0 As Matrix) As Double(,)
            Return matrix0.matrixP
        End Operator

        ' Implicit conversion operator !(,) -> Matrix
        Public Shared Widening Operator CType(singleArray!(,)) As Matrix
            Return New Matrix(singleArray)
        End Operator

        Public Shared Operator +(m1 As Matrix, m2 As Matrix) As Matrix
            Dim m As Matrix = MatSum(m1, m2)
            Return m
        End Operator

        'Public Shared Operator +(m2 As Matrix, m1#) As Matrix
        '    Dim m As Matrix = MatdoubleSum(m1, m2)
        '    Return m
        'End Operator

        Public Shared Operator -(m1 As Matrix, m2 As Matrix) As Matrix
            Dim m As Matrix = MatSum(m1, m2, neg:=True)
            Return m
        End Operator

        Public Shared Operator -(m2 As Matrix, m1#) As Matrix
            Dim m As Matrix = MatdoubleSum(-m1, m2)
            Return m
        End Operator

        Public Shared Operator *(m2 As Matrix, m1#) As Matrix
            Dim m As Matrix = MatdoubleMult(m2, m1)
            Return m
        End Operator

        ' Same dimensions: element-wise (Hadamard) product, otherwise matrix product
        Public Shared Operator *(m1 As Matrix, m2 As Matrix) As Matrix
            If m1.r = m2.r AndAlso m1.c = m2.c Then
                Dim m3 As Matrix = DeltaMult(m1, m2)
                Return m3
            End If
            Dim m4 As Matrix = MatMult(m1, m2)
            Return m4
        End Operator

        Public Shared Operator /(m2 As Matrix, m1#) As Matrix
            Return MatdoubleMult(m2, 1 / m1)
        End Operator

        Public Shared Operator ^(m2 As Matrix, m1#) As Matrix
            Return Pow(m2, m1)
        End Operator

#End Region

#Region "Public shared operations"

        Public Shared Function Zeros(r%, c%) As Matrix
            Dim zeros0 = New Double(r - 1, c - 1) {}
            MatrixLoop((Sub(i, j) zeros0(i, j) = 0), r, c)
            Dim m As Matrix = zeros0
            Return m
        End Function

        Public Shared Function Ones(r%, c%) As Matrix
            Dim ones0 = New Double(r - 1, c - 1) {}
            MatrixLoop((Sub(i, j) ones0(i, j) = 1), r, c)
            Dim m As Matrix = ones0
            Return m
        End Function

        ' x by x identity matrix (note: "Identy" spelling is part of the public API)
        Public Shared Function Identy(x%) As Matrix
            Dim identy0 = New Double(x - 1, x - 1) {}
            MatrixLoop(
                (Sub(i, j)
                     If i = j Then
                         identy0(i, j) = 1
                     Else
                         identy0(i, j) = 0
                     End If
                 End Sub), x, x)
            Dim m As Matrix = identy0
            Return m
        End Function

        Private Shared Function Transpose_(m As Matrix) As Matrix
            Dim mT = New Double(m.c - 1, m.r - 1) {}
            'MatrixLoop((Sub(i, j) mT(j, i) = m.matrixP(i, j)), m.r, m.c)
            MatrixLoop((Sub(i, j) mT(j, i) = m.data(i, j)), m.r, m.c)
            Dim result As Matrix = mT
            Return result
        End Function

        ''' <summary>
        ''' Transpose and multiply this transposed matrix by m
        ''' </summary>
        Public Shared Function TransposeAndMultiply1(original As Matrix, m As Matrix) As Matrix
            'Dim original_t As Matrix = Transpose(original)
            Dim result As Matrix = original.T * m
            Return result
        End Function

        ''' <summary>
        ''' Transpose and multiply a matrix m by this transposed one
        ''' </summary>
        Public Shared Function TransposeAndMultiply2(original As Matrix, m As Matrix) As Matrix
            'Dim original_t As Matrix = Transpose(original)
            Dim result As Matrix = m * original.T
            Return result
        End Function

        ''' <summary>
        ''' Subtract 2 matrices (the first as an array of Single) and return a new matrix
        ''' </summary>
        Public Shared Function SubtractFromArraySingle(a_array!(), b As Matrix) As Matrix
            Dim a As Matrix = FromArraySingle(a_array)
            Dim result As Matrix = a - b
            Return result
        End Function

        ''' <summary>
        ''' Multiply matrices a and b, add matrix c,
        ''' and apply a function to every element of the result
        ''' </summary>
        Public Shared Function MultiplyAddAndMap(
            a As Matrix, b As Matrix, c As Matrix,
            lambdaFct As Func(Of Double, Double)) As Matrix
            Dim d As Matrix = MatMult(a, b)
            d += c
            d = Map(d, lambdaFct)
            Return d
        End Function

        ''' <summary>
        ''' Multiply matrices a and b, and apply a function to every element of the result
        ''' </summary>
        Public Shared Function MultiplyAndMap(a As Matrix, b As Matrix,
            lambdaFct As Func(Of Double, Double)) As Matrix
            Dim d As Matrix = MatMult(a, b)
            d = Map(d, lambdaFct)
            Return d
        End Function

        ''' <summary>
        ''' Apply a function to each element of the array
        ''' </summary>
        Public Shared Function Map(m As Matrix,
            lambdaFct As Func(Of Double, Double)) As Matrix
            Dim c As New Matrix(m.r, m.c)
            For i = 0 To m.r - 1
                For j = 0 To m.c - 1
                    c.data(i, j) = lambdaFct.Invoke(m.data(i, j))
                Next
            Next
            Return c
        End Function

        ' Sum of the elements: overall (none), per row (horizontal) or per column (vertical)
        Public Shared Function Sumatory(m As Matrix,
            Optional dimension As AxisZero = AxisZero.none) As Matrix
            Dim output#(,)
            If dimension = AxisZero.none Then
                output = New Double(0, 0) {}
            ElseIf dimension = AxisZero.horizontal Then
                output = New Double(m.r - 1, 0) {}
            ElseIf dimension = AxisZero.vertical Then
                output = New Double(0, m.c - 1) {}
            Else
                Throw New ArgumentException("The dimension must be -1, 0 or 1")
            End If
            'If dimension = AxisZero.none Then
            '    MatrixLoop((Sub(i, j) output(0, 0) += m.matrixP(i, j)), m.r, m.c)
            'ElseIf dimension = AxisZero.horizontal Then
            '    MatrixLoop((Sub(i, j) output(i, 0) += m.matrixP(i, j)), m.r, m.c)
            'ElseIf dimension = AxisZero.vertical Then
            '    MatrixLoop((Sub(i, j) output(0, j) += m.matrixP(i, j)), m.r, m.c)
            'End If
            If dimension = AxisZero.none Then
                MatrixLoop((Sub(i, j) output(0, 0) += m.data(i, j)), m.r, m.c)
            ElseIf dimension = AxisZero.horizontal Then
                MatrixLoop((Sub(i, j) output(i, 0) += m.data(i, j)), m.r, m.c)
            ElseIf dimension = AxisZero.vertical Then
                MatrixLoop((Sub(i, j) output(0, j) += m.data(i, j)), m.r, m.c)
            End If
            Dim result As Matrix = output
            Return result
        End Function

        ' Element-wise power
        Public Shared Function Pow(m2 As Matrix, m1#) As Matrix
            Dim output = New Double(m2.r - 1, m2.c - 1) {}
            'MatrixLoop((Sub(i, j) output(i, j) = Math.Pow(m2.matrixP(i, j), m1)), m2.r, m2.c)
            MatrixLoop((Sub(i, j) output(i, j) = Math.Pow(m2.data(i, j), m1)), m2.r, m2.c)
            Dim result As Matrix = output
            Return result
        End Function

        Public Shared Function Dot(m1 As Matrix, m2 As Matrix) As Matrix
            Return m1 * m2.T
        End Function

        ' Apply the action e(i, j) to every cell of an r x c matrix
        Public Shared Sub MatrixLoop(e As Action(Of Integer, Integer), r%, c%)
            For i = 0 To r - 1
                For j = 0 To c - 1
                    e(i, j)
                Next
            Next
            ' Not stable in this loop:
            'Parallel.For(0, r,
            '    Sub(i)
            '        Parallel.For(0, c,
            '            Sub(j)
            '                e(i, j)
            '            End Sub)
            '    End Sub)
        End Sub

#End Region

#Region "Public operations"

        ' Return a new matrix with m2 (a column vector) prepended as first column
        Public Function AddColumn(m2 As Matrix) As Matrix
            'If Me.matrixP Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            If Me.data Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            If m2.c <> 1 OrElse m2.r <> r Then Throw New ArgumentException("Invalid dimensions")
            Dim newMatrix = New Double(Me.r - 1, Me.c + 1 - 1) {}
            'Dim m = Me.matrixP
            For i = 0 To Me.r - 1
                'newMatrix(i, 0) = m2.matrixP(i, 0)
                newMatrix(i, 0) = m2.data(i, 0)
            Next
            'MatrixLoop((Sub(i, j) newMatrix(i, j + 1) = m(i, j)), Me.r, Me.c)
            MatrixLoop((Sub(i, j) newMatrix(i, j + 1) = Me.data(i, j)), Me.r, Me.c)
            Return newMatrix
        End Function

        'Public Function AddRow(m2 As Matrix) As Matrix
        '    'If Me.matrixP Is Nothing Then Throw New ArgumentException("Matrix can not be null")
        '    If Me.data Is Nothing Then Throw New ArgumentException("Matrix can not be null")
        '    If m2.r <> 1 OrElse m2.c <> Me.c Then Throw New ArgumentException("Invalid dimensions")
        '    Dim newMatrix = New Double(Me.r + 1 - 1, Me.c - 1) {}
        '    Dim m = Me.matrixP
        '    For j = 0 To Me.c - 1
        '        'newMatrix(0, j) = m2.matrixP(0, j)
        '        newMatrix(0, j) = m2.data(0, j)
        '    Next
        '    MatrixLoop((Sub(i, j) newMatrix(i + 1, j) = m(i, j)), Me.r, Me.c)
        '    Return newMatrix
        'End Function

        Public Function Sumatory(Optional dimension As AxisZero = AxisZero.none) As Matrix
            Return Sumatory(Me, dimension)
        End Function

        ''' <summary>
        ''' Cut matrix from r1, c1 to r2, c2
        ''' </summary>
        Public Function Slice(r1%, c1%, r2%, c2%) As Matrix
            'If Me.matrixP Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            If Me.data Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            If r1 >= r2 OrElse c1 >= c2 OrElse r1 < 0 OrElse r2 < 0 OrElse c1 < 0 OrElse c2 < 0 Then _
                Throw New ArgumentException("Dimensions are not valid")
            Dim slice0 = New Double(r2 - r1 - 1, c2 - c1 - 1) {}
            For i = r1 To r2 - 1
                For j = c1 To c2 - 1
                    'slice0(i - r1, j - c1) = Me.matrixP(i, j)
                    slice0(i - r1, j - c1) = Me.data(i, j)
                Next
            Next
            Return slice0
        End Function

        'Public Function Slice(r%, c%) As Matrix
        '    Return Slice(0, 0, r, c)
        'End Function

        'Public Function Pow(m1#) As Matrix
        '    Return Pow(Me, m1)
        'End Function

        'Public Function Dot(m As Matrix) As Matrix
        '    Return Dot(Me, m)
        'End Function

#End Region

#Region "Private operations"

        ' Add the scalar m1 to every element of m2
        Private Shared Function MatdoubleSum(m1#, m2 As Matrix) As Matrix
            Dim a#(,) = m2
            Dim b = New Double(m2.r - 1, m2.c - 1) {}
            MatrixLoop((Sub(i, j) b(i, j) = a(i, j) + m1), b.GetLength(0), b.GetLength(1))
            Dim m As Matrix = b
            Return m
        End Function

        ' Element-wise sum (or difference when neg:=True) of two same-size matrices
        Private Shared Function MatSum(m1 As Matrix, m2 As Matrix,
            Optional neg As Boolean = False) As Matrix
            If m1.r <> m2.r OrElse m1.c <> m2.c Then _
                Throw New ArgumentException("Matrix must have the same dimensions")
            Dim a#(,) = m1
            Dim b#(,) = m2
            Dim c = New Double(m1.r - 1, m1.c - 1) {}
            MatrixLoop(
                (Sub(i, j)
                     If Not neg Then
                         c(i, j) = a(i, j) + b(i, j)
                     Else
                         c(i, j) = a(i, j) - b(i, j)
                     End If
                 End Sub), c.GetLength(0), c.GetLength(1))
            Dim m As Matrix = c
            Return m
        End Function

        ' Multiply every element of m2 by the scalar m1
        Private Shared Function MatdoubleMult(m2 As Matrix, m1#) As Matrix
            Dim a#(,) = m2
            Dim b = New Double(m2.r - 1, m2.c - 1) {}
            MatrixLoop((Sub(i, j) b(i, j) = a(i, j) * m1), b.GetLength(0), b.GetLength(1))
            Dim m As Matrix = b
            Return m
        End Function

        ' Standard matrix product (n x m) * (m x p) -> (n x p)
        Private Shared Function MatMult(m1 As Matrix, m2 As Matrix) As Matrix
            If m1.c <> m2.r Then _
                Throw New ArgumentException("Matrix must have compatible dimensions")
            Dim n = m1.r
            Dim m = m1.c
            Dim p = m2.c
            Dim a#(,) = m1
            Dim b#(,) = m2
            Dim c = New Double(n - 1, p - 1) {}
            MatrixLoop(
                (Sub(i, j)
                     Dim sum# = 0
                     For k = 0 To m - 1
                         sum += a(i, k) * b(k, j)
                     Next
                     c(i, j) = sum
                 End Sub), n, p)
            Dim result As Matrix = c
            Return result
        End Function

        ' Element-wise (Hadamard) product of two same-size matrices
        Private Shared Function DeltaMult(m1 As Matrix, m2 As Matrix) As Matrix
            If m1.r <> m2.r OrElse m1.c <> m2.c Then _
                Throw New ArgumentException("Matrix must have the same dimensions")
            Dim output = New Double(m1.r - 1, m2.c - 1) {}
            'MatrixLoop((Sub(i, j) output(i, j) = m1.matrixP(i, j) * m2.matrixP(i, j)), m1.r, m2.c)
            MatrixLoop((Sub(i, j) output(i, j) = m1.data(i, j) * m2.data(i, j)), m1.r, m2.c)
            Dim m As Matrix = output
            Return m
        End Function

        ''' <summary>
        ''' Compute absolute values of a matrix
        ''' </summary>
        Private Function Abs_() As Matrix
            Dim d#(,) = Me
            'MatrixLoop((Sub(i, j) d(i, j) = Math.Abs(m.matrixP(i, j))), m.r, m.c)
            MatrixLoop((Sub(i, j) d(i, j) = Math.Abs(Me.data(i, j))), Me.r, Me.c)
            Dim result As Matrix = d
            Return result
        End Function

        ''' <summary>
        ''' Compute average value of the matrix
        ''' </summary>
        Private Function Average_#()
            Dim d# = 0
            'MatrixLoop((Sub(i, j) d += m.matrixP(i, j)), m.r, m.c)
            MatrixLoop((Sub(i, j) d += Me.data(i, j)), Me.r, Me.c)
            Dim aver# = d / (Me.r * Me.c)
            Return aver
        End Function

#End Region

#Region "Exports"

        ''' <summary>
        ''' Override <c>ToString()</c> method to pretty-print the matrix
        ''' </summary>
        Public Overrides Function ToString$()
            Return ToStringWithFormat()
        End Function

        ' Pretty-print with a custom numeric format string
        ' (format2Dec, removeNegativeSignFromZero and ReplaceCommaByDot come from modMLPHelper)
        Public Function ToStringWithFormat$(Optional dec$ = format2Dec)
            dec = removeNegativeSignFromZero(dec)
            Dim sb As New StringBuilder()
            sb.AppendLine("{")
            For i = 0 To Me.r - 1
                sb.Append(" {")
                For j = 0 To Me.c - 1
                    Dim strVal$ = Me.data(i, j).ToString(dec).ReplaceCommaByDot()
                    sb.Append(strVal)
                    If j < Me.c - 1 Then sb.Append(", ")
                Next
                sb.Append("}")
                If i < Me.r - 1 Then sb.Append("," & vbLf)
            Next
            sb.Append("}")
            Dim s$ = sb.ToString
            Return s
        End Function

        ''' <summary>
        ''' Convert whole Matrix object to an array of Single
        ''' </summary>
        Public Function ToArrayOfSingle() As Single()
            Dim array!() = New Single(Me.data.Length - 1) {}
            Dim k = 0
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    array(k) = CSng(Me.data(i, j))
                    k += 1
                Next
            Next
            Return array
        End Function

        ''' <summary>
        ''' Convert whole Matrix object to an array of Double
        ''' </summary>
        Public Function ToArray() As Double()
            Dim array#() = New Double(Me.data.Length - 1) {}
            Dim k = 0
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    array(k) = Me.data(i, j)
                    k += 1
                Next
            Next
            Return array
        End Function

#End Region

#Region "Miscellaneous"

        'Public Sub SetValue(r%, c%, value#)
        '    'If Me.matrixP Is Nothing Then Throw New ArgumentException("Matrix can not be null")
        '    If Me.data Is Nothing Then Throw New ArgumentException("Matrix can not be null")
        '    Me.data(r, c) = value
        'End Sub

        Public Function GetValue#(r%, c%)
            'If Me.matrixP Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            If Me.data Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            'Return Me.matrixP(r, c)
            Return Me.data(r, c)
        End Function

        Public Function GetRow(r%) As Matrix
            'If Me.matrixP Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            If Me.data Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            Dim row = New Double(0, Me.c - 1) {}
            For j = 0 To Me.c - 1
                'row(0, j) = Me.matrixP(r, j)
                row(0, j) = Me.data(r, j)
            Next
            Return row
        End Function

        Public Function GetColumn(c%) As Matrix
            'If Me.matrixP Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            If Me.data Is Nothing Then Throw New ArgumentException("Matrix can not be null")
            Dim column = New Double(Me.r - 1, 0) {}
            For i = 0 To Me.r - 1
                'column(i, 0) = Me.matrixP(i, c)
                column(i, 0) = Me.data(i, c)
            Next
            Return column
        End Function

        ' NextDouble(minValue, maxValue) is presumably an extension method defined
        ' elsewhere in the project -- confirm against modMLPHelper
        Public Shared Function Randomize(r%, c%, rnd As Random,
            Optional minValue! = -0.5!, Optional maxValue! = 0.5!) As Matrix
            Dim random_ = New Double(r - 1, c - 1) {}
            'MatrixLoop((Sub(i, j) random_(i, j) = rnd.NextDouble), x, y)
            MatrixLoop((Sub(i, j) random_(i, j) = Math.Round(rnd.NextDouble(minValue, maxValue),
                clsMLPGeneric.nbRoundingDigits)), r, c)
            Dim m As Matrix = random_
            Return m
        End Function

        Public Sub Randomize(rnd As Random,
            Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
            MatrixLoop((Sub(i, j) Me.data(i, j) = Math.Round(rnd.NextDouble(minValue, maxValue),
                clsMLPGeneric.nbRoundingDigits)), Me.r, Me.c)
        End Sub

        ''' <summary>
        ''' Return a new matrix containing 1 for each value whose absolute value is
        ''' less than or equal to minThreshold, and 0 otherwise
        ''' </summary>
        Public Function Threshold(minThreshold!)
As Matrix
            ' (continuation of the Threshold signature above)
            Dim c As New Matrix(Me.r, Me.c)
            For i = 0 To c.r - 1
                For j = 0 To c.c - 1
                    c.data(i, j) = CDbl(IIf(Math.Abs(Me.data(i, j)) <= minThreshold, 1.0#, 0.0#))
                Next
            Next
            Return c
        End Function

#End Region

    End Class

#End If

End Namespace

clsMatrixMN.vb

' Matrix implementation using Math.Net
' https://numerics.mathdotnet.com/Matrix.html
' <package id="MathNet.Numerics" version="4.15.0" targetFramework="net472" />

Imports System.Text ' StringBuilder

Namespace Utility

#Const Implementation = 0 ' 0 : Off, 1 : On

#If Implementation Then

    Public Class Matrix : Implements ICloneable

        ' Deep copy: New Matrix(Me) goes through the widening Matrix -> Double(,)
        ' conversion, i.e. through matrixP, which copies the values
        Private Function IClone() As Object Implements ICloneable.Clone
            Dim m As Matrix = New Matrix(Me)
            Return m
        End Function

        Public Function Clone() As Matrix
            Dim m As Matrix = DirectCast(Me.IClone(), Matrix)
            Return m
        End Function

        ' Backing store: Math.NET dense matrix of Double
        Private m_matrix As MathNet.Numerics.LinearAlgebra.Matrix(Of Double)

#Region "Properties"

        Default Public Property Item#(r%, c%)
            Get
                Return Me.m_matrix(r, c)
            End Get
            Set(value#)
                Me.m_matrix(r, c) = value
            End Set
        End Property

        ' API should conform to the CLS [Common Language Specification]
        ' Specifically, unsigned types should not be part of the class's public interface,
        ' as users will be forced to implement any combinations like this
        Public Property ItemUIntSng!(r As UInteger, c As UInteger)
            Get
                Return CSng(Me.m_matrix(CType(r, Integer), CType(c, Integer)))
            End Get
            Set(value!)
                Me.m_matrix(CType(r, Integer), CType(c, Integer)) = value
            End Set
        End Property

        ' Raw access to the values as a 2D array (the getter returns a copy via ToArray)
        Public Property matrixP As Double(,)
            Get
                Return Me.m_matrix.ToArray
            End Get
            Set(doubleArray As Double(,))
                Dim rows = doubleArray.GetLength(0)
                Dim columns = doubleArray.GetLength(1)
                Constructor(rows, columns)
                For i = 0 To rows - 1
                    For j = 0 To columns - 1
                        Me.m_matrix(i, j) = doubleArray(i, j)
                    Next
                Next
            End Set
        End Property

        Public ReadOnly Property isDefined As Boolean
            Get
                Return Not IsNothing(Me.m_matrix)
            End Get
        End Property

        ''' <summary>
        ''' Rows
        ''' </summary>
        Public ReadOnly Property r%
            Get
                Return Me.m_matrix.RowCount
            End Get
        End Property

        ''' <summary>
        ''' Columns
        ''' </summary>
        Public ReadOnly Property c%
            Get
                Return Me.m_matrix.ColumnCount
            End Get
        End Property

        ''' <summary>
        ''' Transpose
        ''' </summary>
        Public ReadOnly Property T As Matrix
            Get
                Return Transpose_(Me)
            End Get
        End Property

        ' 1x2 matrix {{rows, columns}}
        Public ReadOnly Property Size As Matrix
            Get
                Return (New Double(,) {{Me.r, Me.c}})
            End Get
        End Property

        Public ReadOnly Property Abs As Matrix
            Get
                Return Abs_()
            End Get
        End Property

        Public ReadOnly Property Average#
            Get
                Return Average_()
            End Get
        End Property

#End Region

#Region "Constructors"

        Public Sub New()
            ' Constructor used by Tensor
        End Sub

        Public Sub New(rows%, columns%)
            Constructor(rows, columns)
        End Sub

        Public Sub New(doubleArray#(,))
            Dim rows = doubleArray.GetLength(0)
            Dim columns = doubleArray.GetLength(1)
            Constructor(rows, columns)
            For i = 0 To rows - 1
                For j = 0 To columns - 1
                    Me.m_matrix(i, j) = doubleArray(i, j)
                Next
            Next
        End Sub

        Public Sub New(matrix0!(,))
            Dim rows = matrix0.GetLength(0)
            Dim columns = matrix0.GetLength(1)
            Constructor(rows, columns)
            For i = 0 To rows - 1
                For j = 0 To columns - 1
                    Me.m_matrix(i, j) = matrix0(i, j)
                Next
            Next
        End Sub

        Public Sub New(m As MathNet.Numerics.LinearAlgebra.Matrix(Of Double))
            Me.m_matrix = m
        End Sub

        ' Allocate the backing matrix (DenseIdentity also accepts rows <> columns;
        ' callers overwrite every cell afterwards, so the identity values do not leak)
        Private Sub Constructor(rows%, columns%)
            Me.m_matrix = MathNet.Numerics.LinearAlgebra.Matrix(Of Double).Build.DenseIdentity(rows, columns)
        End Sub

        ''' <summary>
        ''' Create a matrix object from an array of Single
        ''' </summary>
        Public Shared Function FromArraySingle(inputs!()) As Matrix
            Dim m As New Matrix(inputs.Length, 1)
            For i = 0 To inputs.Length - 1
                m.m_matrix(i, 0) = inputs(i)
            Next
            Return m
        End Function

#End Region

#Region "Operators"

        ' Implicit conversion operator #(,) -> Matrix
        Public Shared Widening Operator CType(doubleArray#(,)) As Matrix
            Return New Matrix(doubleArray)
        End Operator

        ' Implicit conversion operator Matrix -> #(,)
        Public Shared Widening Operator CType(matrix0 As Matrix) As Double(,)
            Return matrix0.matrixP
        End Operator

        ' Implicit conversion operator !(,) -> Matrix
        Public Shared Widening Operator CType(singleArray!(,)) As Matrix
            Return New Matrix(singleArray)
        End Operator

        Public Shared Operator +(m1 As Matrix, m2 As Matrix) As Matrix
            Dim m1plusm2 As Matrix = m1.Clone()
            m1plusm2.m_matrix += m2.m_matrix
            Return m1plusm2
        End Operator

        Public Shared Operator -(m1 As Matrix, m2 As Matrix) As Matrix
            Dim m1minusm2 As Matrix = m1.Clone()
            m1minusm2.m_matrix -= m2.m_matrix
            Return m1minusm2
        End Operator

        Public Shared Operator -(m2 As Matrix, m1#) As Matrix
            Dim m As Matrix = m2.Clone()
            m.m_matrix = m.m_matrix.Add(-m1)
            Return m
        End Operator

        Public Shared Operator *(m2 As Matrix, m1#) As Matrix
            Dim m As Matrix = m2.Clone()
            m.m_matrix *= m1
            Return m
        End Operator

        ' Same dimensions: element-wise (Hadamard) product, otherwise matrix product
        Public Shared Operator *(m1 As Matrix, m2 As Matrix) As Matrix
            If m1.r = m2.r AndAlso m1.c = m2.c Then
                Dim m1multm2 As Matrix = m1.Clone()
                m1multm2.Multiply(m2)
                Return m1multm2
            End If
            Return New Matrix(m1.m_matrix * m2.m_matrix)
        End Operator

#End Region

#Region "Public shared operations"

        Public Shared Function Zeros(r%, c%) As Matrix
            Dim MNMatrix = MathNet.Numerics.LinearAlgebra.Matrix(Of Double).Build.DenseIdentity(r, c)
            MNMatrix.Clear()
            Dim result As Matrix = New Matrix(MNMatrix)
            Return result
        End Function

        Public Shared Function Ones(r%, c%) As Matrix
            Dim m = Zeros(r, c)
            m.m_matrix += 1
            Return m
        End Function

        ''' <summary>
        ''' Transpose a matrix
        ''' </summary>
        Private Shared Function Transpose_(m As Matrix) As Matrix
            Return New Matrix(m.m_matrix.Transpose())
        End Function

        ''' <summary>
        ''' Transpose and multiply this transposed matrix by m
        ''' </summary>
        Public Shared Function TransposeAndMultiply1(original As Matrix, m As Matrix) As Matrix
            Return New Matrix(original.T.m_matrix * m.m_matrix)
        End Function

        ''' <summary>
        ''' Transpose and multiply a matrix m by this transposed one
        ''' </summary>
        Public Shared Function TransposeAndMultiply2(original As Matrix, m As Matrix) As Matrix
            Return New Matrix(m.m_matrix * original.T.m_matrix)
        End Function

        ''' <summary>
        ''' Subtract 2 matrices (the first as an array of Single) and return a new matrix
        ''' </summary>
        Public Shared Function SubtractFromArraySingle(a_array!(), b As Matrix) As Matrix
            Dim a As Matrix = FromArraySingle(a_array)
            Return New Matrix(a.m_matrix - b.m_matrix)
        End Function

        ''' <summary>
        ''' Multiply matrices a and b, add matrix c,
        ''' and apply a function to every element of the result
        ''' </summary>
        Public Shared Function MultiplyAddAndMap(
            a As Matrix, b As Matrix, c As Matrix,
            lambdaFct As Func(Of Double, Double)) As Matrix
            Dim d As New Matrix(a.m_matrix * b.m_matrix)
            d.m_matrix += c.m_matrix
            d.Map(lambdaFct)
            Return d
        End Function

        ''' <summary>
        ''' Multiply matrices a and b, and apply a function to every element of the result
        ''' </summary>
        Public Shared Function MultiplyAndMap(a As Matrix, b As Matrix,
            lambdaFct As Func(Of Double, Double)) As Matrix
            Dim d As New Matrix(a.m_matrix * b.m_matrix)
            d.Map(lambdaFct)
            Return d
        End Function

        ''' <summary>
        ''' Apply a function to each element of the array
        ''' </summary>
        Public Shared Function Map(m As Matrix,
            lambdaFct As Func(Of Double, Double)) As Matrix
            Dim c As New Matrix(m.r, m.c)
            For i = 0 To m.r - 1
                For j = 0 To m.c - 1
                    c.m_matrix(i, j) = lambdaFct.Invoke(m.m_matrix(i, j))
                Next
            Next
            Return c
        End Function

        ' Sum of the elements: overall (none), per row (horizontal) or per column (vertical)
        Public Shared Function Sumatory(m As Matrix,
            Optional dimension As AxisZero = AxisZero.none) As Matrix
            Dim output#(,)
            If dimension = AxisZero.none Then
                output = New Double(0, 0) {}
            ElseIf dimension = AxisZero.horizontal Then
                output = New Double(m.r - 1, 0) {}
            ElseIf dimension = AxisZero.vertical Then
                output = New Double(0, m.c - 1) {}
            Else
                Throw New ArgumentException("The dimension must be -1, 0 or 1")
            End If
            If dimension = AxisZero.none Then
                MatrixLoop((Sub(i, j) output(0, 0) += m.m_matrix(i, j)), m.r, m.c)
            ElseIf dimension = AxisZero.horizontal Then
                MatrixLoop((Sub(i, j) output(i, 0) += m.m_matrix(i, j)), m.r, m.c)
            ElseIf dimension = AxisZero.vertical Then
                MatrixLoop((Sub(i, j) output(0, j) += m.m_matrix(i, j)), m.r, m.c)
            End If
            Dim result As Matrix = output
            Return result
        End Function

        ' Apply the action e(i, j) to every cell of an r x c matrix
        Public Shared Sub MatrixLoop(e As Action(Of Integer, Integer), r%, c%)
            For i = 0 To r - 1
                For j = 0 To c - 1
                    e(i, j)
                Next
            Next
        End Sub

#End Region

#Region "Public operations"

        ' Return a new matrix with m2 (a column vector) prepended as first column
        Public Function AddColumn(m2 As Matrix) As Matrix
            If m2.c <> 1 OrElse m2.r <> Me.r Then Throw New ArgumentException("Invalid dimensions")
            Dim newMatrix = New Double(Me.r - 1, Me.c + 1 - 1) {}
            For i = 0 To Me.r - 1
                newMatrix(i, 0) = m2.m_matrix(i, 0)
            Next
            MatrixLoop((Sub(i, j) newMatrix(i, j + 1) = Me.m_matrix(i, j)), r, c)
            Dim result As Matrix = newMatrix
            Return result
        End Function

        ''' <summary>
        ''' Apply a function to every element of the array
        ''' </summary>
        Public Sub Map(lambdaFct As Func(Of Double, Double))
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    Me.m_matrix(i, j) = lambdaFct.Invoke(Me.m_matrix(i, j))
                Next
            Next
        End Sub

        Public Function Sumatory(Optional dimension As AxisZero = AxisZero.none) As Matrix
            Return Sumatory(Me, dimension)
        End Function

        ''' <summary>
        ''' Cut matrix from r1, c1 to r2, c2
        ''' </summary>
        Public Function Slice(r1%, c1%, r2%, c2%) As Matrix
            If r1 >= r2 OrElse c1 >= c2 OrElse r1 < 0 OrElse r2 < 0 OrElse c1 < 0 OrElse c2 < 0 Then _
                Throw New ArgumentException("Dimensions are not valid")
            Dim slice0 = New Double(r2 - r1 - 1, c2 - c1 - 1) {}
            For i = r1 To r2 - 1
                For j = c1 To c2 - 1
                    slice0(i - r1, j - c1) = Me.m_matrix(i, j)
                Next
            Next
            Dim m As Matrix = slice0
            Return m
        End Function

#End Region

#Region "Private operations"

        ''' <summary>
        ''' Hadamard product (element-wise multiplication):
        ''' Multiply each element of the array with each element of the given array
        ''' </summary>
        Private Overloads Sub Multiply(m As Matrix)
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    Me.m_matrix(i, j) *= m.m_matrix(i, j)
                Next
            Next
        End Sub

        ''' <summary>
        ''' Compute absolute values of a matrix
        ''' </summary>
        Private Function Abs_() As Matrix
            Dim c As New Matrix(Me.r, Me.c)
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    c.m_matrix(i, j) = Math.Abs(Me.m_matrix(i, j))
                Next
            Next
            Return c
        End Function

        ''' <summary>
        ''' Compute average value of the matrix
        ''' </summary>
        Private Function Average_#()
            Dim nbElements% = Me.r * Me.c
            Dim sum# = 0
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    sum += Me.m_matrix(i, j)
                Next
            Next
            Dim average1# = 0
            If nbElements <= 1 Then
                average1 = sum
            Else
                average1 = sum / nbElements
            End If
            Return average1
        End Function

#End Region

#Region "Exports"

        ''' <summary>
        ''' Override <c>ToString()</c> method to pretty-print the matrix
        ''' </summary>
        Public Overrides Function ToString$()
            Return ToStringWithFormat()
        End Function

        ' Pretty-print with a custom numeric format string
        ' (format2Dec, removeNegativeSignFromZero and ReplaceCommaByDot come from modMLPHelper)
        Public Function ToStringWithFormat$(Optional dec$ = format2Dec)
            dec = removeNegativeSignFromZero(dec)
            Dim sb As New StringBuilder()
            sb.AppendLine("{")
            For i = 0 To Me.r - 1
                sb.Append(" {")
                For j = 0 To Me.c - 1
                    Dim strVal$ = Me.m_matrix(i, j).ToString(dec).ReplaceCommaByDot()
                    sb.Append(strVal)
                    If j < Me.c - 1 Then sb.Append(", ")
                Next
                sb.Append("}")
                If i < Me.r - 1 Then sb.Append("," & vbLf)
            Next
            sb.Append("}")
            Dim s$ = sb.ToString
            Return s
        End Function

        ''' <summary>
        ''' Convert whole Matrix object to an array of Single
        ''' </summary>
        Public Function ToArrayOfSingle() As Single()
            Dim length = Me.m_matrix.RowCount * Me.m_matrix.ColumnCount
            Dim array!() = New Single(length - 1) {}
            Dim k = 0
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    array(k) = CSng(Me.m_matrix(i, j))
                    k += 1
                Next
            Next
            Return array
        End Function

        ''' <summary>
        ''' Convert whole Matrix object to an array of Double
        ''' </summary>
        Public Function ToArray() As Double()
            Dim length = Me.m_matrix.RowCount * Me.m_matrix.ColumnCount
            Dim array#() = New Double(length - 1) {}
            Dim k = 0
            For i = 0 To Me.r - 1
                For j = 0 To Me.c - 1
                    array(k) = Me.m_matrix(i, j)
                    k += 1
                Next
            Next
            Return array
        End Function

#End Region

#Region "Miscellaneous"

        Public Function GetValue#(r%, c%)
            Return Me.m_matrix(r, c)
        End Function

        Public Function GetRow(r%) As Matrix
            Dim row = New Double(0, Me.c - 1) {}
            For j = 0 To Me.c - 1
                row(0, j) = Me.m_matrix(r, j)
            Next
            Return row
        End Function

        Public Function GetColumn(c%) As Matrix
            Dim column = New Double(Me.r - 1, 0) {}
            For i = 0 To Me.r - 1
                column(i, 0) = Me.m_matrix(i, c)
            Next
            Return column
        End Function

        ' NextDouble(minValue, maxValue) is presumably an extension method defined
        ' elsewhere in the project -- confirm against modMLPHelper
        Public Shared Function Randomize(r%, c%, rnd As Random,
            Optional minValue! = -0.5!, Optional maxValue! = 0.5!) As Matrix
            Dim random_ = New Double(r - 1, c - 1) {}
            'MatrixLoop((Sub(i, j) random_(i, j) = rnd.NextDouble), x, y)
            MatrixLoop((Sub(i, j) random_(i, j) = Math.Round(rnd.NextDouble(minValue, maxValue),
                clsMLPGeneric.nbRoundingDigits)), r, c)
            Dim m As Matrix = random_
            Return m
        End Function

        Public Sub Randomize(rnd As Random,
            Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
            MatrixLoop((Sub(i, j) Me.m_matrix(i, j) = Math.Round(rnd.NextDouble(minValue, maxValue),
                clsMLPGeneric.nbRoundingDigits)), Me.r, Me.c)
        End Sub

        ''' <summary>
        ''' Return a new matrix containing 1 for each value whose absolute value is
        ''' less than or equal to minThreshold, and 0 otherwise
        ''' </summary>
        Public Function Threshold(minThreshold!)
As Matrix
        Dim c As New Matrix(Me.r, Me.c)
        For i = 0 To c.r - 1
            For j = 0 To c.c - 1
                ' 1 when the absolute value is within the threshold, 0 otherwise
                c.m_matrix(i, j) = CDbl(IIf(Math.Abs(Me.m_matrix(i, j)) <= minThreshold, 1.0#, 0.0#))
            Next
        Next
        Return c
    End Function

#End Region

End Class

#End If

End Namespace

clsMatrixNL.vb

' From https://github.com/nlabiris/perceptrons : C# -> VB .NET conversion

Imports System.Text ' StringBuilder
Imports System.Threading.Tasks ' Parallel.For

Namespace Utility

#Const Implementation = 1 ' 0 : Off, 1 : On

#If Implementation Then

''' <summary>
''' Matrix implementation backed by a 2D Double array
''' </summary>
Public Class Matrix : Implements ICloneable

    Const parallelLoop As Boolean = True ' enable Parallel.For for large matrix products
    Const parallelMinSize = 64           ' minimum dimension that triggers the parallel path
    Shared rowMax%, columnMax%           ' largest dimensions seen so far (debug traces only)

    ' Deep copy: New Matrix(Me) goes through the Matrix -> Double(,) widening
    ' conversion, which returns a clone of the underlying array (see matrixP)
    Private Function IClone() As Object Implements ICloneable.Clone
        Dim m As Matrix = New Matrix(Me)
        Return m
    End Function

    Public Function Clone() As Matrix
        Dim m As Matrix = DirectCast(Me.IClone(), Matrix)
        Return m
    End Function

    ' Underlying storage: data(row, column)
    Private data#(,)

#Region "Properties"

    ' From https://github.com/HectorPulido/Machine-learning-Framework-Csharp (double this[int i, int j])
    Default Public Property Item#(r%, c%)
        Get
            Return Me.data(r, c)
        End Get
        Set(value#)
            Me.data(r, c) = value
        End Set
    End Property

    ' API should be conform to the CLS [Common Language Specification]
    ' Specifically, unsigned types should not be part of the class's public interface,
    ' as users will be forced to implement any combinations like this
    Public Property ItemUIntSng!(r As UInteger, c As UInteger)
        Get
            Return CSng(Me.data(CType(r, Integer), CType(c, Integer)))
        End Get
        Set(value!)
            Me.data(CType(r, Integer), CType(c, Integer)) = value
        End Set
    End Property

    ' Getter returns a clone of the underlying array; setter adopts the given array directly
    Public Property matrixP As Double(,)
        Get
            Return CType(Me.data.Clone(), Double(,))
        End Get
        Set(value As Double(,))
            Me.data = value
        End Set
    End Property

    Public ReadOnly Property isDefined As Boolean
        Get
            Return Not IsNothing(Me.data)
        End Get
    End Property

    ''' <summary>
    ''' Rows
    ''' </summary>
    Public ReadOnly Property r%
        Get
            Return Me.data.GetLength(0)
        End Get
    End Property

    ''' <summary>
    ''' Columns
    ''' </summary>
    Public ReadOnly Property c%
        Get
            Return Me.data.GetLength(1)
        End Get
    End Property

    ''' <summary>
    ''' Transpose
    ''' </summary>
    Public ReadOnly Property T As Matrix
        Get
            Return Transpose_(Me)
        End Get
    End Property

    ' 1 x 2 matrix containing {rows, columns}
    Public ReadOnly Property Size As Matrix
        Get
            Return (New Double(,) {{Me.r, Me.c}})
        End Get
    End Property

    Public ReadOnly Property Abs As Matrix
        Get
            Return Abs_()
        End Get
    End Property

    Public ReadOnly Property Average#
        Get
            Return Average_()
        End Get
    End Property

#End Region

#Region "Constructors"

    Public Sub New()
        ' Constructor used by Tensor
    End Sub

    Public Sub New(rows%, columns%)
        Me.data = New Double(rows - 1, columns - 1) {}
    End Sub

    ' Note: adopts the given array without copying it
    Public Sub New(doubleArray#(,))
        Me.data = doubleArray
    End Sub

    ' Copies the given Single array into a fresh Double array
    Public Sub New(singleArray!(,))
        Dim rows = singleArray.GetLength(0)
        Dim columns = singleArray.GetLength(1)
        ReDim Me.data(rows - 1, columns - 1)
        For i = 0 To rows - 1
            For j = 0 To columns - 1
                Me.data(i, j) = singleArray(i, j)
            Next
        Next
    End Sub

    ''' <summary>
    ''' Create a matrix object from an array of Single (as an n x 1 column)
    ''' </summary>
    Public Shared Function FromArraySingle(inputs!()) As Matrix
        Dim m As New Matrix(inputs.Length, 1)
        For i = 0 To inputs.Length - 1
            m.data(i, 0) = inputs(i)
        Next
        Return m
    End Function

#End Region

#Region "Operators"

    ' Implicit conversion operator #(,) -> Matrix
    Public Shared Widening Operator CType(doubleArray#(,)) As Matrix
        Return New Matrix(doubleArray)
    End Operator

    ' Implicit conversion operator Matrix -> #(,)
    Public Shared Widening Operator CType(matrix0 As Matrix) As Double(,)
        'Return matrix0.data
        ' matrixP returns a clone, so callers cannot mutate the internal array
        Return matrix0.matrixP
    End Operator

    ' Implicit conversion operator !(,) -> Matrix
    Public Shared Widening Operator CType(singleArray!(,)) As Matrix
        Return New Matrix(singleArray)
    End Operator

    Public Shared Operator +(m1 As Matrix, m2 As Matrix) As Matrix
        Dim m1plusm2 As Matrix = m1.Clone()
        m1plusm2.Add(m2)
        Return m1plusm2
    End Operator

    Public Shared Operator -(m1 As Matrix, m2 As Matrix) As Matrix
        Dim m1minusm2 As Matrix = m1.Clone()
        m1minusm2.Subtract(m2)
        Return m1minusm2
    End Operator

    ' Subtract a scalar from every element
    Public Shared Operator -(m2 As Matrix, m1#) As Matrix
        Dim m As Matrix = m2.Clone()
        m.Subtract(m1)
        Return m
    End Operator

    ' Multiply every element by a scalar
    Public Shared Operator *(m2 As Matrix, m1#) As Matrix
        Dim m As Matrix = m2.Clone()
        m.Multiply(m1)
        Return m
    End Operator

    ' Same dimensions: Hadamard (element-wise) product; otherwise matrix product
    Public Shared Operator *(m1 As Matrix, m2 As Matrix) As Matrix
        If m1.r = m2.r AndAlso m1.c = m2.c Then
            Dim m1multm2 As Matrix = m1.Clone()
            m1multm2.Multiply(m2)
            Return m1multm2
        End If
        Dim m As Matrix = Multiply(m1, m2)
        Return m
    End Operator

#End Region

#Region "Public shared operations"

    ' r x c matrix filled with 0
    Public Shared Function Zeros(r%, c%) As Matrix
        Dim zeros0 = New Double(r - 1, c - 1) {}
        MatrixLoop((Sub(i, j) zeros0(i, j) = 0), r, c)
        Dim m As Matrix = zeros0
        Return m
    End Function

    ' r x c matrix filled with 1
    Public Shared Function Ones(r%, c%) As Matrix
        Dim ones0 = New Double(r - 1, c - 1) {}
        MatrixLoop((Sub(i, j) ones0(i, j) = 1), r, c)
        Dim m As Matrix = ones0
        Return m
    End Function

    ''' <summary>
    ''' Transpose a matrix
    ''' </summary>
    Private Shared Function Transpose_(m As Matrix) As Matrix
        Dim c As New Matrix(m.c, m.r)
        For i = 0 To m.r - 1
            For j = 0 To m.c - 1
                c.data(j, i) = m.data(i, j)
            Next
        Next
        Return c
    End Function

    ''' <summary>
    ''' Transpose and multiply this transposed matrix by m
    ''' </summary>
    Public Shared Function TransposeAndMultiply1(original As Matrix, m As Matrix) As Matrix
        'Dim original_t As Matrix = Transpose(original)
        Dim result As Matrix = Multiply(original.T, m)
        Return result
    End Function

    ''' <summary>
    ''' Transpose and multiply a matrix m by this transposed one
    ''' </summary>
    Public Shared Function TransposeAndMultiply2(
        original As Matrix, m As Matrix) As Matrix
        'Dim original_t As Matrix = Transpose(original)
        Dim result As Matrix = Multiply(m, original.T)
        Return result
    End Function

    ''' <summary>
    ''' Subtract 2 matrices (the first as an array of Single) and return a new matrix
    ''' </summary>
    Public Shared Function SubtractFromArraySingle(a_array!(), b As Matrix) As Matrix
        Dim a As Matrix = FromArraySingle(a_array)
        Dim result As Matrix = Subtract(a, b)
        Return result
    End Function

    ''' <summary>
    ''' Multiply matrices a and b, add matrix c,
    ''' and apply a function to every element of the result
    ''' </summary>
    Public Shared Function MultiplyAddAndMap(
        a As Matrix, b As Matrix, c As Matrix, lambdaFct As Func(Of Double, Double)) As Matrix
        Dim d As Matrix = Multiply(a, b)
        d.Add(c)
        d.Map(lambdaFct)
        Return d
    End Function

    ''' <summary>
    ''' Multiply matrices a and b, and apply a function to every element of the result
    ''' </summary>
    Public Shared Function MultiplyAndMap(a As Matrix, b As Matrix, lambdaFct As Func(Of Double, Double)) As Matrix
        Dim d As Matrix = Multiply(a, b)
        d.Map(lambdaFct)
        Return d
    End Function

    ''' <summary>
    ''' Apply a function to each element of the array
    ''' </summary>
    Public Shared Function Map(m As Matrix, lambdaFct As Func(Of Double, Double)) As Matrix
        Dim c As New Matrix(m.r, m.c)
        For i = 0 To m.r - 1
            For j = 0 To m.c - 1
                c.data(i, j) = lambdaFct.Invoke(m.data(i, j))
            Next
        Next
        Return c
    End Function

    ' Sum of the elements: whole matrix (none), per row (horizontal) or per column (vertical)
    Public Shared Function Sumatory(m As Matrix, Optional dimension As AxisZero = AxisZero.none) As Matrix
        Dim output#(,)
        If dimension = AxisZero.none Then
            output = New Double(0, 0) {}
        ElseIf dimension = AxisZero.horizontal Then
            output = New Double(m.r - 1, 0) {}
        ElseIf dimension = AxisZero.vertical Then
            output = New Double(0, m.c - 1) {}
        Else
            Throw New ArgumentException("The dimension must be -1, 0 or 1")
        End If
        If dimension = AxisZero.none Then
            MatrixLoop((Sub(i, j) output(0, 0) += m.data(i, j)), m.r, m.c)
        ElseIf dimension = AxisZero.horizontal Then
            MatrixLoop((Sub(i, j) output(i, 0) += m.data(i, j)), m.r, m.c)
        ElseIf dimension = AxisZero.vertical Then
            MatrixLoop((Sub(i, j) output(0, j) += m.data(i, j)), m.r, m.c)
        End If
        Dim result As Matrix = output
        Return result
    End Function

    ' Run action e(i, j) over an r x c index range, tracking the largest sizes seen (debug)
    Public Shared Sub MatrixLoop(e As Action(Of Integer, Integer), r%, c%)
        If r > rowMax Then rowMax = r : Debug.WriteLine("rowMax=" & rowMax)
        If c > columnMax Then columnMax = c : Debug.WriteLine("columnMax=" & columnMax)
        ' Parallel loop is unstable there?
        'If parallelLoop AndAlso r >= parallelMinSize AndAlso c >= parallelMinSize Then
        '    Parallel.For(0, r - 1 + 1,
        '        Sub(i)
        '            Parallel.For(0, c - 1 + 1,
        '                Sub(j)
        '                    e(i, j)
        '                End Sub)
        '        End Sub)
        'ElseIf parallelLoop AndAlso r >= parallelMinSize Then
        '    Parallel.For(0, r - 1 + 1,
        '        Sub(i)
        '            For j = 0 To c - 1
        '                e(i, j)
        '            Next
        '        End Sub)
        'ElseIf parallelLoop AndAlso c >= parallelMinSize Then
        '    For i = 0 To r - 1
        '        Dim i0 = i
        '        Parallel.For(0, c - 1 + 1,
        '            Sub(j)
        '                e(i0, j)
        '            End Sub)
        '    Next
        'Else
        For i = 0 To r - 1
            For j = 0 To c - 1
                e(i, j)
            Next
        Next
        'End If
    End Sub

#End Region

#Region "Public operations"

    ' Prepend column m2 (r x 1) to this matrix, returning a new r x (c + 1) matrix
    Public Function AddColumn(m2 As Matrix) As Matrix
        If m2.c <> 1 OrElse m2.r <> Me.r Then Throw New ArgumentException("Invalid dimensions")
        Dim newMatrix = New Double(Me.r - 1, Me.c + 1 - 1) {}
        For i = 0 To Me.r - 1
            newMatrix(i, 0) = m2.data(i, 0)
        Next
        MatrixLoop((Sub(i, j) newMatrix(i, j + 1) = Me.data(i, j)), r, c)
        Dim result As Matrix = newMatrix
        Return result
    End Function

    ''' <summary>
    ''' Apply a function to every element of the array
    ''' </summary>
    Public Sub Map(lambdaFct As Func(Of Double, Double))
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                Me.data(i, j) = lambdaFct.Invoke(Me.data(i, j))
            Next
        Next
    End Sub

    Public Function Sumatory(Optional dimension As AxisZero = AxisZero.none) As Matrix
        Return Sumatory(Me, dimension)
    End Function

    ''' <summary>
    ''' Cut matrix from r1, c1 to r2, c2
    ''' </summary>
    Public Function Slice(r1%, c1%, r2%, c2%) As
Matrix
        ' End bounds are exclusive; all bounds must be non-negative and non-empty
        If r1 >= r2 OrElse c1 >= c2 OrElse r1 < 0 OrElse r2 < 0 OrElse c1 < 0 OrElse c2 < 0 Then _
            Throw New ArgumentException("Dimensions are not valid")
        Dim slice0 = New Double(r2 - r1 - 1, c2 - c1 - 1) {}
        For i = r1 To r2 - 1
            For j = c1 To c2 - 1
                slice0(i - r1, j - c1) = Me.data(i, j)
            Next
        Next
        Dim m As Matrix = slice0
        Return m
    End Function

#End Region

#Region "Private operations"

    ''' <summary>
    ''' Add each element of the matrices
    ''' </summary>
    Private Sub Add(m As Matrix)
        If Me.r <> m.r OrElse Me.c <> m.c Then _
            Throw New ArgumentException("Matrix must have the same dimensions") ' 20/03/2021
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                Me.data(i, j) += m.data(i, j)
            Next
        Next
    End Sub

    '''' <summary>
    '''' Subtract a value to each element of the array
    '''' </summary>
    Private Overloads Sub Subtract(n#)
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                Me.data(i, j) -= n
            Next
        Next
    End Sub

    ''' <summary>
    ''' Subtract each element of the matrices
    ''' </summary>
    Private Overloads Sub Subtract(m As Matrix)
        If Me.r <> m.r OrElse Me.c <> m.c Then _
            Throw New ArgumentException("Matrix must have the same dimensions") ' 20/03/2021
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                Me.data(i, j) -= m.data(i, j)
            Next
        Next
    End Sub

    ''' <summary>
    ''' Subtract 2 matrices and return a new matrix
    ''' </summary>
    Private Overloads Shared Function Subtract(a As Matrix, b As Matrix) As Matrix
        If a.r <> b.r OrElse a.c <> b.c Then _
            Throw New ArgumentException("Matrix must have the same dimensions") ' 20/03/2021
        Dim c As New Matrix(a.r, a.c)
        For i = 0 To c.r - 1
            For j = 0 To c.c - 1
                c.data(i, j) = a.data(i, j) - b.data(i, j)
            Next
        Next
        Return c
    End Function

    ''' <summary>
    ''' Scalar product: Multiply each element of the array with the given number
    ''' </summary>
    Private Overloads Sub Multiply(n#)
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                Me.data(i, j) *= n
            Next
        Next
    End Sub

    ''' <summary>
    ''' Hadamard product (element-wise multiplication):
    ''' Multiply each element of the array with each element of the given array
    ''' </summary>
    Private Overloads Sub Multiply(m As Matrix)
        'If Me.c <> m.r Then _
        '    Throw New Exception("Columns of matrix must match rows of matrix m")
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                Me.data(i, j) *= m.data(i, j)
            Next
        Next
    End Sub

    ''' <summary>
    ''' Matrix product
    ''' </summary>
    ''' <exception cref="ArgumentException">when a.c does not match b.r</exception>
    Private Overloads Shared Function Multiply(a As Matrix, b As Matrix) As Matrix
        ' Fix: throw ArgumentException (consistent with Add/Subtract) instead of the base Exception type
        If a.c <> b.r Then _
            Throw New ArgumentException("Columns of A must match rows of B")
        Dim ab As New Matrix(a.r, b.c)
        If ab.r > rowMax Then rowMax = ab.r : Debug.WriteLine("rowMax=" & rowMax)
        If ab.c > columnMax Then columnMax = ab.c : Debug.WriteLine("columnMax=" & columnMax)
        If parallelLoop AndAlso ab.r >= parallelMinSize AndAlso ab.c >= parallelMinSize Then
            ' Both dimensions are large: parallelize rows and columns
            Parallel.For(0, ab.r - 1 + 1,
                Sub(i)
                    Parallel.For(0, ab.c - 1 + 1,
                        Sub(j)
                            Dim sum# = 0
                            For k = 0 To a.c - 1
                                sum += a.data(i, k) * b.data(k, j)
                            Next
                            ab.data(i, j) = sum
                        End Sub)
                End Sub)
        ElseIf parallelLoop AndAlso ab.r >= parallelMinSize Then
            ' Many rows: parallelize the row loop only
            Parallel.For(0, ab.r - 1 + 1,
                Sub(i)
                    For j = 0 To ab.c - 1
                        Dim sum# = 0
                        For k = 0 To a.c - 1
                            sum += a.data(i, k) * b.data(k, j)
                        Next
                        ab.data(i, j) = sum
                    Next
                End Sub)
        ElseIf parallelLoop AndAlso ab.c >= parallelMinSize Then
            ' Many columns: parallelize the column loop only
            ' (i0 copy avoids capturing the mutating loop variable in the lambda)
            For i = 0 To ab.r - 1
                Dim i0 = i
                Parallel.For(0, ab.c - 1 + 1,
                    Sub(j)
                        Dim sum# = 0
                        For k = 0 To a.c - 1
                            sum += a.data(i0, k) * b.data(k, j)
                        Next
                        ab.data(i0, j) = sum
                    End Sub)
            Next
        Else
            ' Small matrices: plain sequential triple loop
            For i = 0 To ab.r - 1
                For j = 0 To ab.c - 1
                    Dim sum# = 0
                    For k = 0 To a.c - 1
                        sum += a.data(i, k) * b.data(k, j)
                    Next
                    ab.data(i, j) = sum
                Next
            Next
        End If
        Return ab
    End Function

    ''' <summary>
    ''' Compute absolute values of a matrix
    ''' </summary>
    Private Function Abs_() As Matrix
        Dim c As New Matrix(Me.r, Me.c)
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                c.data(i, j) = Math.Abs(Me.data(i, j))
            Next
        Next
        Return c
    End Function

    ''' <summary>
    ''' Compute average value of the matrix
    ''' </summary>
    Private Function Average_#()
        Dim nbElements% = Me.r * Me.c
        Dim sum# = 0
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                sum += Me.data(i, j)
            Next
        Next
        Dim average1# = 0
        ' Avoid dividing when the matrix holds at most one element
        If nbElements <= 1 Then
            average1 = sum
        Else
            average1 = sum / nbElements
        End If
        Return average1
    End Function

#End Region

#Region "Exports"

    ''' <summary>
    ''' Override <c>ToString()</c> method to pretty-print the matrix
    ''' </summary>
    Public Overrides Function ToString$()
        Return ToStringWithFormat()
    End Function

    ''' <summary>
    ''' Pretty-print the matrix with the given numeric format
    ''' (commas are replaced by dots in the rendered values)
    ''' </summary>
    Public Function ToStringWithFormat$(Optional dec$ = format2Dec)
        dec = removeNegativeSignFromZero(dec)
        Dim sb As New StringBuilder()
        sb.AppendLine("{")
        For i = 0 To Me.r - 1
            sb.Append(" {")
            For j = 0 To Me.c - 1
                Dim strVal$ = Me.data(i, j).ToString(dec).ReplaceCommaByDot()
                sb.Append(strVal)
                If j < Me.c - 1 Then sb.Append(", ")
            Next
            sb.Append("}")
            If i < Me.r - 1 Then sb.Append("," & vbLf)
        Next
        sb.Append("}")
        Dim s$ = sb.ToString
        Return s
    End Function

    ''' <summary>
    ''' Convert whole Matrix object to an array of Single (row-major order)
    ''' </summary>
    Public Function ToArrayOfSingle() As Single()
        Dim array!() = New Single(Me.data.Length - 1) {}
        Dim k = 0
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                array(k) = CSng(Me.data(i, j))
                k += 1
            Next
        Next
        Return array
    End Function

    ''' <summary>
    ''' Convert whole Matrix object to an array of Double (row-major order)
    ''' </summary>
    Public Function ToArray() As Double()
        Dim array#() = New Double(Me.data.Length - 1) {}
        Dim k = 0
        For i = 0 To Me.r - 1
            For j = 0 To Me.c - 1
                array(k) = Me.data(i, j)
                k += 1
            Next
        Next
        Return array
    End Function

#End Region

#Region "Miscellaneous"

    ' Return the single value stored at row r, column c
    Public Function GetValue#(r%, c%)
        Return Me.data(r, c)
    End Function

    ' Extract row r as a new 1 x c matrix
    Public Function GetRow(r%) As Matrix
        Dim row = New Double(0, Me.c - 1) {}
        For j = 0 To Me.c - 1
            row(0, j) = Me.data(r, j)
        Next
        Return row
    End Function

    ' Extract column c as a new r x 1 matrix
    Public Function GetColumn(c%) As Matrix
        Dim column = New Double(Me.r - 1, 0) {}
        For i = 0 To Me.r - 1
            column(i, 0) = Me.data(i, c)
        Next
        Return column
    End Function

    ''' <summary>
    ''' Build a new r x c matrix of rounded random values
    ''' (NextDouble(min, max) is presumably an extension method defined elsewhere - TODO confirm)
    ''' </summary>
    Public Shared Function Randomize(r%, c%, rnd As Random, Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
As Matrix
        Dim random_ = New Double(r - 1, c - 1) {}
        'MatrixLoop((Sub(i, j) random_(i, j) = rnd.NextDouble), x, y)
        MatrixLoop((Sub(i, j) random_(i, j) = Math.Round(rnd.NextDouble(minValue, maxValue), clsMLPGeneric.nbRoundingDigits)), r, c)
        Dim m As Matrix = random_
        Return m
    End Function

    ''' <summary>
    ''' Fill this matrix in place with rounded random values
    ''' </summary>
    Public Sub Randomize(rnd As Random, Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        MatrixLoop((Sub(i, j) Me.data(i, j) = Math.Round(rnd.NextDouble(minValue, maxValue), clsMLPGeneric.nbRoundingDigits)), Me.r, Me.c)
    End Sub

    ''' <summary>
    ''' Set 1 or 0 for each value of the matrix whether it is inferior
    ''' to the threshold, and return a new matrix
    ''' </summary>
    Public Function Threshold(minThreshold!) As Matrix
        Dim c As New Matrix(Me.r, Me.c)
        For i = 0 To c.r - 1
            For j = 0 To c.c - 1
                ' 1 when the absolute value is within the threshold, 0 otherwise
                c.data(i, j) = CDbl(IIf(Math.Abs(Me.data(i, j)) <= minThreshold, 1.0#, 0.0#))
            Next
        Next
        Return c
    End Function

#End Region

End Class

#End If

End Namespace

clsMLPGeneric.vb

Imports Perceptron.MLP.ActivationFunction
Imports Perceptron.Utility ' Matrix
Imports System.Text ' StringBuilder

''' <summary>
''' MultiLayer Perceptron (MLP) generic class
''' </summary>
Public MustInherit Class clsMLPGeneric

#Region "Declaration"

    Public Enum TBias ' Bias Type
        Disabled = 0
        WeightAdded = 1
        NeuronAdded = 2
        NeuronAddedSpecial = 3
    End Enum

    'Private biasType As TBias = TBias.Disabled

    ''' <summary>
    ''' Set up the network topology (neurons per layer, bias use) and, when a
    ''' series array is configured, build the learning and prediction windows
    ''' </summary>
    Public Overridable Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
        Me.useBias = addBiasColumn
        'Me.biasType = TBias.Disabled
        'If Me.useBias Then Me.biasType = TBias.WeightAdded
        Me.layerCount = neuronCount.Length
        Me.neuronCount = neuronCount
        Me.nbInputNeurons = Me.neuronCount(0)
        'Me.nbHiddenNeurons = Me.neuronCount(1)
        Me.nbOutputNeurons = Me.neuronCount(Me.layerCount - 1)
        Me.trainingAlgorithm = enumTrainingAlgorithm.Default
        Me.useSeriesArray = False
        If Me.windowsSize > 0 AndAlso Not IsNothing(Me.seriesArray) Then
            Me.useSeriesArray = True
            ' Sliding window: each learning sample is windowsSize consecutive values,
            ' the following nbOutputNeurons values being the target
            ReDim Me.inputArray(Me.nbLinesToLearn - 1, Me.windowsSize - 1)
            ReDim Me.targetArray(Me.nbLinesToLearn - 1, Me.nbOutputNeurons - 1)
            For i = 0 To Me.nbLinesToLearn - 1
                For j = 0 To Me.windowsSize - 1
                    Me.inputArray(i, j) = Me.seriesArray(i + j)
                Next
                For j = 0 To Me.nbOutputNeurons - 1
                    Me.targetArray(i, j) = Me.seriesArray(i + Me.windowsSize + j)
                Next
            Next
            Dim startLine = Me.nbLinesToLearn '+ Me.windowsSize
            Dim endLine = startLine + Me.nbLinesToPredict + Me.windowsSize
            Dim length = Me.seriesArray.Count
            ' Clip the prediction range so it never reads past the end of the series
            If endLine > length Then Me.nbLinesToPredict = length - startLine - Me.windowsSize
            ReDim Me.inputArrayTest(Me.nbLinesToPredict - 1, Me.windowsSize - 1)
            ReDim Me.targetArrayTest(Me.nbLinesToPredict - 1, Me.nbOutputNeurons - 1)
            For i = 0 To Me.nbLinesToPredict - 1
                For j = 0 To Me.windowsSize - 1
                    Me.inputArrayTest(i, j) = Me.seriesArray(startLine + i + j)
                Next
                For j = 0 To Me.nbOutputNeurons - 1
                    Me.targetArrayTest(i, j) = Me.seriesArray(startLine + i + Me.windowsSize + j)
                Next
            Next
        End If
    End Sub

    Public MustOverride Sub InitializeWeights(layer%, weights#(,))

    Protected nbInputNeurons%
    Protected nbOutputNeurons%

    ''' <summary>
    ''' Round random weights to reproduce functionnal tests exactly
    ''' </summary>
    Public Const nbRoundingDigits% = 2

    ''' <summary>
    ''' Set a random value other than 0 to avoid to nullify the gradient
    ''' (only for RProp MLP for the moment)
    ''' </summary>
    Public Const minRandomValue! = 0.0001!

    ''' <summary>
    ''' Use Nguyen-Widrow weights initialization (only for RProp MLP for the moment)
    ''' </summary>
    Public useNguyenWidrowWeightsInitialization As Boolean = False

    Public Const expMax! = 50
    Public Const expMax20!
= 20

    Public Enum enumLearningMode
        Defaut = Systematic
        ''' <summary>
        ''' Learn all samples in order
        ''' </summary>
        Systematic = 0
        ''' <summary>
        ''' Learn all samples randomly
        ''' </summary>
        SemiStochastic = 1
        ''' <summary>
        ''' Learn samples randomly
        ''' </summary>
        Stochastic = 2
        ''' <summary>
        ''' Learn all samples in order as a vector
        ''' </summary>
        Vectorial = 3
        ''' <summary>
        ''' Learn all samples in order as a vector for a batch of iterations
        ''' </summary>
        VectorialBatch = 4
    End Enum

    ''' <summary>
    ''' Learning mode of the MLP
    ''' </summary>
    Public learningMode As enumLearningMode

    Public printOutput_ As Boolean = False
    Public printOutputMatrix As Boolean = False
    Public useBias As Boolean = False

#Region "Series array (for example time series)"

    ''' <summary>
    ''' Series array
    ''' </summary>
    Public seriesArray!()

    ''' <summary>
    ''' Use series array (for example time series)
    ''' </summary>
    Public useSeriesArray As Boolean

    ''' <summary>
    ''' Size of the window for the series array
    ''' </summary>
    Public windowsSize%

    ''' <summary>
    ''' Number of lines to learn
    ''' </summary>
    Public nbLinesToLearn%

    ''' <summary>
    ''' Number of lines to predict
    ''' </summary>
    Public nbLinesToPredict%

    Public inputArrayTest!(,)
    Public targetArrayTest!(,)

#End Region

    Public inputArray!(,)
    Public targetArray!(,)

    ''' <summary>
    ''' Output matrix
    ''' </summary>
    Public output As Matrix

    ''' <summary>
    ''' Output array 1D
    ''' </summary>
    Public lastOutputArray1DSingle!()

    ''' <summary>
    ''' Output array 1D
    ''' </summary>
    Public lastOutputArray1D#() ' 29/11/2020

    Public lastErrorArray#(,)

    ''' <summary>
    ''' Last error of the output matrix
    ''' </summary>
    Protected lastError As Matrix

    ''' <summary>
    ''' Result success matrix (1: success, 0: fail)
    ''' </summary>
    Protected success As Matrix

    ''' <summary>
    ''' Number of success according to the threshold between target and output
    ''' </summary>
    Public nbSuccess%

    ''' <summary>
    ''' Percentage of success according to the number of outputs
    ''' </summary>
    Public successPC!

    ''' <summary>
    ''' Output must be 10% close to target to be considered successful
    ''' </summary>
    Public minimalSuccessTreshold! = 0.1 ' 10%

    Public nbIterations%, numIteration%
    Public nbSamples%, numSample%
    Public layerCount%
    Public neuronCount%()

    ''' <summary>
    ''' Learning rate of the MLP (Eta coeff.)
    ''' </summary>
    Public learningRate!

    ''' <summary>
    ''' Weight adjustment of the MLP (Alpha coeff. or momentum)
    ''' (can be 0, but works best if 0.1 for example)
    ''' </summary>
    Public weightAdjustment!

    ''' <summary>
    ''' Set true if the objective is to classify an output (1 among N)
    ''' within a homogeneous group (for example, the Iris Flower logical test,
    ''' but not the 2XOR test, nor the 3XOR test, because the outputs are independent)
    ''' Then a softmax activation is used for last layer, it can speed up training
    ''' (only for RProp MLP for the moment)
    ''' </summary>
    Public classificationObjective As Boolean = False

    ''' <summary>
    ''' Store the learning rate and the momentum coefficient
    ''' </summary>
    Public Sub Initialize(learningRate!, Optional weightAdjustment! = 0)
        Me.learningRate = learningRate
        Me.weightAdjustment = weightAdjustment
    End Sub

    ''' <summary>
    ''' Average error (absolute) of the output matrix (all samples)
    ''' </summary>
    Public averageError#
    Public averageErrorOneSample#
    Public averageErrorSigned#
    Public averageErrorOneSampleSigned#

    ''' <summary>
    ''' Random generator (Shared)
    ''' </summary>
    Public Shared rndShared As New Random

    ''' <summary>
    ''' Random generator
    ''' </summary>
    Public rnd As Random

#End Region

#Region "Activation function"

    ''' <summary>
    ''' Lambda function for the activation function
    ''' </summary>
    Protected lambdaFnc As Func(Of Double, Double)

    ''' <summary>
    ''' Lambda function for the derivative of the activation function
    ''' </summary>
    Protected lambdaFncD As Func(Of Double, Double)

    ''' <summary>
    ''' Lambda function for the derivative of the activation function,
    ''' from the original function
    ''' </summary>
    Protected lambdaFncDFOF As Func(Of Double, Double)

    ''' <summary>
    ''' Activate function of each neuron of the MLP
    '''
    ''' </summary>
    Protected activFnc As MLP.ActivationFunction.IActivationFunction

    Protected m_gain!
    Protected m_center!
    Protected m_actFunc As enumActivationFunction = enumActivationFunction.Undefined

    Public Overridable Function GetActivationFunctionType() As enumActivationFunctionType
        Return enumActivationFunctionType.Normal
    End Function

    ''' <summary>
    ''' Set registered activation function
    ''' </summary>
    Public Overridable Sub SetActivationFunction(actFnc As enumActivationFunction, Optional gain! = 1, Optional center! = 0)
        Select Case actFnc
            Case enumActivationFunction.Undefined : Me.activFnc = Nothing
            Case enumActivationFunction.Identity : Me.activFnc = New IdentityFunction
            Case enumActivationFunction.Sigmoid : Me.activFnc = New SigmoidFunction
            Case enumActivationFunction.HyperbolicTangent : Me.activFnc = New HyperbolicTangentFunction
            Case enumActivationFunction.Gaussian : Me.activFnc = New GaussianFunction
            Case enumActivationFunction.ArcTangent : Me.activFnc = New ArcTangentFunction
            Case enumActivationFunction.Sinus : Me.activFnc = New SinusFunction
            Case enumActivationFunction.ELU : Me.activFnc = New ELUFunction
            Case enumActivationFunction.ReLu : Me.activFnc = New ReLuFunction
            Case enumActivationFunction.ReLuSigmoid : Me.activFnc = New ReLuSigmoidFunction
            Case enumActivationFunction.DoubleThreshold : Me.activFnc = New DoubleThresholdFunction
            Case enumActivationFunction.Mish : Me.activFnc = New MishFunction
            Case Else
                Me.activFnc = Nothing
                Throw New ArgumentException("Activation function undefined!")
        End Select
        ' Bind the lambda wrappers used by the training loops
        If Not IsNothing(Me.activFnc) Then
            Me.lambdaFnc = Function(x#) Me.activFnc.Activation(x, gain, center)
            Me.lambdaFncD = Function(x#) Me.activFnc.Derivative(x, gain, center)
            Me.lambdaFncDFOF = Function(x#) Me.activFnc.DerivativeFromOriginalFunction(x, gain)
        End If
        m_gain = gain
        m_center = center
        m_actFunc = actFnc
    End Sub

    ''' <summary>
    ''' Activation function using optimised derivative:
    ''' </summary>
    Public Overridable Sub SetActivationFunctionOptimized(
        fctAct As enumActivationFunctionOptimized, Optional gain! = 1, Optional center! = 0)
        Select Case fctAct
            Case enumActivationFunctionOptimized.Sigmoid
                Me.m_actFunc = enumActivationFunction.Sigmoid
                Me.activFnc = New SigmoidFunction
            Case enumActivationFunctionOptimized.HyperbolicTangent
                Me.m_actFunc = enumActivationFunction.HyperbolicTangent
                Me.activFnc = New HyperbolicTangentFunction
            Case enumActivationFunctionOptimized.ELU
                Me.m_actFunc = enumActivationFunction.ELU
                Me.activFnc = New ELUFunction
            Case Else
                Me.activFnc = Nothing
                Me.m_actFunc = enumActivationFunction.Undefined
                Throw New ArgumentException("Activation function undefined!")
        End Select
        Me.lambdaFnc = Function(x#) Me.activFnc.Activation(x, gain, center)
        Me.lambdaFncD = Function(x#) Me.activFnc.Derivative(x, gain, center)
        'Me.lambdaFncD = Function(x#) Me.activFnc.DerivativeFromOriginalFunction(x, gain)
        Me.lambdaFncDFOF = Function(x#) Me.activFnc.DerivativeFromOriginalFunction(x, gain)
        m_gain = gain
        m_center = center
        ' Optimized activation function must be expressed from its direct function: f'(x)=g(f(x))
        If Not IsNothing(Me.activFnc) AndAlso Not Me.activFnc.DoesDerivativeDependOnOriginalFunction() Then _
            MsgBox("Activation function must be like this form: f'(x)=g(f(x))", MsgBoxStyle.Exclamation)
    End Sub

#End Region

#Region "Training algorithm (gradient descent optimisation)"

    Public Enum enumTrainingAlgorithm
        ''' <summary>
        ''' Undefined
        ''' </summary>
        Undefined
        ''' <summary>
        ''' Nesterov, Y. (1983). A method for unconstrained convex minimization problem with the rate of
        ''' convergence O(1/k^2). Doklady ANSSSR (translated as Soviet.Math.Docl.), vol. 269, pp. 543-547.
        ''' </summary>
        NesterovMomentum
        ''' <summary>
        ''' The plain stochastic gradient descent (SGD) training algorithm, by Yann Le Cun (1986)
        ''' (readen in his book "Quand la machine apprend", p. 50)
        ''' see https://en.wikipedia.org/wiki/Stochastic_gradient_descent
        ''' </summary>
        StochasticGradientDescent
        ''' <summary>
        ''' A variant of the stochastic gradient descent algorithm with momentum (1986)
        ''' </summary>
        Momentum
        ''' <summary>
        ''' Resilient Back Propagation, by Martin Riedmiller and Heinrich Braun (1992)
        ''' see https://en.wikipedia.org/wiki/Rprop
        ''' </summary>
        RProp
        ''' <summary>
        ''' The AdaGrad learning method, by John Duchi, Elad Hazan and Yoram Singer (2011)
        ''' see http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf
        ''' </summary>
        AdaGrad
        ''' <summary>
        ''' The AdaDelta adaptive learning method, by Matthew D. Zeiler (2012)
        ''' see https://arxiv.org/abs/1212.5701
        ''' </summary>
        AdaDelta
        ''' <summary>
        ''' The RMSProp learning method, by Geoffrey Hinton ("unpublished", 2012)
        ''' see http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
        ''' (Root Mean Square Propagation: a mini-batch version of RProp, slide 29)
        ''' </summary>
        RMSProp
        ''' <summary>
        ''' The Adam learning method, by Diederik P. Kingma and Jimmy Lei Ba (2014)
        ''' see https://arxiv.org/pdf/1412.6980v8.pdf
        ''' </summary>
        Adam
        ''' <summary>
        ''' The AdaMax learning method, by Diederik P. Kingma and Jimmy Lei Ba (2015)
        ''' see section 7.1 of https://arxiv.org/pdf/1412.6980v8.pdf
        ''' </summary>
        AdaMax
        ''' <summary>
        ''' Default: Stochastic Gradient Descent (SGD)
        ''' </summary>
        [Default] = StochasticGradientDescent
    End Enum

    ''' <summary>
    ''' Training algorithm (gradient descent optimisation)
    ''' </summary>
    Public trainingAlgorithm As enumTrainingAlgorithm = enumTrainingAlgorithm.Default

#End Region

#Region "Randomize"

    ''' <summary>
    ''' Randomize weights
    ''' </summary>
    Public MustOverride Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
Public Overridable Sub RoundWeights() End Sub #End Region #Region "Error" ''' <summary> ''' Compute error of the output matrix for all samples ''' </summary> Public Overridable Sub ComputeError() ' Calculate the error: ERROR = TARGETS - OUTPUTS Dim m As Matrix = Me.targetArray Me.lastError = m - Me.output ComputeSuccess() End Sub ''' <summary> ''' Compute error of the output matrix for one sample ''' </summary> Public Overridable Sub ComputeErrorOneSample(targetArray!(,)) ' Calculate the error: ERROR = TARGETS - OUTPUTS Dim m As Matrix = targetArray Me.lastError = m - Me.output ComputeSuccess() End Sub Public Overridable Sub ComputeSuccess() Me.success = Me.lastError.Threshold(minimalSuccessTreshold) Dim sum# = Me.success.Sumatory()(0, 0) Me.nbSuccess = CInt(Math.Round(sum)) Me.successPC = CSng(Me.nbSuccess / (Me.success.r * Me.success.c)) End Sub ''' <summary> ''' Compute average error of the output matrix for all samples from last error layer ''' </summary> Public Overridable Function ComputeAverageErrorFromLastError#() ' Compute first abs then average: 'Me.averageError = Me.lastError.Abs.Average Return Me.lastError.Abs.Average ' 23/05/2021 End Function Public Overridable Function ComputeAverageSignedErrorFromLastError#() Return Me.lastError.Average End Function ''' <summary> ''' Compute average error of the output matrix for all samples ''' </summary> Public Overridable Function ComputeAverageError#() Me.ComputeError() Me.averageError = Me.ComputeAverageErrorFromLastError() Me.averageErrorSigned = Me.ComputeAverageSignedErrorFromLastError() Return Me.averageError End Function ''' <summary> ''' Compute average error of the output matrix for one sample ''' </summary> Public Overridable Function ComputeAverageErrorOneSample#(targetArray!(,)) Me.ComputeErrorOneSample(targetArray) Me.averageErrorOneSample = Me.ComputeAverageErrorFromLastError() Me.averageErrorOneSampleSigned = Me.ComputeAverageSignedErrorFromLastError() Return Me.averageErrorOneSample End Function 
#End Region

#Region "Train"

''' <summary>
''' Train one sample (run one iteration)
''' </summary>
Public MustOverride Sub TrainOneSample(input!(), target!())

''' <summary>
''' Train all samples (run epoch for one iteration)
''' </summary>
Public Sub Train(Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
    ' Time the whole training run and trace it to the debug output
    Dim sw As New Stopwatch
    sw.Start()
    Debug.WriteLine(Now() & " Train...")
    Train(Me.inputArray, Me.targetArray, Me.nbIterations, learningMode)
    sw.Stop()
    Debug.WriteLine(Now() & " Train: Done. " & sw.Elapsed.TotalSeconds.ToString("0.0") & " sec.")
    ' If it is not already printed, print now
    If Not Me.printOutput_ Then PrintSuccess(Me.nbIterations - 1)
End Sub

' Overload: train using the stored input and target arrays
Public Sub Train(nbIterations%, Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
    Train(Me.inputArray, Me.targetArray, nbIterations, learningMode)
End Sub

' Overload: train given inputs against the stored target array
Public Sub Train(inputs!(,), nbIterations%, Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
    Train(inputs, Me.targetArray, nbIterations, learningMode)
End Sub

' Main overload: dispatch to the concrete training strategy for the learning mode
Public Sub Train(inputs!(,), targets!(,), nbIterations%, Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
    Me.numIteration = 0
    Me.nbIterations = nbIterations
    Me.learningMode = learningMode
    InitializeTraining()
    Select Case learningMode
        Case enumLearningMode.Vectorial, enumLearningMode.VectorialBatch
            ' Vectorial modes reuse TrainSystematic; overridden in the vectorized subclass
            TrainSystematic(inputs, targets, learningMode)
        Case enumLearningMode.Systematic
            TrainSystematic(inputs, targets)
        Case enumLearningMode.SemiStochastic
            TrainSemiStochastic(inputs, targets)
        Case enumLearningMode.Stochastic
            TrainStochastic(inputs, targets)
    End Select
End Sub

''' <summary>
''' Train all samples (one iteration)
''' </summary>
Public Sub TrainAllSamples(inputs!(,), targets!(,))
    InitializeTraining()
    Me.nbSamples = inputs.GetLength(0)
    For j = 0 To Me.nbSamples - 1 ' Systematic learning
        Me.numSample = j
        Dim inp = clsMLPHelper.GetVector(inputs, j)
        Dim targ = clsMLPHelper.GetVector(targets, j)
        TrainOneSample(inp, targ)
    Next
End Sub
''' <summary>
''' Train samples in random order
''' </summary>
Public Overridable Sub TrainStochastic(inputs!(,), targets!(,))
    Me.learningMode = enumLearningMode.Stochastic
    InitializeTraining()
    Me.nbSamples = inputs.GetLength(0)
    Dim nbTargets = targets.GetLength(1)
    For iteration = 0 To Me.nbIterations - 1
        Me.numIteration = iteration
        ' One random sample per iteration (a sample may be drawn several times)
        Dim r% = rndShared.Next(maxValue:=Me.nbSamples) ' Stochastic learning
        Dim inp = clsMLPHelper.GetVector(inputs, r)
        Dim targ = clsMLPHelper.GetVector(targets, r)
        TrainOneSample(inp, targ)
        If Me.printOutput_ Then PrintOutput(iteration)
    Next
    TestAllSamples(inputs, nbTargets)
End Sub

''' <summary>
''' Train all samples in random order
''' </summary>
Public Overridable Sub TrainSemiStochastic(inputs!(,), targets!(,))
    Me.learningMode = enumLearningMode.SemiStochastic
    InitializeTraining()
    Me.nbSamples = inputs.GetLength(0)
    Dim nbInputs = inputs.GetLength(1) ' NOTE(review): unused in the visible code
    Dim nbTargets = targets.GetLength(1)
    For iteration = 0 To Me.nbIterations - 1
        Me.numIteration = iteration
        ' Semi-stochastic learning
        ' Each iteration visits every sample exactly once, in a random order
        Dim lstEch As New List(Of Integer)
        For i = 0 To Me.nbSamples - 1
            lstEch.Add(i)
        Next
        For j = 0 To Me.nbSamples - 1
            Me.numSample = j
            Dim nbItemsRemaining = lstEch.Count
            ' 28/05/2020 In two stages!
            Dim k = rndShared.Next(maxValue:=nbItemsRemaining)
            Dim r = lstEch(k)
            lstEch.RemoveAt(k)
            Dim inp = clsMLPHelper.GetVector(inputs, r)
            Dim targ = clsMLPHelper.GetVector(targets, r)
            TrainOneSample(inp, targ)
        Next j
        If Me.printOutput_ Then PrintOutput(iteration)
    Next
    TestAllSamples(inputs, nbTargets)
End Sub

''' <summary>
''' Train all samples in order
''' </summary>
Public Overridable Sub TrainSystematic(inputs!(,), targets!(,), Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
    Me.learningMode = learningMode
    InitializeTraining()
    Dim nbTargets = targets.GetLength(1)
    For iteration = 0 To Me.nbIterations - 1
        Me.numIteration = iteration
        TrainAllSamples(inputs, targets)
        If Me.printOutput_ Then PrintOutput(iteration)
    Next
    TestAllSamples(inputs, nbTargets)
End Sub

''' <summary>
''' Initialize some training variables (for one iteration)
''' </summary>
Public Sub InitializeTraining()
    Me.averageErrorSigned = 0
    Me.averageErrorOneSampleSigned = 0
    Me.numSample = 0
End Sub

''' <summary>
''' Close the training session (optional hook; default implementation does nothing)
''' </summary>
Public Overridable Sub CloseTrainingSession()
End Sub

#End Region

#Region "Test"

''' <summary>
''' Test one sample: Propagate the input signal into the MLP (feed forward)
''' </summary>
Public MustOverride Sub TestOneSample(input!())

''' <summary>
''' Test one sample: Propagate the input signal into the MLP and return the output
''' </summary>
' NOTE(review): parameter name 'ouput' (sic) kept for interface compatibility
Public Overridable Sub TestOneSample(input!(), ByRef ouput!())
    TestOneSample(input)
    ouput = Me.lastOutputArray1DSingle
End Sub

''' <summary>
''' Test one sample and update the one-sample average error statistics
''' </summary>
Public Overridable Sub TestOneSampleAndComputeError(input!(), target!())
    ' 29/11/2020
    TestOneSample(input)
    ' Wrap the 1D target into a one-row 2D array, as expected by the error routines
    Dim targetArray2D!(0, target.GetUpperBound(0))
    clsMLPHelper.Fill2DArrayOfSingle(targetArray2D, target, 0)
    ComputeAverageErrorOneSample(targetArray2D)
End Sub

''' <summary>
''' Test all samples
''' </summary>
Public Overridable Sub TestAllSamples(inputs!(,), nbOutputs%)
    InitializeTraining()
    Me.nbSamples = inputs.GetLength(0)
    Dim nbInputs = inputs.GetLength(1) ' NOTE(review): unused in the visible code
    Dim outputs!(0 To Me.nbSamples - 1, 0 To nbOutputs - 1)
    For i = 0 To Me.nbSamples - 1
        Me.numSample = i
        Dim inp = clsMLPHelper.GetVector(inputs, i)
        TestOneSample(inp)
        Dim output!() = Me.lastOutputArray1DSingle
        For j = 0 To output.GetLength(0) - 1
            outputs(i, j) = output(j)
        Next
    Next
    Me.output = outputs
    ComputeAverageError()
End Sub

''' <summary>
''' Test all samples (output count taken from the stored target array)
''' </summary>
Public Sub TestAllSamples(inputs!(,))
    Dim nbOutputs = Me.targetArray.GetLength(1)
    TestAllSamples(inputs, nbOutputs)
End Sub

''' <summary>
''' Test all samples against the given targets (stores them as the target array)
''' </summary>
Public Sub TestAllSamples(inputs!(,), targets!(,))
    Me.targetArray = targets
    Dim nbOutputs = targets.GetLength(1)
    TestAllSamples(inputs, nbOutputs)
End Sub

''' <summary>
''' Test all samples against the given targets with an explicit output count
''' </summary>
Public Sub TestAllSamples(inputs!(,), targets!(,), nbOutputs%)
    Me.targetArray = targets
    TestAllSamples(inputs, nbOutputs)
End Sub

#End Region

#Region "Print"

''' <summary>
''' Print weights for functional test
''' </summary>
Public Overridable Sub PrintWeights()
    ShowMessage(ShowWeights())
End Sub

''' <summary>
''' Build a printable description of the network parameters and every weight,
''' layer by layer, formatted as nested brace-delimited lists.
''' </summary>
Public Overridable Function ShowWeights$(Optional format$ = format2Dec)
    Dim sb As New StringBuilder
    sb.Append(Me.ShowParameters())
    For i = 0 To Me.layerCount - 1
        sb.AppendLine("Neuron count(" & i & ")=" & Me.neuronCount(i))
    Next
    sb.AppendLine()
    ' Layer 0 is the input layer: it carries no weights, so start at 1
    For i = 1 To Me.layerCount - 1
        sb.AppendLine("W(" & i & ")={")
        Dim nbNeuronsLayer = Me.neuronCount(i)
        Dim nbNeuronsPreviousLayer = Me.neuronCount(i - 1)
        For j = 0 To nbNeuronsLayer - 1
            sb.Append(" {")
            Dim nbWeights = nbNeuronsPreviousLayer
            ' One extra weight per neuron when a bias is used
            If Me.useBias Then nbWeights += 1
            For k = 0 To nbWeights - 1
                Dim weight = GetWeight(i, j, k)
                Dim sVal$ = weight.ToString(format).ReplaceCommaByDot()
                sb.Append(sVal)
                If k < nbWeights - 1 Then sb.Append(", ")
            Next k
            sb.Append("}")
            If j < nbNeuronsLayer - 1 Then sb.Append("," & vbCrLf)
        Next j
        sb.Append("}" & vbCrLf)
        If i < Me.layerCount - 1 Then sb.AppendLine()
    Next i
    Return sb.ToString()
End Function

' Base implementation returns 0; concrete MLP classes override this accessor
Public Overridable Function GetWeight#(layer%, neuron%,
weight%)
    Return 0.0#
End Function

' Base implementation returns 0; concrete MLP classes override this accessor
Public Overridable Function GetWeightSingle!(layer%, neuron%, weight%)
    Return 0.0!
End Function

' Base implementations do nothing; concrete MLP classes override these mutators
Public Overridable Sub SetWeight(layer%, neuron%, weight%, weightValue#)
End Sub

Public Overridable Sub SetWeightSingle(layer%, neuron%, weight%, weightValue!)
End Sub

''' <summary>
''' Re-test all samples and print the success statistics for the given iteration
''' (only when forced or when ShowThisIteration selects this iteration).
''' </summary>
Public Overridable Sub PrintOutput(iteration%, Optional force As Boolean = False)
    If force OrElse ShowThisIteration(iteration) Then
        Dim nbTargets = Me.targetArray.GetLength(1)
        TestAllSamples(Me.inputArray, nbTargets)
        ComputeAverageError()
        PrintSuccess(iteration)
    End If
End Sub

Public MustOverride Function GetMLPType$()

''' <summary>
''' Build a printable summary of the network configuration; optional settings
''' are only listed when they differ from their default value.
''' </summary>
Public Function ShowParameters$()
    Dim sb As New StringBuilder()
    sb.AppendLine("")
    sb.AppendLine("MLP type=" & GetMLPType())
    If Me.learningMode <> enumLearningMode.Defaut Then sb.AppendLine(
        "learning mode=" & clsMLPHelper.ReadEnumDescription(Me.learningMode))
    If Me.trainingAlgorithm <> enumTrainingAlgorithm.Default Then sb.AppendLine(
        "Training algorithm=" & clsMLPHelper.ReadEnumDescription(Me.trainingAlgorithm))
    sb.AppendLine("layer count=" & Me.layerCount)
    sb.AppendLine("neuron count=" & clsMLPHelper.ArrayToString(Me.neuronCount))
    sb.AppendLine("use bias=" & Me.useBias)
    If Me.learningRate <> 0 Then sb.AppendLine("learning rate=" & Me.learningRate)
    If Me.weightAdjustment <> 0 Then sb.AppendLine(
        "weight adjustment=" & Me.weightAdjustment)
    Dim afType = Me.GetActivationFunctionType()
    If afType <> enumActivationFunctionType.Normal Then _
        sb.AppendLine("activation function type=" & clsMLPHelper.ReadEnumDescription(afType))
    sb.AppendLine("activation function=" & clsMLPHelper.ReadEnumDescription(Me.m_actFunc))
    sb.AppendLine("gain=" & Me.m_gain)
    If Me.m_center <> 0 Then sb.AppendLine("center=" & Me.m_center)
    ' NOTE(review): "ojective" below is a typo ("objective") in a user-visible
    ' string; left byte-identical here, fix as a deliberate message change
    If Me.classificationObjective Then _
        sb.AppendLine("use softmax for last layer=True (ojective is classification)")
    If Me.useNguyenWidrowWeightsInitialization Then _
        sb.AppendLine("use Nguyen-Widrow weights initialization=True")
    ' NOTE(review): "treshold" (sic) matches the field name minimalSuccessTreshold
    If Me.minimalSuccessTreshold <> 0 Then sb.AppendLine(
        "minimal success treshold=" & Me.minimalSuccessTreshold)
    If Me.useSeriesArray Then
        sb.AppendLine("use series array=True")
        sb.AppendLine("windows size=" & Me.windowsSize)
        sb.AppendLine("nb lines to predict=" & Me.nbLinesToPredict)
        sb.AppendLine("nb lines to learn=" & Me.nbLinesToLearn)
        sb.AppendLine("nb lines total=" & Me.seriesArray.Length)
    End If
    sb.AppendLine("iterations=" & Me.nbIterations)
    sb.AppendLine("")
    Return sb.ToString()
End Function

''' <summary>
''' Print the parameter summary with a timestamp
''' </summary>
Public Sub PrintParameters()
    Dim sb As New StringBuilder()
    sb.AppendLine("")
    sb.AppendLine(Now() & " :")
    sb.Append(ShowParameters())
    ShowMessage(sb.ToString())
End Sub

''' <summary>
''' Decide whether the given iteration should be reported: always for the first 10
''' and the last one, then every 10th up to 100, every 100th up to 1000, etc.
''' </summary>
Public Function ShowThisIteration(iteration%) As Boolean
    If (iteration >= Me.nbIterations - 1 OrElse iteration < 10 OrElse
        ((iteration + 1) Mod 10 = 0 AndAlso iteration < 100) OrElse
        ((iteration + 1) Mod 100 = 0 AndAlso iteration < 1000) OrElse
        ((iteration + 1) Mod 1000 = 0 AndAlso iteration < 10000) OrElse
        (iteration + 1) Mod 10000 = 0) Then Return True
    Return False
End Function

''' <summary>
''' Write a message to the console (console apps only) and to the debug output
''' </summary>
Public Sub ShowMessage(msg$)
    If isConsoleApp() Then Console.WriteLine(msg)
    Debug.WriteLine(msg)
End Sub

''' <summary>
''' Print the iteration number, optionally the output matrix, and the success stats
''' </summary>
Protected Sub PrintSuccess(iteration%)
    Dim msg$ = vbLf & "Iteration n°" & iteration + 1 & "/" & Me.nbIterations & vbLf
    If Me.printOutputMatrix Then msg &= "Output: " & Me.output.ToString() & vbLf
    If Not IsNothing(Me.success) Then msg &=
        "Average error: " & Me.averageError.ToString(format6Dec) & vbLf &
        "Success (" & (minimalSuccessTreshold).ToString("0%") & "): " &
        Me.nbSuccess & "/" & Me.success.r * Me.success.c & ": " &
        Me.successPC.ToString("0.0%")
    ShowMessage(msg)
End Sub

''' <summary>
''' Print the success statistics of a prediction run
''' </summary>
Public Sub PrintSuccessPrediction()
    Dim msg$ = vbLf & "Prediction: " & vbLf &
        "Success (" & (minimalSuccessTreshold).ToString("0%") & "): " &
        Me.nbSuccess & "/" & Me.success.r * Me.success.c & ": " &
        Me.successPC.ToString("0.0%")
    ShowMessage(msg)
End Sub

#End Region

End Class

' ----- next file in the concatenated source -----
clsMLPGenericVec.vb

''' <summary>
''' Base class for vectorized MLP implementations (whole-epoch matrix operations)
''' </summary>
Public MustInherit Class clsVectorizedMLPGeneric : Inherits clsMLPGeneric

Public
vectorizedLearningMode As Boolean = True
Public exampleCount%
Public minBatchSize% = 1
'Protected neuronCount%()

''' <summary>
''' Train all samples at once (run epoch for all iterations: all samples ordered in one vector)
''' </summary>
Public MustOverride Sub TrainVector()

''' <summary>
''' Train all samples at once (run epoch for one iteration: all samples ordered in one vector)
''' </summary>
Public MustOverride Sub TrainVectorOneIteration()

' Number of iterations grouped into one batch between two reports
Private m_nbIterationsBatch% = 10
Public Property nbIterationsBatch%()
    Get
        Return m_nbIterationsBatch
    End Get
    Set(value%)
        m_nbIterationsBatch = value
    End Set
End Property

''' <summary>
''' Train all samples at once for a batch of iterations
''' </summary>
Public Overridable Sub TrainVectorBatch(nbIterationsBatch%)
    ' Default implementation: call TrainVectorOneIteration()
    Me.learningMode = enumLearningMode.VectorialBatch
    Me.vectorizedLearningMode = True
    For iteration = 0 To nbIterationsBatch - 1
        TrainVectorOneIteration()
    Next
    SetOuput1D()
End Sub

''' <summary>
''' Train all samples in batch learning mode
''' </summary>
Public Overridable Sub TrainVectorBatch()
    Me.learningMode = enumLearningMode.VectorialBatch
    Me.vectorizedLearningMode = True
    If Not Me.printOutput_ Then
        ' No progress printing: run fixed-size batches until nbIterations is reached
        Dim nbIterationsBatch0 = Me.nbIterationsBatch
        If nbIterationsBatch0 < 1 Then nbIterationsBatch0 = 1
        ' NOTE(review): CInt rounds to nearest (banker's rounding), not truncation
        Dim nbIterations0 = CInt(Me.nbIterations / nbIterationsBatch0)
        If Me.nbIterations < nbIterationsBatch0 Then nbIterations0 = 1
        Dim iteration = 0
        Dim iterationTot = 0
        Do While iteration < nbIterations0
            ' Shrink the last batch so the total never exceeds nbIterations
            If iterationTot + nbIterationsBatch0 > Me.nbIterations Then
                nbIterationsBatch0 = Me.nbIterations - iterationTot
            End If
            If nbIterationsBatch0 <= 0 Then Exit Do
            TrainVectorBatch(nbIterationsBatch0)
            iteration += 1
            iterationTot += nbIterationsBatch0
        Loop
        'Debug.WriteLine(iterationTot & "/" & Me.nbIterations)
    Else
        ' Progress printing: grow the batch size with the iteration count
        ' (1, 10, 100, 1000) so early iterations are reported densely
        Dim minBatchSizeFound = False
        Dim iteration = 0
        Do While iteration < Me.nbIterations
            Dim nbIterationsBatch0%
            If iteration < 10 - 1 Then
                nbIterationsBatch0 = 1
            ElseIf iteration < 100 - 1 Then
                nbIterationsBatch0 = 10
            ElseIf iteration < 1000 - 1 Then
                nbIterationsBatch0 = 100
            Else
                nbIterationsBatch0 = 1000
            End If
            If nbIterationsBatch0 < Me.minBatchSize Then
                nbIterationsBatch0 = Me.minBatchSize
                minBatchSizeFound = True
            End If
            If iteration + nbIterationsBatch0 > Me.nbIterations Then
                nbIterationsBatch0 = Me.nbIterations - iteration
            End If
            If nbIterationsBatch0 > Me.nbIterationsBatch Then
                nbIterationsBatch0 = Me.nbIterationsBatch
            End If
            If nbIterationsBatch0 <= 0 Then Exit Do
            TrainVectorBatch(nbIterationsBatch0)
            If minBatchSizeFound AndAlso iteration > 0 Then
                PrintOutput(iteration - 1)
            Else
                PrintOutput(iteration)
            End If
            If iteration + nbIterationsBatch0 >= Me.nbIterations Then
                iteration = Me.nbIterations - 1
                Exit Do
            End If
            iteration += nbIterationsBatch0
        Loop
        ' Make sure the final state is always reported
        If minBatchSizeFound OrElse Not ShowThisIteration(iteration) Then _
            PrintOutput(iteration, force:=True)
    End If
    SetOuput1D()
    ComputeAverageError()
End Sub

''' <summary>
''' Route vectorial modes to TrainVector/TrainVectorBatch, otherwise fall back
''' to the sample-by-sample base implementation
''' </summary>
Public Overrides Sub TrainSystematic(inputs!(,), targets!(,), Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
    Me.learningMode = learningMode
    If learningMode = enumLearningMode.VectorialBatch Then
        TrainVectorBatch()
        Exit Sub
    End If
    If learningMode = enumLearningMode.Vectorial Then
        TrainVector()
        Exit Sub
    End If
    Me.vectorizedLearningMode = False
    Me.exampleCount = 1
    MyBase.TrainSystematic(inputs, targets, learningMode)
End Sub

Public Overrides Sub TrainStochastic(inputs!(,), targets!(,))
    Me.learningMode = enumLearningMode.Stochastic
    Me.vectorizedLearningMode = False
    Me.exampleCount = 1
    MyBase.TrainStochastic(inputs, targets)
End Sub

Public Overrides Sub TrainSemiStochastic(inputs!(,), targets!(,))
    Me.learningMode = enumLearningMode.SemiStochastic
    Me.vectorizedLearningMode = False
    Me.exampleCount = 1
    MyBase.TrainSemiStochastic(inputs, targets)
End Sub

' Optional hook; overridden by concrete vectorized implementations
' NOTE(review): "Ouput" (sic) is the established name — kept for compatibility
Public Overridable Sub SetOuput1D()
End Sub

Public Overrides Sub PrintOutput(iteration%, Optional force As Boolean = False)
    If force OrElse ShowThisIteration(iteration) Then
        If Not Me.vectorizedLearningMode Then
            TestAllSamples(Me.inputArray)
        Else
            SetOuput1D()
            ComputeAverageError()
        End If
        PrintSuccess(iteration)
    End If
End Sub

End Class

' ----- next file in the concatenated source -----
clsMLPHelper.vb

Imports System.ComponentModel ' DescriptionAttribute
Imports System.Text ' StringBuilder

''' <summary>
''' Static helpers for array conversion, filling and comparison
''' </summary>
Public Class clsMLPHelper

' Extract row 'index' of a 2D Single array as a 1D vector
Public Shared Function GetVector(array2D!(,), index%) As Single()
    Dim length = array2D.GetLength(1)
    Dim vect!(0 To length - 1)
    For k = 0 To length - 1
        vect(k) = array2D(index, k)
    Next
    Return vect
End Function

' Extract column 'index' of a 2D Single array as a 1D vector
Public Shared Function GetColumn(array2D!(,), index%) As Single()
    Dim length = array2D.GetLength(0)
    Dim vect!(0 To length - 1)
    For k = 0 To length - 1
        vect(k) = array2D(k, index)
    Next
    Return vect
End Function

' Copy array1D into row 'index' of array2D
Public Shared Sub Fill2DArrayOfSingle(array2D!(,), array1D!(), index%)
    Dim nbItems = array1D.GetLength(0)
    For j = 0 To nbItems - 1
        array2D(index, j) = array1D(j)
    Next
End Sub

' Copy array1D into column 'index' of array2D
Public Shared Sub Fill2DArrayOfSingle2(array2D!(,), array1D!(), index%)
    Dim nbItems = array1D.GetLength(0)
    For j = 0 To nbItems - 1
        array2D(j, index) = array1D(j)
    Next
End Sub

' Copy a Single vector into row 'index' of a Double 2D array
Public Shared Sub Fill2DArrayOfDoubleByArrayOfSingle(array2D#(,), array1D!(), index%)
    Dim nbItems = array1D.GetLength(0)
    For j = 0 To nbItems - 1
        array2D(index, j) = array1D(j)
    Next
End Sub

' Copy row 'index' from a flattened Single vector into a Double 2D array
Public Shared Sub Fill2DArrayOfDoubleByArrayOfSingle2(array2D#(,), array1D!(), index%)
    Dim nbItems = array2D.GetLength(1)
    For j = 0 To nbItems - 1
        array2D(index, j) = array1D(j + index * nbItems)
    Next
End Sub

' Copy row 'index' from a flattened Double vector into a Double 2D array
Public Shared Sub Fill2DArrayOfDoubleByArray(array2D#(,), array1D#(), index%)
    Dim nbItems = array2D.GetLength(1)
    For j = 0 To nbItems - 1
        array2D(index, j) = array1D(j + index * nbItems)
    Next
End Sub

' Copy a Double vector into row 'index' of a Double 2D array
Public Shared Sub Fill2DArrayOfDouble(array2D#(,), array1D#(), index%)
    Dim nbItems = array2D.GetLength(1)
    For j = 0 To nbItems - 1
        array2D(index, j) = array1D(j)
    Next
End Sub

' Element-wise widening conversion of a 2D Single array to Double
Public Shared Function Convert2DArrayOfSingleToDouble(array2D!(,)) As Double(,)
    Dim length0 = array2D.GetLength(0)
    Dim length1
= array2D.GetLength(1)
    Dim arr#(0 To length0 - 1, 0 To length1 - 1)
    For i = 0 To length0 - 1
        For j = 0 To length1 - 1
            arr(i, j) = array2D(i, j)
        Next
    Next
    Return arr
End Function

' Element-wise widening conversion of a 1D Single array to Double
Public Shared Function Convert1DArrayOfSingleToDouble(array1D!()) As Double()
    Dim length0 = array1D.GetLength(0)
    Dim arr#(0 To length0 - 1)
    For i = 0 To length0 - 1
        arr(i) = array1D(i)
    Next
    Return arr
End Function

' Element-wise narrowing conversion of a 1D Double array to Single
Public Shared Function Convert1DArrayOfDoubleToSingle(array1D#()) As Single()
    Dim length0 = array1D.GetLength(0)
    Dim arr!(0 To length0 - 1)
    For i = 0 To length0 - 1
        arr(i) = CSng(array1D(i))
    Next
    Return arr
End Function

Public Shared Function Transform2DArrayToJaggedArray(array2D#(,)) As Double()()
    ' Transform a 2D array into a jagged array
    Dim length0 = array2D.GetLength(0)
    Dim length1 = array2D.GetLength(1)
    Dim arr As Double()() = New Double(length0 - 1)() {}
    For i = 0 To length0 - 1
        arr(i) = New Double() {}
        ReDim arr(i)(length1 - 1)
        For j = 0 To length1 - 1
            arr(i)(j) = array2D(i, j)
        Next j
    Next i
    Return arr
End Function

Public Shared Function Transform2DArrayToJaggedArraySingle(array2D!(,)) As Single()()
    ' Transform a 2D array into a jagged array
    Dim length0 = array2D.GetLength(0)
    Dim length1 = array2D.GetLength(1)
    Dim arr As Single()() = New Single(length0 - 1)() {}
    For i = 0 To length0 - 1
        arr(i) = New Single() {}
        ReDim arr(i)(length1 - 1)
        For j = 0 To length1 - 1
            arr(i)(j) = array2D(i, j)
        Next j
    Next i
    Return arr
End Function

Public Shared Function Transform2DArrayDoubleToJaggedArraySingle(array2D#(,)) As Single()()
    ' Transform a 2D array into a jagged array (narrowing Double -> Single)
    Dim length0 = array2D.GetLength(0)
    Dim length1 = array2D.GetLength(1)
    Dim arr As Single()() = New Single(length0 - 1)() {}
    For i = 0 To length0 - 1
        arr(i) = New Single() {}
        ReDim arr(i)(length1 - 1)
        For j = 0 To length1 - 1
            arr(i)(j) = CSng(array2D(i, j))
        Next j
    Next i
    Return arr
End Function

Public Shared Function Transform2DArrayDoubleToArraySingle(array2D#(,)) As Single()
    ' Transform a 2D array into an array (row-major flattening)
    Dim length0 = array2D.GetLength(0)
    Dim length1 = array2D.GetLength(1)
    Dim arr!(length0 * length1 - 1)
    For i = 0 To length0 - 1
        For j = 0 To length1 - 1
            arr(i * length1 + j) = CSng(array2D(i, j))
        Next j
    Next i
    Return arr
End Function

Public Shared Function Transform2DArrayDoubleToArraySingle2(array2D#(,)) As Single()
    ' Transform a 2D array into an array (column-major read)
    ' NOTE(review): destination index uses stride length1 while i ranges over
    ' length1 — for non-square arrays this looks like it can write out of range
    ' or skip slots (expected stride length0?). Confirm against callers.
    Dim length0 = array2D.GetLength(0)
    Dim length1 = array2D.GetLength(1)
    Dim arr!(length0 * length1 - 1)
    For i = 0 To length1 - 1
        For j = 0 To length0 - 1
            arr(i * length1 + j) = CSng(array2D(j, i))
        Next j
    Next i
    Return arr
End Function

' Inspired from:
' https://stackoverflow.com/questions/26291609/converting-jagged-array-to-2d-array-c-sharp
Public Shared Function TransformArrayTo2DArray(Of T)(ByVal source() As T, firstDim%, secondDim%) As T(,)
    Try
        Dim result = New T(firstDim - 1, secondDim - 1) {}
        Dim i% = 0
        Do While i < firstDim
            Dim j% = 0
            Do While j < secondDim
                result(i, j) = source(i * secondDim + j)
                j += 1
            Loop
            i += 1
        Loop
        Return result
    Catch ex As InvalidOperationException
        ' NOTE(review): a size mismatch raises IndexOutOfRangeException, not
        ' InvalidOperationException — this handler may never fire; verify
        Throw New InvalidOperationException("Wrong size!")
    End Try
End Function

' Transpose a 2D Double array
Public Shared Function Swap2DArray(array2D#(,)) As Double(,)
    Dim length0 = array2D.GetLength(0)
    Dim length1 = array2D.GetLength(1)
    Dim arr#(length1 - 1, length0 - 1)
    For i = 0 To length0 - 1
        For j = 0 To length1 - 1
            arr(j, i) = array2D(i, j)
        Next j
    Next i
    Return arr
End Function

' True when both values are equal after rounding the difference to 'dec' decimals
' (two NaN values are considered equal)
Public Shared Function Compare(val1#, val2#, dec%) As Boolean
    If Double.IsNaN(val1) AndAlso Double.IsNaN(val2) Then Return True
    Dim delta# = Math.Abs(Math.Round(val2 - val1, dec))
    If delta = 0 Then Return True
    Return False
End Function

' Exact element-wise comparison of two 2D Single arrays (False when either is Nothing)
Public Shared Function CompareArray(array2Da!(,), array2Db!(,)) As Boolean
    If IsNothing(array2Da) Then Return False
    If IsNothing(array2Db) Then Return False
    Dim length0a = array2Da.GetLength(0)
    Dim length1a = array2Da.GetLength(1)
    Dim length0b = array2Db.GetLength(0)
    Dim length1b = array2Db.GetLength(1)
    If length0a <> length0b Then Return False
    If length1a <> length1b Then Return False
    For i = 0 To length0a - 1
        For j = 0 To length1a - 1
            If array2Da(i, j) <> array2Db(i, j) Then Return False
        Next
    Next
    Return True
End Function

' Exact element-wise comparison of two 1D Double arrays (False when either is Nothing)
Public Shared Function CompareArray1D(array1Da#(), array1Db#()) As Boolean
    If IsNothing(array1Da) Then Return False
    If IsNothing(array1Db) Then Return False
    Dim length0a = array1Da.GetLength(0)
    Dim length0b = array1Db.GetLength(0)
    If length0a <> length0b Then Return False
    For i = 0 To length0a - 1
        Dim va = array1Da(i)
        Dim vb = array1Db(i)
        If va <> vb Then
            Return False
        End If
    Next
    Return True
End Function

' Exact comparison of two 1D Single arrays, optionally limited to lengthMax items,
' reading the second array from offset startingFrom
Public Shared Function CompareArray1DSingle(array1Da!(), array1Db!(), Optional lengthMax% = 0, Optional startingFrom% = 0) As Boolean
    If IsNothing(array1Da) Then Return False
    If IsNothing(array1Db) Then Return False
    Dim length0a = array1Da.GetLength(0)
    Dim length0b = array1Db.GetLength(0)
    If lengthMax > 0 AndAlso length0a > lengthMax Then length0a = lengthMax
    If lengthMax > 0 AndAlso length0b > lengthMax Then length0b = lengthMax
    If length0a <> length0b Then Return False
    For i = 0 To length0a - 1
        Dim va = array1Da(i)
        ' NOTE(review): i + startingFrom can exceed array1Db's bounds when
        ' startingFrom > 0 — callers must guarantee sufficient length
        Dim vb = array1Db(i + startingFrom)
        If va <> vb Then
            Return False
        End If
    Next
    Return True
End Function

' Read the [Description("...")] attribute of an enum value, falling back to its name
Public Shared Function ReadEnumDescription$(myEnum As [Enum])
    Dim fi As Reflection.FieldInfo = myEnum.GetType().GetField(myEnum.ToString())
    Dim attr() As DescriptionAttribute = DirectCast(
        fi.GetCustomAttributes(GetType(DescriptionAttribute), False), DescriptionAttribute())
    If attr.Length > 0 Then
        Return attr(0).Description
    Else
        Return myEnum.ToString()
    End If
End Function

' Format a Single array as space-separated values with 2 decimals
Public Shared Function ArrayToString$(singleArray!())
    Dim sb As New StringBuilder
    For i = 0 To singleArray.GetUpperBound(0)
        sb.Append(singleArray(i).ToString("0.00") & " ")
    Next
    Return sb.ToString
End Function

' Format an Integer array as a comma-separated, brace-delimited list
Public Shared Function ArrayToString$(intArray%())
    Dim sb As New StringBuilder("{")
    Dim upB = intArray.GetUpperBound(0)
    For i = 0 To upB
        sb.Append(intArray(i))
        If i < upB Then sb.Append(", ")
    Next
    sb.Append("}")
    Return
sb.ToString
End Function

End Class

' ----- next file in the concatenated source -----
clsRndExtension.vb
' From https://github.com/nlabiris/perceptrons : C# -> VB .NET conversion

Imports System.Runtime.CompilerServices ' Extension

''' <summary>
''' Add functions to standard Random generator
''' </summary>
Public Module modExt

''' <summary>
''' Draw a random Double in [minValue, maxValue) whose absolute value is at least
''' minAbsValue (re-draws via GoTo until the constraint holds).
''' </summary>
<Extension>
Public Function NextDoubleGreaterThanZero#(rand As Random, minValue#, maxValue#, minAbsValue#)
Retry:
    Dim r# = rand.NextDouble * Math.Abs(maxValue - minValue) + minValue
    If Math.Abs(r) < minAbsValue Then GoTo Retry
    Return r
End Function

''' <summary>
''' Add NextDouble function to standard Random generator:
''' random Double in [minValue, minValue + |maxValue - minValue|)
''' </summary>
<Extension>
Public Function NextDouble#(rand As Random, minValue#, maxValue#)
    Return rand.NextDouble * Math.Abs(maxValue - minValue) + minValue
End Function

''' <summary>
''' Add NextFloat function to standard Random generator: random Single in [0, 1)
''' </summary>
<Extension>
Public Function NextFloat!(rand As Random)
    Return CSng(rand.NextDouble)
End Function

''' <summary>
''' Add NextFloat function to standard Random generator: random Single in [0, maxValue)
''' </summary>
<Extension>
Public Function NextFloat!(rand As Random, maxValue!)
    Return CSng(rand.NextDouble * maxValue)
End Function

''' <summary>
''' Add NextFloat function to standard Random generator:
''' random Single in [minValue, minValue + |maxValue - minValue|)
''' </summary>
<Extension>
Public Function NextFloat!(rand As Random, minValue!, maxValue!)
Return CSng(rand.NextDouble * Math.Abs(CDbl(maxValue - minValue))) + minValue End Function End Module mod1XOR.vb Public Module mod1XOR Public ReadOnly m_inputArrayXOR!(,) = { {1, 0}, {0, 0}, {0, 1}, {1, 1}} Public ReadOnly m_targetArrayXOR!(,) = { {1}, {0}, {1}, {0}} Public ReadOnly m_inputArrayXORAnalog!(,) = { {1, 0}, {0, 0}, {0, 1}, {1, 1}, {0.9, 0.1}, {0.1, 0.1}, {0.1, 0.9}, {0.9, 0.9}, {0.5, 0.5}} Public ReadOnly m_targetArrayXORAnalog!(,) = { {1}, {0}, {1}, {0}, {0.9}, {0.1}, {0.9}, {0.1}, {0.5}} Public ReadOnly m_inputArrayXOR90PC!(,) = { {0.9!, 0.1!}, {0.1!, 0.1!}, {0.1!, 0.9!}, {0.9!, 0.9!}} Public ReadOnly m_inputArrayXOR80PC!(,) = { {0.8!, 0.2!}, {0.2!, 0.2!}, {0.2!, 0.8!}, {0.8!, 0.8!}} Public ReadOnly m_inputArrayXOR70PC!(,) = { {0.7!, 0.3!}, {0.3!, 0.3!}, {0.3!, 0.7!}, {0.7!, 0.7!}} End Module mod2XOR.vb Public Module mod2XOR Public ReadOnly m_inputArray2XOR!(,) = { {1, 0, 1, 0}, {1, 0, 0, 0}, {1, 0, 0, 1}, {1, 0, 1, 1}, {0, 0, 1, 0}, {0, 0, 0, 0}, {0, 0, 0, 1}, {0, 0, 1, 1}, {0, 1, 1, 0}, {0, 1, 0, 0}, {0, 1, 0, 1}, {0, 1, 1, 1}, {1, 1, 1, 0}, {1, 1, 0, 0}, {1, 1, 0, 1}, {1, 1, 1, 1}} Public ReadOnly m_targetArray2XOR!(,) = { {1, 1}, {1, 0}, {1, 1}, {1, 0}, {0, 1}, {0, 0}, {0, 1}, {0, 0}, {1, 1}, {1, 0}, {1, 1}, {1, 0}, {0, 1}, {0, 0}, {0, 1}, {0, 0}} End Module mod3XOR.vb Public Module mod3XOR Public ReadOnly m_inputArray3XOR!(,) = { {1, 0, 1, 0, 1, 0}, {1, 0, 1, 0, 0, 0}, {1, 0, 1, 0, 0, 1}, {1, 0, 1, 0, 1, 1}, {1, 0, 0, 0, 1, 0}, {1, 0, 0, 0, 0, 0}, {1, 0, 0, 0, 0, 1}, {1, 0, 0, 0, 1, 1}, {1, 0, 0, 1, 1, 0}, {1, 0, 0, 1, 0, 0}, {1, 0, 0, 1, 0, 1}, {1, 0, 0, 1, 1, 1}, {1, 0, 1, 1, 1, 0}, {1, 0, 1, 1, 0, 0}, {1, 0, 1, 1, 0, 1}, {1, 0, 1, 1, 1, 1}, {0, 0, 1, 0, 1, 0}, {0, 0, 1, 0, 0, 0}, {0, 0, 1, 0, 0, 1}, {0, 0, 1, 0, 1, 1}, {0, 0, 0, 0, 1, 0}, {0, 0, 0, 0, 0, 0}, {0, 0, 0, 0, 0, 1}, {0, 0, 0, 0, 1, 1}, {0, 0, 0, 1, 1, 0}, {0, 0, 0, 1, 0, 0}, {0, 0, 0, 1, 0, 1}, {0, 0, 0, 1, 1, 1}, {0, 0, 1, 1, 1, 0}, {0, 0, 1, 1, 0, 0}, {0, 0, 1, 1, 0, 1}, {0, 0, 
1, 1, 1, 1}, {0, 1, 1, 0, 1, 0}, {0, 1, 1, 0, 0, 0}, {0, 1, 1, 0, 0, 1}, {0, 1, 1, 0, 1, 1}, {0, 1, 0, 0, 1, 0}, {0, 1, 0, 0, 0, 0}, {0, 1, 0, 0, 0, 1}, {0, 1, 0, 0, 1, 1}, {0, 1, 0, 1, 1, 0}, {0, 1, 0, 1, 0, 0}, {0, 1, 0, 1, 0, 1}, {0, 1, 0, 1, 1, 1}, {0, 1, 1, 1, 1, 0}, {0, 1, 1, 1, 0, 0}, {0, 1, 1, 1, 0, 1}, {0, 1, 1, 1, 1, 1}, {1, 1, 1, 0, 1, 0}, {1, 1, 1, 0, 0, 0}, {1, 1, 1, 0, 0, 1}, {1, 1, 1, 0, 1, 1}, {1, 1, 0, 0, 1, 0}, {1, 1, 0, 0, 0, 0}, {1, 1, 0, 0, 0, 1}, {1, 1, 0, 0, 1, 1}, {1, 1, 0, 1, 1, 0}, {1, 1, 0, 1, 0, 0}, {1, 1, 0, 1, 0, 1}, {1, 1, 0, 1, 1, 1}, {1, 1, 1, 1, 1, 0}, {1, 1, 1, 1, 0, 0}, {1, 1, 1, 1, 0, 1}, {1, 1, 1, 1, 1, 1}} Public ReadOnly m_targetArray3XOR!(,) = { {1, 1, 1}, {1, 1, 0}, {1, 1, 1}, {1, 1, 0}, {1, 0, 1}, {1, 0, 0}, {1, 0, 1}, {1, 0, 0}, {1, 1, 1}, {1, 1, 0}, {1, 1, 1}, {1, 1, 0}, {1, 0, 1}, {1, 0, 0}, {1, 0, 1}, {1, 0, 0}, {0, 1, 1}, {0, 1, 0}, {0, 1, 1}, {0, 1, 0}, {0, 0, 1}, {0, 0, 0}, {0, 0, 1}, {0, 0, 0}, {0, 1, 1}, {0, 1, 0}, {0, 1, 1}, {0, 1, 0}, {0, 0, 1}, {0, 0, 0}, {0, 0, 1}, {0, 0, 0}, {1, 1, 1}, {1, 1, 0}, {1, 1, 1}, {1, 1, 0}, {1, 0, 1}, {1, 0, 0}, {1, 0, 1}, {1, 0, 0}, {1, 1, 1}, {1, 1, 0}, {1, 1, 1}, {1, 1, 0}, {1, 0, 1}, {1, 0, 0}, {1, 0, 1}, {1, 0, 0}, {0, 1, 1}, {0, 1, 0}, {0, 1, 1}, {0, 1, 0}, {0, 0, 1}, {0, 0, 0}, {0, 0, 1}, {0, 0, 0}, {0, 1, 1}, {0, 1, 0}, {0, 1, 1}, {0, 1, 0}, {0, 0, 1}, {0, 0, 0}, {0, 0, 1}, {0, 0, 0}} End Module modIrisFlowerInputCorrected.vb Public Module modIrisFlowerInput ' Iris flower data set ' https://en.wikipedia.org/wiki/Iris_flower_data_set ' http://archive.ics.uci.edu/ml/datasets/Iris ' 1. sepal length in cm ' 2. sepal width in cm ' 3. petal length in cm ' 4. petal width in cm ' 5. 
class: ' Analog data set: a single output neuron for all three classes: ' 0 : -- Iris Setosa ' 0.5 : -- Iris Versicolour ' 1 : -- Iris Virginica ' Logical data set: one output neuron for each class, so three output neurons: ' 100 : -- Iris Setosa ' 010 : -- Iris Versicolour ' 001 : -- Iris Virginica ' https://en.wikipedia.org/wiki/Iris_flower_data_set#External_links ' Corrected: Contains two errors which are documented ' 4.9, 3.1, 1.5, 0.1 -> 4.9, 3.1, 1.5, 0.2 ' 4.9, 3.1, 1.5, 0.1 -> 4.9, 3.6, 1.4, 0.1 Public ReadOnly m_inputArrayIrisFlower!(,) = { {5.1, 3.5, 1.4, 0.2}, {4.9, 3, 1.4, 0.2}, {4.7, 3.2, 1.3, 0.2}, {4.6, 3.1, 1.5, 0.2}, {5, 3.6, 1.4, 0.2}, {5.4, 3.9, 1.7, 0.4}, {4.6, 3.4, 1.4, 0.3}, {5, 3.4, 1.5, 0.2}, {4.4, 2.9, 1.4, 0.2}, {4.9, 3.1, 1.5, 0.1}, {5.4, 3.7, 1.5, 0.2}, {4.8, 3.4, 1.6, 0.2}, {4.8, 3, 1.4, 0.1}, {4.3, 3, 1.1, 0.1}, {5.8, 4, 1.2, 0.2}, {5.7, 4.4, 1.5, 0.4}, {5.4, 3.9, 1.3, 0.4}, {5.1, 3.5, 1.4, 0.3}, {5.7, 3.8, 1.7, 0.3}, {5.1, 3.8, 1.5, 0.3}, {5.4, 3.4, 1.7, 0.2}, {5.1, 3.7, 1.5, 0.4}, {4.6, 3.6, 1, 0.2}, {5.1, 3.3, 1.7, 0.5}, {4.8, 3.4, 1.9, 0.2}, {5, 3, 1.6, 0.2}, {5, 3.4, 1.6, 0.4}, {5.2, 3.5, 1.5, 0.2}, {5.2, 3.4, 1.4, 0.2}, {4.7, 3.2, 1.6, 0.2}, {4.8, 3.1, 1.6, 0.2}, {5.4, 3.4, 1.5, 0.4}, {5.2, 4.1, 1.5, 0.1}, {5.5, 4.2, 1.4, 0.2}, {4.9, 3.1, 1.5, 0.2}, {5, 3.2, 1.2, 0.2}, {5.5, 3.5, 1.3, 0.2}, {4.9, 3.6, 1.4, 0.1}, {4.4, 3, 1.3, 0.2}, {5.1, 3.4, 1.5, 0.2}, {5, 3.5, 1.3, 0.3}, {4.5, 2.3, 1.3, 0.3}, {4.4, 3.2, 1.3, 0.2}, {5, 3.5, 1.6, 0.6}, {5.1, 3.8, 1.9, 0.4}, {4.8, 3, 1.4, 0.3}, {5.1, 3.8, 1.6, 0.2}, {4.6, 3.2, 1.4, 0.2}, {5.3, 3.7, 1.5, 0.2}, {5, 3.3, 1.4, 0.2}, {7, 3.2, 4.7, 1.4}, {6.4, 3.2, 4.5, 1.5}, {6.9, 3.1, 4.9, 1.5}, {5.5, 2.3, 4, 1.3}, {6.5, 2.8, 4.6, 1.5}, {5.7, 2.8, 4.5, 1.3}, {6.3, 3.3, 4.7, 1.6}, {4.9, 2.4, 3.3, 1}, {6.6, 2.9, 4.6, 1.3}, {5.2, 2.7, 3.9, 1.4}, {5, 2, 3.5, 1}, {5.9, 3, 4.2, 1.5}, {6, 2.2, 4, 1}, {6.1, 2.9, 4.7, 1.4}, {5.6, 2.9, 3.6, 1.3}, {6.7, 3.1, 4.4, 1.4}, {5.6, 3, 4.5, 1.5}, {5.8, 2.7, 4.1, 1}, 
{6.2, 2.2, 4.5, 1.5}, {5.6, 2.5, 3.9, 1.1}, {5.9, 3.2, 4.8, 1.8}, {6.1, 2.8, 4, 1.3}, {6.3, 2.5, 4.9, 1.5}, {6.1, 2.8, 4.7, 1.2}, {6.4, 2.9, 4.3, 1.3}, {6.6, 3, 4.4, 1.4}, {6.8, 2.8, 4.8, 1.4}, {6.7, 3, 5, 1.7}, {6, 2.9, 4.5, 1.5}, {5.7, 2.6, 3.5, 1}, {5.5, 2.4, 3.8, 1.1}, {5.5, 2.4, 3.7, 1}, {5.8, 2.7, 3.9, 1.2}, {6, 2.7, 5.1, 1.6}, {5.4, 3, 4.5, 1.5}, {6, 3.4, 4.5, 1.6}, {6.7, 3.1, 4.7, 1.5}, {6.3, 2.3, 4.4, 1.3}, {5.6, 3, 4.1, 1.3}, {5.5, 2.5, 4, 1.3}, {5.5, 2.6, 4.4, 1.2}, {6.1, 3, 4.6, 1.4}, {5.8, 2.6, 4, 1.2}, {5, 2.3, 3.3, 1}, {5.6, 2.7, 4.2, 1.3}, {5.7, 3, 4.2, 1.2}, {5.7, 2.9, 4.2, 1.3}, {6.2, 2.9, 4.3, 1.3}, {5.1, 2.5, 3, 1.1}, {5.7, 2.8, 4.1, 1.3}, {6.3, 3.3, 6, 2.5}, {5.8, 2.7, 5.1, 1.9}, {7.1, 3, 5.9, 2.1}, {6.3, 2.9, 5.6, 1.8}, {6.5, 3, 5.8, 2.2}, {7.6, 3, 6.6, 2.1}, {4.9, 2.5, 4.5, 1.7}, {7.3, 2.9, 6.3, 1.8}, {6.7, 2.5, 5.8, 1.8}, {7.2, 3.6, 6.1, 2.5}, {6.5, 3.2, 5.1, 2}, {6.4, 2.7, 5.3, 1.9}, {6.8, 3, 5.5, 2.1}, {5.7, 2.5, 5, 2}, {5.8, 2.8, 5.1, 2.4}, {6.4, 3.2, 5.3, 2.3}, {6.5, 3, 5.5, 1.8}, {7.7, 3.8, 6.7, 2.2}, {7.7, 2.6, 6.9, 2.3}, {6, 2.2, 5, 1.5}, {6.9, 3.2, 5.7, 2.3}, {5.6, 2.8, 4.9, 2}, {7.7, 2.8, 6.7, 2}, {6.3, 2.7, 4.9, 1.8}, {6.7, 3.3, 5.7, 2.1}, {7.2, 3.2, 6, 1.8}, {6.2, 2.8, 4.8, 1.8}, {6.1, 3, 4.9, 1.8}, {6.4, 2.8, 5.6, 2.1}, {7.2, 3, 5.8, 1.6}, {7.4, 2.8, 6.1, 1.9}, {7.9, 3.8, 6.4, 2}, {6.4, 2.8, 5.6, 2.2}, {6.3, 2.8, 5.1, 1.5}, {6.1, 2.6, 5.6, 1.4}, {7.7, 3, 6.1, 2.3}, {6.3, 3.4, 5.6, 2.4}, {6.4, 3.1, 5.5, 1.8}, {6, 3, 4.8, 1.8}, {6.9, 3.1, 5.4, 2.1}, {6.7, 3.1, 5.6, 2.4}, {6.9, 3.1, 5.1, 2.3}, {5.8, 2.7, 5.1, 1.9}, {6.8, 3.2, 5.9, 2.3}, {6.7, 3.3, 5.7, 2.5}, {6.7, 3, 5.2, 2.3}, {6.3, 2.5, 5, 1.9}, {6.5, 3, 5.2, 2}, {6.2, 3.4, 5.4, 2.3}, {5.9, 3, 5.1, 1.8}} ' Dataset split in train set and test set: ' https://github.com/vmitt/iris_dataset_prediction_using_tensorflow ' https://en.wikipedia.org/wiki/Iris_flower_data_set#External_links ' Corrected: Contains two errors which are documented Public ReadOnly m_inputArrayIrisFlowerTrain!(,) 
= { {6.4, 2.8, 5.6, 2.2}, {5.0, 2.3, 3.3, 1.0}, {4.9, 2.5, 4.5, 1.7}, {4.9, 3.1, 1.5, 0.1}, {5.7, 3.8, 1.7, 0.3}, {4.4, 3.2, 1.3, 0.2}, {5.4, 3.4, 1.5, 0.4}, {6.9, 3.1, 5.1, 2.3}, {6.7, 3.1, 4.4, 1.4}, {5.1, 3.7, 1.5, 0.4}, {5.2, 2.7, 3.9, 1.4}, {6.9, 3.1, 4.9, 1.5}, {5.8, 4.0, 1.2, 0.2}, {5.4, 3.9, 1.7, 0.4}, {7.7, 3.8, 6.7, 2.2}, {6.3, 3.3, 4.7, 1.6}, {6.8, 3.2, 5.9, 2.3}, {7.6, 3.0, 6.6, 2.1}, {6.4, 3.2, 5.3, 2.3}, {5.7, 4.4, 1.5, 0.4}, {6.7, 3.3, 5.7, 2.1}, {6.4, 2.8, 5.6, 2.1}, {5.4, 3.9, 1.3, 0.4}, {6.1, 2.6, 5.6, 1.4}, {7.2, 3.0, 5.8, 1.6}, {5.2, 3.5, 1.5, 0.2}, {5.8, 2.6, 4.0, 1.2}, {5.9, 3.0, 5.1, 1.8}, {5.4, 3.0, 4.5, 1.5}, {6.7, 3.0, 5.0, 1.7}, {6.3, 2.3, 4.4, 1.3}, {5.1, 2.5, 3.0, 1.1}, {6.4, 3.2, 4.5, 1.5}, {6.8, 3.0, 5.5, 2.1}, {6.2, 2.8, 4.8, 1.8}, {6.9, 3.2, 5.7, 2.3}, {6.5, 3.2, 5.1, 2.0}, {5.8, 2.8, 5.1, 2.4}, {5.1, 3.8, 1.5, 0.3}, {4.8, 3.0, 1.4, 0.3}, {7.9, 3.8, 6.4, 2.0}, {5.8, 2.7, 5.1, 1.9}, {6.7, 3.0, 5.2, 2.3}, {5.1, 3.8, 1.9, 0.4}, {4.7, 3.2, 1.6, 0.2}, {6.0, 2.2, 5.0, 1.5}, {4.8, 3.4, 1.6, 0.2}, {7.7, 2.6, 6.9, 2.3}, {4.6, 3.6, 1.0, 0.2}, {7.2, 3.2, 6.0, 1.8}, {5.0, 3.3, 1.4, 0.2}, {6.6, 3.0, 4.4, 1.4}, {6.1, 2.8, 4.0, 1.3}, {5.0, 3.2, 1.2, 0.2}, {7.0, 3.2, 4.7, 1.4}, {6.0, 3.0, 4.8, 1.8}, {7.4, 2.8, 6.1, 1.9}, {5.8, 2.7, 5.1, 1.9}, {6.2, 3.4, 5.4, 2.3}, {5.0, 2.0, 3.5, 1.0}, {5.6, 2.5, 3.9, 1.1}, {6.7, 3.1, 5.6, 2.4}, {6.3, 2.5, 5.0, 1.9}, {6.4, 3.1, 5.5, 1.8}, {6.2, 2.2, 4.5, 1.5}, {7.3, 2.9, 6.3, 1.8}, {4.4, 3.0, 1.3, 0.2}, {7.2, 3.6, 6.1, 2.5}, {6.5, 3.0, 5.5, 1.8}, {5.0, 3.4, 1.5, 0.2}, {4.7, 3.2, 1.3, 0.2}, {6.6, 2.9, 4.6, 1.3}, {5.5, 3.5, 1.3, 0.2}, {7.7, 3.0, 6.1, 2.3}, {6.1, 3.0, 4.9, 1.8}, {4.9, 3.1, 1.5, 0.2}, {5.5, 2.4, 3.8, 1.1}, {5.7, 2.9, 4.2, 1.3}, {6.0, 2.9, 4.5, 1.5}, {6.4, 2.7, 5.3, 1.9}, {5.4, 3.7, 1.5, 0.2}, {6.1, 2.9, 4.7, 1.4}, {6.5, 2.8, 4.6, 1.5}, {5.6, 2.7, 4.2, 1.3}, {6.3, 3.4, 5.6, 2.4}, {4.9, 3.6, 1.4, 0.1}, {6.8, 2.8, 4.8, 1.4}, {5.7, 2.8, 4.5, 1.3}, {6.0, 2.7, 5.1, 1.6}, {5.0, 3.5, 1.3, 0.3}, {6.5, 3.0, 5.2, 
2.0}, {6.1, 2.8, 4.7, 1.2}, {5.1, 3.5, 1.4, 0.3}, {4.6, 3.1, 1.5, 0.2}, {6.5, 3.0, 5.8, 2.2}, {4.6, 3.4, 1.4, 0.3}, {4.6, 3.2, 1.4, 0.2}, {7.7, 2.8, 6.7, 2.0}, {5.9, 3.2, 4.8, 1.8}, {5.1, 3.8, 1.6, 0.2}, {4.9, 3.0, 1.4, 0.2}, {4.9, 2.4, 3.3, 1.0}, {4.5, 2.3, 1.3, 0.3}, {5.8, 2.7, 4.1, 1.0}, {5.0, 3.4, 1.6, 0.4}, {5.2, 3.4, 1.4, 0.2}, {5.3, 3.7, 1.5, 0.2}, {5.0, 3.6, 1.4, 0.2}, {5.6, 2.9, 3.6, 1.3}, {4.8, 3.1, 1.6, 0.2}, {6.3, 2.7, 4.9, 1.8}, {5.7, 2.8, 4.1, 1.3}, {5.0, 3.0, 1.6, 0.2}, {6.3, 3.3, 6.0, 2.5}, {5.0, 3.5, 1.6, 0.6}, {5.5, 2.6, 4.4, 1.2}, {5.7, 3.0, 4.2, 1.2}, {4.4, 2.9, 1.4, 0.2}, {4.8, 3.0, 1.4, 0.1}, {5.5, 2.4, 3.7, 1.0}} ' Same array, no corrections here: 'Public ReadOnly m_inputArrayIrisFlowerTest!(,) = { ' {5.9, 3.0, 4.2, 1.5}, ' {6.9, 3.1, 5.4, 2.1}, ' {5.1, 3.3, 1.7, 0.5}, ' {6.0, 3.4, 4.5, 1.6}, ' {5.5, 2.5, 4.0, 1.3}, ' {6.2, 2.9, 4.3, 1.3}, ' {5.5, 4.2, 1.4, 0.2}, ' {6.3, 2.8, 5.1, 1.5}, ' {5.6, 3.0, 4.1, 1.3}, ' {6.7, 2.5, 5.8, 1.8}, ' {7.1, 3.0, 5.9, 2.1}, ' {4.3, 3.0, 1.1, 0.1}, ' {5.6, 2.8, 4.9, 2.0}, ' {5.5, 2.3, 4.0, 1.3}, ' {6.0, 2.2, 4.0, 1.0}, ' {5.1, 3.5, 1.4, 0.2}, ' {5.7, 2.6, 3.5, 1.0}, ' {4.8, 3.4, 1.9, 0.2}, ' {5.1, 3.4, 1.5, 0.2}, ' {5.7, 2.5, 5.0, 2.0}, ' {5.4, 3.4, 1.7, 0.2}, ' {5.6, 3.0, 4.5, 1.5}, ' {6.3, 2.9, 5.6, 1.8}, ' {6.3, 2.5, 4.9, 1.5}, ' {5.8, 2.7, 3.9, 1.2}, ' {6.1, 3.0, 4.6, 1.4}, ' {5.2, 4.1, 1.5, 0.1}, ' {6.7, 3.1, 4.7, 1.5}, ' {6.7, 3.3, 5.7, 2.5}, ' {6.4, 2.9, 4.3, 1.3}} End Module modIrisFlowerInputOriginal.vb Public Module modIrisFlowerInputOriginal ' Iris flower data set ' https://en.wikipedia.org/wiki/Iris_flower_data_set ' http://archive.ics.uci.edu/ml/datasets/Iris ' https://en.wikipedia.org/wiki/Iris_flower_data_set#External_links ' Original: Not corrected ' Contains two errors which are documented ' 4.9, 3.1, 1.5, 0.1 -> 4.9, 3.1, 1.5, 0.2 ' 4.9, 3.1, 1.5, 0.1 -> 4.9, 3.6, 1.4, 0.1 Public ReadOnly m_inputArrayIrisFlowerOriginal!(,) = { {5.1, 3.5, 1.4, 0.2}, {4.9, 3, 1.4, 0.2}, {4.7, 3.2, 1.3, 0.2}, 
{4.6, 3.1, 1.5, 0.2}, {5, 3.6, 1.4, 0.2}, {5.4, 3.9, 1.7, 0.4}, {4.6, 3.4, 1.4, 0.3}, {5, 3.4, 1.5, 0.2}, {4.4, 2.9, 1.4, 0.2}, {4.9, 3.1, 1.5, 0.1}, {5.4, 3.7, 1.5, 0.2}, {4.8, 3.4, 1.6, 0.2}, {4.8, 3, 1.4, 0.1}, {4.3, 3, 1.1, 0.1}, {5.8, 4, 1.2, 0.2}, {5.7, 4.4, 1.5, 0.4}, {5.4, 3.9, 1.3, 0.4}, {5.1, 3.5, 1.4, 0.3}, {5.7, 3.8, 1.7, 0.3}, {5.1, 3.8, 1.5, 0.3}, {5.4, 3.4, 1.7, 0.2}, {5.1, 3.7, 1.5, 0.4}, {4.6, 3.6, 1, 0.2}, {5.1, 3.3, 1.7, 0.5}, {4.8, 3.4, 1.9, 0.2}, {5, 3, 1.6, 0.2}, {5, 3.4, 1.6, 0.4}, {5.2, 3.5, 1.5, 0.2}, {5.2, 3.4, 1.4, 0.2}, {4.7, 3.2, 1.6, 0.2}, {4.8, 3.1, 1.6, 0.2}, {5.4, 3.4, 1.5, 0.4}, {5.2, 4.1, 1.5, 0.1}, {5.5, 4.2, 1.4, 0.2}, {4.9, 3.1, 1.5, 0.1}, {5, 3.2, 1.2, 0.2}, {5.5, 3.5, 1.3, 0.2}, {4.9, 3.1, 1.5, 0.1}, {4.4, 3, 1.3, 0.2}, {5.1, 3.4, 1.5, 0.2}, {5, 3.5, 1.3, 0.3}, {4.5, 2.3, 1.3, 0.3}, {4.4, 3.2, 1.3, 0.2}, {5, 3.5, 1.6, 0.6}, {5.1, 3.8, 1.9, 0.4}, {4.8, 3, 1.4, 0.3}, {5.1, 3.8, 1.6, 0.2}, {4.6, 3.2, 1.4, 0.2}, {5.3, 3.7, 1.5, 0.2}, {5, 3.3, 1.4, 0.2}, {7, 3.2, 4.7, 1.4}, {6.4, 3.2, 4.5, 1.5}, {6.9, 3.1, 4.9, 1.5}, {5.5, 2.3, 4, 1.3}, {6.5, 2.8, 4.6, 1.5}, {5.7, 2.8, 4.5, 1.3}, {6.3, 3.3, 4.7, 1.6}, {4.9, 2.4, 3.3, 1}, {6.6, 2.9, 4.6, 1.3}, {5.2, 2.7, 3.9, 1.4}, {5, 2, 3.5, 1}, {5.9, 3, 4.2, 1.5}, {6, 2.2, 4, 1}, {6.1, 2.9, 4.7, 1.4}, {5.6, 2.9, 3.6, 1.3}, {6.7, 3.1, 4.4, 1.4}, {5.6, 3, 4.5, 1.5}, {5.8, 2.7, 4.1, 1}, {6.2, 2.2, 4.5, 1.5}, {5.6, 2.5, 3.9, 1.1}, {5.9, 3.2, 4.8, 1.8}, {6.1, 2.8, 4, 1.3}, {6.3, 2.5, 4.9, 1.5}, {6.1, 2.8, 4.7, 1.2}, {6.4, 2.9, 4.3, 1.3}, {6.6, 3, 4.4, 1.4}, {6.8, 2.8, 4.8, 1.4}, {6.7, 3, 5, 1.7}, {6, 2.9, 4.5, 1.5}, {5.7, 2.6, 3.5, 1}, {5.5, 2.4, 3.8, 1.1}, {5.5, 2.4, 3.7, 1}, {5.8, 2.7, 3.9, 1.2}, {6, 2.7, 5.1, 1.6}, {5.4, 3, 4.5, 1.5}, {6, 3.4, 4.5, 1.6}, {6.7, 3.1, 4.7, 1.5}, {6.3, 2.3, 4.4, 1.3}, {5.6, 3, 4.1, 1.3}, {5.5, 2.5, 4, 1.3}, {5.5, 2.6, 4.4, 1.2}, {6.1, 3, 4.6, 1.4}, {5.8, 2.6, 4, 1.2}, {5, 2.3, 3.3, 1}, {5.6, 2.7, 4.2, 1.3}, {5.7, 3, 4.2, 1.2}, {5.7, 2.9, 4.2, 1.3}, {6.2, 2.9, 4.3, 
1.3}, {5.1, 2.5, 3, 1.1}, {5.7, 2.8, 4.1, 1.3}, {6.3, 3.3, 6, 2.5}, {5.8, 2.7, 5.1, 1.9}, {7.1, 3, 5.9, 2.1}, {6.3, 2.9, 5.6, 1.8}, {6.5, 3, 5.8, 2.2}, {7.6, 3, 6.6, 2.1}, {4.9, 2.5, 4.5, 1.7}, {7.3, 2.9, 6.3, 1.8}, {6.7, 2.5, 5.8, 1.8}, {7.2, 3.6, 6.1, 2.5}, {6.5, 3.2, 5.1, 2}, {6.4, 2.7, 5.3, 1.9}, {6.8, 3, 5.5, 2.1}, {5.7, 2.5, 5, 2}, {5.8, 2.8, 5.1, 2.4}, {6.4, 3.2, 5.3, 2.3}, {6.5, 3, 5.5, 1.8}, {7.7, 3.8, 6.7, 2.2}, {7.7, 2.6, 6.9, 2.3}, {6, 2.2, 5, 1.5}, {6.9, 3.2, 5.7, 2.3}, {5.6, 2.8, 4.9, 2}, {7.7, 2.8, 6.7, 2}, {6.3, 2.7, 4.9, 1.8}, {6.7, 3.3, 5.7, 2.1}, {7.2, 3.2, 6, 1.8}, {6.2, 2.8, 4.8, 1.8}, {6.1, 3, 4.9, 1.8}, {6.4, 2.8, 5.6, 2.1}, {7.2, 3, 5.8, 1.6}, {7.4, 2.8, 6.1, 1.9}, {7.9, 3.8, 6.4, 2}, {6.4, 2.8, 5.6, 2.2}, {6.3, 2.8, 5.1, 1.5}, {6.1, 2.6, 5.6, 1.4}, {7.7, 3, 6.1, 2.3}, {6.3, 3.4, 5.6, 2.4}, {6.4, 3.1, 5.5, 1.8}, {6, 3, 4.8, 1.8}, {6.9, 3.1, 5.4, 2.1}, {6.7, 3.1, 5.6, 2.4}, {6.9, 3.1, 5.1, 2.3}, {5.8, 2.7, 5.1, 1.9}, {6.8, 3.2, 5.9, 2.3}, {6.7, 3.3, 5.7, 2.5}, {6.7, 3, 5.2, 2.3}, {6.3, 2.5, 5, 1.9}, {6.5, 3, 5.2, 2}, {6.2, 3.4, 5.4, 2.3}, {5.9, 3, 5.1, 1.8}} ' Dataset split in train set and test set: ' https://github.com/vmitt/iris_dataset_prediction_using_tensorflow ' Original: Not corrected Public ReadOnly m_inputArrayIrisFlowerTrainOriginal!(,) = { {6.4, 2.8, 5.6, 2.2}, {5.0, 2.3, 3.3, 1.0}, {4.9, 2.5, 4.5, 1.7}, {4.9, 3.1, 1.5, 0.1}, {5.7, 3.8, 1.7, 0.3}, {4.4, 3.2, 1.3, 0.2}, {5.4, 3.4, 1.5, 0.4}, {6.9, 3.1, 5.1, 2.3}, {6.7, 3.1, 4.4, 1.4}, {5.1, 3.7, 1.5, 0.4}, {5.2, 2.7, 3.9, 1.4}, {6.9, 3.1, 4.9, 1.5}, {5.8, 4.0, 1.2, 0.2}, {5.4, 3.9, 1.7, 0.4}, {7.7, 3.8, 6.7, 2.2}, {6.3, 3.3, 4.7, 1.6}, {6.8, 3.2, 5.9, 2.3}, {7.6, 3.0, 6.6, 2.1}, {6.4, 3.2, 5.3, 2.3}, {5.7, 4.4, 1.5, 0.4}, {6.7, 3.3, 5.7, 2.1}, {6.4, 2.8, 5.6, 2.1}, {5.4, 3.9, 1.3, 0.4}, {6.1, 2.6, 5.6, 1.4}, {7.2, 3.0, 5.8, 1.6}, {5.2, 3.5, 1.5, 0.2}, {5.8, 2.6, 4.0, 1.2}, {5.9, 3.0, 5.1, 1.8}, {5.4, 3.0, 4.5, 1.5}, {6.7, 3.0, 5.0, 1.7}, {6.3, 2.3, 4.4, 1.3}, {5.1, 2.5, 3.0, 1.1}, 
{6.4, 3.2, 4.5, 1.5}, {6.8, 3.0, 5.5, 2.1}, {6.2, 2.8, 4.8, 1.8}, {6.9, 3.2, 5.7, 2.3}, {6.5, 3.2, 5.1, 2.0}, {5.8, 2.8, 5.1, 2.4}, {5.1, 3.8, 1.5, 0.3}, {4.8, 3.0, 1.4, 0.3}, {7.9, 3.8, 6.4, 2.0}, {5.8, 2.7, 5.1, 1.9}, {6.7, 3.0, 5.2, 2.3}, {5.1, 3.8, 1.9, 0.4}, {4.7, 3.2, 1.6, 0.2}, {6.0, 2.2, 5.0, 1.5}, {4.8, 3.4, 1.6, 0.2}, {7.7, 2.6, 6.9, 2.3}, {4.6, 3.6, 1.0, 0.2}, {7.2, 3.2, 6.0, 1.8}, {5.0, 3.3, 1.4, 0.2}, {6.6, 3.0, 4.4, 1.4}, {6.1, 2.8, 4.0, 1.3}, {5.0, 3.2, 1.2, 0.2}, {7.0, 3.2, 4.7, 1.4}, {6.0, 3.0, 4.8, 1.8}, {7.4, 2.8, 6.1, 1.9}, {5.8, 2.7, 5.1, 1.9}, {6.2, 3.4, 5.4, 2.3}, {5.0, 2.0, 3.5, 1.0}, {5.6, 2.5, 3.9, 1.1}, {6.7, 3.1, 5.6, 2.4}, {6.3, 2.5, 5.0, 1.9}, {6.4, 3.1, 5.5, 1.8}, {6.2, 2.2, 4.5, 1.5}, {7.3, 2.9, 6.3, 1.8}, {4.4, 3.0, 1.3, 0.2}, {7.2, 3.6, 6.1, 2.5}, {6.5, 3.0, 5.5, 1.8}, {5.0, 3.4, 1.5, 0.2}, {4.7, 3.2, 1.3, 0.2}, {6.6, 2.9, 4.6, 1.3}, {5.5, 3.5, 1.3, 0.2}, {7.7, 3.0, 6.1, 2.3}, {6.1, 3.0, 4.9, 1.8}, {4.9, 3.1, 1.5, 0.1}, {5.5, 2.4, 3.8, 1.1}, {5.7, 2.9, 4.2, 1.3}, {6.0, 2.9, 4.5, 1.5}, {6.4, 2.7, 5.3, 1.9}, {5.4, 3.7, 1.5, 0.2}, {6.1, 2.9, 4.7, 1.4}, {6.5, 2.8, 4.6, 1.5}, {5.6, 2.7, 4.2, 1.3}, {6.3, 3.4, 5.6, 2.4}, {4.9, 3.1, 1.5, 0.1}, {6.8, 2.8, 4.8, 1.4}, {5.7, 2.8, 4.5, 1.3}, {6.0, 2.7, 5.1, 1.6}, {5.0, 3.5, 1.3, 0.3}, {6.5, 3.0, 5.2, 2.0}, {6.1, 2.8, 4.7, 1.2}, {5.1, 3.5, 1.4, 0.3}, {4.6, 3.1, 1.5, 0.2}, {6.5, 3.0, 5.8, 2.2}, {4.6, 3.4, 1.4, 0.3}, {4.6, 3.2, 1.4, 0.2}, {7.7, 2.8, 6.7, 2.0}, {5.9, 3.2, 4.8, 1.8}, {5.1, 3.8, 1.6, 0.2}, {4.9, 3.0, 1.4, 0.2}, {4.9, 2.4, 3.3, 1.0}, {4.5, 2.3, 1.3, 0.3}, {5.8, 2.7, 4.1, 1.0}, {5.0, 3.4, 1.6, 0.4}, {5.2, 3.4, 1.4, 0.2}, {5.3, 3.7, 1.5, 0.2}, {5.0, 3.6, 1.4, 0.2}, {5.6, 2.9, 3.6, 1.3}, {4.8, 3.1, 1.6, 0.2}, {6.3, 2.7, 4.9, 1.8}, {5.7, 2.8, 4.1, 1.3}, {5.0, 3.0, 1.6, 0.2}, {6.3, 3.3, 6.0, 2.5}, {5.0, 3.5, 1.6, 0.6}, {5.5, 2.6, 4.4, 1.2}, {5.7, 3.0, 4.2, 1.2}, {4.4, 2.9, 1.4, 0.2}, {4.8, 3.0, 1.4, 0.1}, {5.5, 2.4, 3.7, 1.0}} Public ReadOnly m_inputArrayIrisFlowerTest!(,) = { {5.9, 3.0, 
4.2, 1.5}, {6.9, 3.1, 5.4, 2.1}, {5.1, 3.3, 1.7, 0.5}, {6.0, 3.4, 4.5, 1.6}, {5.5, 2.5, 4.0, 1.3}, {6.2, 2.9, 4.3, 1.3}, {5.5, 4.2, 1.4, 0.2}, {6.3, 2.8, 5.1, 1.5}, {5.6, 3.0, 4.1, 1.3}, {6.7, 2.5, 5.8, 1.8}, {7.1, 3.0, 5.9, 2.1}, {4.3, 3.0, 1.1, 0.1}, {5.6, 2.8, 4.9, 2.0}, {5.5, 2.3, 4.0, 1.3}, {6.0, 2.2, 4.0, 1.0}, {5.1, 3.5, 1.4, 0.2}, {5.7, 2.6, 3.5, 1.0}, {4.8, 3.4, 1.9, 0.2}, {5.1, 3.4, 1.5, 0.2}, {5.7, 2.5, 5.0, 2.0}, {5.4, 3.4, 1.7, 0.2}, {5.6, 3.0, 4.5, 1.5}, {6.3, 2.9, 5.6, 1.8}, {6.3, 2.5, 4.9, 1.5}, {5.8, 2.7, 3.9, 1.2}, {6.1, 3.0, 4.6, 1.4}, {5.2, 4.1, 1.5, 0.1}, {6.7, 3.1, 4.7, 1.5}, {6.7, 3.3, 5.7, 2.5}, {6.4, 2.9, 4.3, 1.3}} End Module modIrisFlowerTargetAnalog.vb Public Module modIrisFlowerTargetAnalog ' Iris flower data set ' https://en.wikipedia.org/wiki/Iris_flower_data_set ' http://archive.ics.uci.edu/ml/datasets/Iris ' https://en.wikipedia.org/wiki/Iris_flower_data_set#External_links Public ReadOnly m_targetArrayIrisFlowerAnalog!(,) = { {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}} ' Dataset split in train set and test set: ' https://github.com/vmitt/iris_dataset_prediction_using_tensorflow Public ReadOnly m_targetArrayIrisFlowerAnalogTrain!(,) = { {1}, 
{0.5}, {1}, {0}, {0}, {0}, {0}, {1}, {0.5}, {0}, {0.5}, {0.5}, {0}, {0}, {1}, {0.5}, {1}, {1}, {1}, {0}, {1}, {1}, {0}, {1}, {1}, {0}, {0.5}, {1}, {0.5}, {0.5}, {0.5}, {0.5}, {0.5}, {1}, {1}, {1}, {1}, {1}, {0}, {0}, {1}, {1}, {1}, {0}, {0}, {1}, {0}, {1}, {0}, {1}, {0}, {0.5}, {0.5}, {0}, {0.5}, {1}, {1}, {1}, {1}, {0.5}, {0.5}, {1}, {1}, {1}, {0.5}, {1}, {0}, {1}, {1}, {0}, {0}, {0.5}, {0}, {1}, {1}, {0}, {0.5}, {0.5}, {0.5}, {1}, {0}, {0.5}, {0.5}, {0.5}, {1}, {0}, {0.5}, {0.5}, {0.5}, {0}, {1}, {0.5}, {0}, {0}, {1}, {0}, {0}, {1}, {0.5}, {0}, {0}, {0.5}, {0}, {0.5}, {0}, {0}, {0}, {0}, {0.5}, {0}, {1}, {0.5}, {0}, {1}, {0}, {0.5}, {0.5}, {0}, {0}, {0.5}} Public ReadOnly m_targetArrayIrisFlowerAnalogTest!(,) = { {0.5}, {1}, {0}, {0.5}, {0.5}, {0.5}, {0}, {1}, {0.5}, {1}, {1}, {0}, {1}, {0.5}, {0.5}, {0}, {0.5}, {0}, {0}, {1}, {0}, {0.5}, {1}, {0.5}, {0.5}, {0.5}, {0}, {0.5}, {1}, {0.5}} End Module modIrisFlowerTargetAnalogOriginal.vb Module modIrisFlowerTargetAnalogOriginal ' Iris flower data set ' https://en.wikipedia.org/wiki/Iris_flower_data_set ' http://archive.ics.uci.edu/ml/datasets/Iris ' https://en.wikipedia.org/wiki/Iris_flower_data_set#External_links Public ReadOnly m_targetArrayIrisFlowerAnalogUnnormalized!(,) = { {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {0}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}, {2}} 
End Module modIrisFlowerTargetLogical.vb Public Module modIrisFlowerTargetLogical ' Iris flower data set ' https://en.wikipedia.org/wiki/Iris_flower_data_set ' http://archive.ics.uci.edu/ml/datasets/Iris ' https://en.wikipedia.org/wiki/Iris_flower_data_set#External_links Public ReadOnly m_targetArrayIrisFlowerLogical!(,) = { {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}} ' Dataset split in 
train set and test set: ' https://github.com/vmitt/iris_dataset_prediction_using_tensorflow Public ReadOnly m_targetArrayIrisFlowerLogicalTrain!(,) = { {0, 0, 1}, {0, 1, 0}, {0, 0, 1}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {0, 0, 1}, {0, 1, 0}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {1, 0, 0}, {1, 0, 0}, {0, 0, 1}, {0, 1, 0}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {1, 0, 0}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {1, 0, 0}, {0, 0, 1}, {1, 0, 0}, {0, 0, 1}, {1, 0, 0}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {1, 0, 0}, {0, 1, 0}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 1, 0}, {0, 1, 0}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 1, 0}, {0, 0, 1}, {1, 0, 0}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {1, 0, 0}, {0, 1, 0}, {1, 0, 0}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {1, 0, 0}, {0, 0, 1}, {0, 1, 0}, {1, 0, 0}, {1, 0, 0}, {0, 0, 1}, {1, 0, 0}, {1, 0, 0}, {0, 0, 1}, {0, 1, 0}, {1, 0, 0}, {1, 0, 0}, {0, 1, 0}, {1, 0, 0}, {0, 1, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {1, 0, 0}, {0, 1, 0}, {1, 0, 0}, {0, 0, 1}, {0, 1, 0}, {1, 0, 0}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {1, 0, 0}, {1, 0, 0}, {0, 1, 0}} Public ReadOnly m_targetArrayIrisFlowerLogicalTest!(,) = { {0, 1, 0}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {1, 0, 0}, {0, 0, 1}, {0, 1, 0}, {0, 0, 1}, {0, 0, 1}, {1, 0, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 0}, {1, 0, 0}, {0, 1, 0}, {1, 0, 0}, {1, 0, 0}, {0, 0, 1}, {1, 0, 0}, {0, 1, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {1, 0, 0}, {0, 1, 0}, {0, 0, 1}, {0, 1, 0}} End Module modSunspot.vb Public Module modSunspot ' Network: Backpropagation Network with Bias Terms and Momentum ' 
==================================================== ' ' Application: Time-Series Forecasting ' Prediction of the Annual Number of Sunspots ' ' Author: Karsten Kutza ' Date: 17.4.96 ' ' Reference: D.E. Rumelhart, G.E. Hinton, R.J. Williams ' Learning Internal Representations by Error Propagation ' in: ' D.E. Rumelhart, J.L. McClelland (Eds.) ' Parallel Distributed Processing, Volume 1 ' MIT Press, Cambridge, MA, pp. 318-362, 1986 ' ' STARTING_YEAR = 1700 ' https://courses.cs.washington.edu/courses/cse599/01wi/admin/Assignments/nn.html : bpn.html, nn.zip Public ReadOnly m_sunspotArray!() = { 0.0262, 0.0575, 0.0837, 0.1203, 0.1883, 0.3033, 0.1517, 0.1046, 0.0523, 0.0418, 0.0157, 0.0, 0.0, 0.0105, 0.0575, 0.1412, 0.2458, 0.3295, 0.3138, 0.204, 0.1464, 0.136, 0.1151, 0.0575, 0.1098, 0.2092, 0.4079, 0.6381, 0.5387, 0.3818, 0.2458, 0.1831, 0.0575, 0.0262, 0.0837, 0.1778, 0.3661, 0.4236, 0.5805, 0.5282, 0.3818, 0.2092, 0.1046, 0.0837, 0.0262, 0.0575, 0.1151, 0.2092, 0.3138, 0.4231, 0.4362, 0.2495, 0.25, 0.1606, 0.0638, 0.0502, 0.0534, 0.17, 0.2489, 0.2824, 0.329, 0.4493, 0.3201, 0.2359, 0.1904, 0.1093, 0.0596, 0.1977, 0.3651, 0.5549, 0.5272, 0.4268, 0.3478, 0.182, 0.16, 0.0366, 0.1036, 0.4838, 0.8075, 0.6585, 0.4435, 0.3562, 0.2014, 0.1192, 0.0534, 0.126, 0.4336, 0.6904, 0.6846, 0.6177, 0.4702, 0.3483, 0.3138, 0.2453, 0.2144, 0.1114, 0.0837, 0.0335, 0.0214, 0.0356, 0.0758, 0.1778, 0.2354, 0.2254, 0.2484, 0.2207, 0.147, 0.0528, 0.0424, 0.0131, 0.0, 0.0073, 0.0262, 0.0638, 0.0727, 0.1851, 0.2395, 0.215, 0.1574, 0.125, 0.0816, 0.0345, 0.0209, 0.0094, 0.0445, 0.0868, 0.1898, 0.2594, 0.3358, 0.3504, 0.3708, 0.25, 0.1438, 0.0445, 0.069, 0.2976, 0.6354, 0.7233, 0.5397, 0.4482, 0.3379, 0.1919, 0.1266, 0.056, 0.0785, 0.2097, 0.3216, 0.5152, 0.6522, 0.5036, 0.3483, 0.3373, 0.2829, 0.204, 0.1077, 0.035, 0.0225, 0.1187, 0.2866, 0.4906, 0.501, 0.4038, 0.3091, 0.2301, 0.2458, 0.1595, 0.0853, 0.0382, 0.1966, 0.387, 0.727, 0.5816, 0.5314, 0.3462, 0.2338, 0.0889, 0.0591, 0.0649, 0.0178, 
0.0314, 0.1689, 0.284, 0.3122, 0.3332, 0.3321, 0.273, 0.1328, 0.0685, 0.0356, 0.033, 0.0371, 0.1862, 0.3818, 0.4451, 0.4079, 0.3347, 0.2186, 0.137, 0.1396, 0.0633, 0.0497, 0.0141, 0.0262, 0.1276, 0.2197, 0.3321, 0.2814, 0.3243, 0.2537, 0.2296, 0.0973, 0.0298, 0.0188, 0.0073, 0.0502, 0.2479, 0.2986, 0.5434, 0.4215, 0.3326, 0.1966, 0.1365, 0.0743, 0.0303, 0.0873, 0.2317, 0.3342, 0.3609, 0.4069, 0.3394, 0.1867, 0.1109, 0.0581, 0.0298, 0.0455, 0.1888, 0.4168, 0.5983, 0.5732, 0.4644, 0.3546, 0.2484, 0.16, 0.0853, 0.0502, 0.1736, 0.4843, 0.7929, 0.7128, 0.7045, 0.4388, 0.363, 0.1647, 0.0727, 0.023, 0.1987, 0.7411, 0.9947, 0.9665, 0.8316, 0.5873, 0.2819, 0.1961, 0.1459, 0.0534, 0.079, 0.2458, 0.4906, 0.5539, 0.5518, 0.5465, 0.3483, 0.3603, 0.1987, 0.1804, 0.0811, 0.0659, 0.1428, 0.4838, 0.8127} End Module clsMLPAccord.vb ' From http://accord-framework.net/docs/html/T_Accord_Neuro_Learning_BackPropagationLearning.htm : C# -> VB .NET conversion ' See also : ' https://github.com/accord-net/framework ' https://github.com/accord-net/framework/releases/download/v3.8.0/Accord.NET-3.8.0-archive.rar ' https://www.nuget.org/packages/Accord : 3.8.0 ' http://accord-framework.net Imports Accord.Neuro Imports Accord.Neuro.Learning Imports Perceptron.Utility ' Matrix Imports System.Text Public Class clsMLPAccord : Inherits clsVectorizedMLPGeneric Private network As ActivationNetwork Private teacherBPL As BackPropagationLearning ' Reliable ''' <summary> ''' Resilient Backpropagation Learning ''' </summary> Private teacherRBPL As ResilientBackpropagationLearning ''' <summary> ''' Parallel Resilient Backpropagation Learning ''' </summary> Public PRBPLAlgo As Boolean = False Private teacherPRBPL As ParallelResilientBackpropagationLearning ' Less reliable? 
' Neuron count per layer, excluding the input layer: the shape required by
' Accord's ActivationNetwork constructor.
Private neuronCountWithoutInputLayer%()

' Input and target samples converted to jagged Double arrays, the format
' expected by the Accord teachers' Run/RunEpoch methods.
Public inputJaggedDblArray#()()
Public targetJaggedDblArray#()()

''' <summary>Return the concrete MLP implementation name (this class name).</summary>
Public Overrides Function GetMLPType$()
    Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name
End Function

''' <summary>This wrapper delegates activation functions to the library's
''' own optimized implementations.</summary>
Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType
    Return enumActivationFunctionType.LibraryOptimized
End Function

''' <summary>
''' Initialize the network structure from a neuron count per layer,
''' e.g. {2, 2, 1} = 2 inputs, 2 hidden neurons, 1 output neuron.
''' Also precomputes the jagged Double copies of inputArray/targetArray
''' when those arrays are already set.
''' </summary>
Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
    MyBase.InitializeStruct(neuronCount, addBiasColumn)
    If Not Me.useBias Then
        ' NOTE(review): message says "clsAccordMLP" but the class is clsMLPAccord
        Throw New NotImplementedException(
            "useBias=False is not implemented for clsAccordMLP!")
    End If
    ' NOTE(review): sigmoidAlphaValue is unused here; the gain is re-read
    ' in SetActivationFunctionOptimized when the network is built.
    Dim sigmoidAlphaValue! = Me.m_gain
    ' 2, 2, 1 :
    ' 2 : two inputs in the network
    ' 2 : two neurons in the first layer
    ' 1 : one neuron in the second layer
    'Dim neuronCountWithoutInputLayer%()
    ReDim Me.neuronCountWithoutInputLayer(0 To Me.layerCount - 2)
    For i = 1 To Me.layerCount - 1
        Me.neuronCountWithoutInputLayer(i - 1) = neuronCount(i)
    Next
    ' The sample arrays may legitimately be absent at this point
    ' (structure-only initialization): stop silently in that case.
    If IsNothing(Me.inputArray) Then Exit Sub
    Dim inputArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.inputArray)
    Me.inputJaggedDblArray = clsMLPHelper.Transform2DArrayToJaggedArray(inputArrayDbl)
    If IsNothing(Me.targetArray) Then Exit Sub
    Dim targetArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.targetArray)
    Me.targetJaggedDblArray = clsMLPHelper.Transform2DArrayToJaggedArray(targetArrayDbl)
End Sub

' Map the generic activation function to the library-optimized one
' (signature continues on the next source line).
Public Overrides Sub SetActivationFunction( actFnc As enumActivationFunction, Optional gain! = 1, Optional center!
= 0)
    ' Dispatch the generic activation function to its library-optimized
    ' counterpart; only sigmoid and hyperbolic tangent are supported here.
    Select Case actFnc
        Case enumActivationFunction.Sigmoid
            SetActivationFunctionOptimized(
                enumActivationFunctionOptimized.Sigmoid, gain, center)
        Case enumActivationFunction.HyperbolicTangent
            SetActivationFunctionOptimized(
                enumActivationFunctionOptimized.HyperbolicTangent, gain, center)
        Case Else
            Throw New NotImplementedException(
                "This activation function is not available!")
    End Select
End Sub

''' <summary>
''' Select the (library-optimized) activation function, build the Accord
''' ActivationNetwork accordingly, and instantiate the matching teacher:
''' resilient backpropagation, parallel resilient backpropagation, or
''' plain backpropagation with momentum.
''' </summary>
Public Overrides Sub SetActivationFunctionOptimized(
    actFnc As enumActivationFunctionOptimized,
    Optional gain! = 1, Optional center! = 0)
    MyBase.SetActivationFunctionOptimized(actFnc, gain, center)
    ' The generic gain is forwarded to the library as the sigmoid alpha value.
    Dim sigmoidAlphaValue! = Me.m_gain
    Select Case actFnc
        Case enumActivationFunctionOptimized.Sigmoid
            Me.m_actFunc = enumActivationFunction.Sigmoid
            Me.network = New ActivationNetwork(
                New SigmoidFunction(sigmoidAlphaValue),
                Me.nbInputNeurons, Me.neuronCountWithoutInputLayer)
        Case enumActivationFunctionOptimized.HyperbolicTangent
            Me.m_actFunc = enumActivationFunction.HyperbolicTangent
            Me.network = New ActivationNetwork(
                New BipolarSigmoidFunction(sigmoidAlphaValue),
                Me.nbInputNeurons, Me.neuronCountWithoutInputLayer)
        Case enumActivationFunctionOptimized.ELU
            Me.m_actFunc = Nothing
            Throw New NotImplementedException(
                "ELU activation function is not available!")
        Case Else
            ' NOTE(review): other branches assign Me.m_actFunc; this one sets
            ' Me.activFnc - confirm both members exist and which is intended.
            Me.activFnc = Nothing
    End Select
    If Me.trainingAlgorithm = enumTrainingAlgorithm.RProp AndAlso Not PRBPLAlgo Then
        ' Resilient backpropagation (sequential implementation)
        Me.teacherRBPL = New ResilientBackpropagationLearning(Me.network)
        Me.teacherRBPL.LearningRate = Me.learningRate ' default value: 0.0125
        Me.weightAdjustment = 0 ' RProp does not use momentum
    ElseIf Me.trainingAlgorithm = enumTrainingAlgorithm.RProp AndAlso PRBPLAlgo Then
        ' Parallel resilient backpropagation (library defaults kept below)
        Me.teacherPRBPL = New ParallelResilientBackpropagationLearning(Me.network)
        'Me.teacherPRBPL.Reset(Me.learningRate)
        'Me.teacherPRBPL.DecreaseFactor = 0.5 ' eta minus
        'Me.teacherPRBPL.IncreaseFactor = 1.2 ' eta plus
        'Me.teacherPRBPL.UpdateLowerBound = 0.000001 ' delta max
        'Me.teacherPRBPL.UpdateUpperBound = 50 ' delta min
    Else
        ' Standard backpropagation with momentum
        ' (statement continues on the next source line).
        Me.teacherBPL = New
BackPropagationLearning(Me.network)
        Me.teacherBPL.LearningRate = Me.learningRate
        Me.teacherBPL.Momentum = Me.weightAdjustment
    End If
End Sub

''' <summary>
''' Copy a 2D weight array into layer numLayer (1-based) of the Accord
''' network; when useBias is set, the extra last column of each row holds
''' the bias (the neuron's Threshold).
''' </summary>
Public Overrides Sub InitializeWeights(numLayer%, weights#(,))
    ' Accord layers are 0-based and exclude the input layer.
    Dim i = numLayer - 1
    Dim layer = Me.network.Layers(i)
    Dim nbNeurons = layer.Neurons.Count
    For j = 0 To nbNeurons - 1
        ' Neuron: no Threshold property!
        'Dim neuron = layer.Neurons(j)
        ' ActivationNeuron: Threshold property
        Dim neuron = CType(layer.Neurons(j), ActivationNeuron)
        Dim nbWeights = neuron.Weights.Count
        For k = 0 To nbWeights - 1
            neuron.Weights(k) = weights(j, k)
        Next k
        If Me.useBias Then
            ' The Threshold value is added to inputs weighted sum before
            ' it is passed to activation function
            neuron.Threshold = weights(j, nbWeights)
        Else
            neuron.Threshold = 0
        End If
    Next j
End Sub

''' <summary>
''' Randomize the network weights (Nguyen-Widrow initialization), then
''' round them so that test runs are exactly reproducible.
''' minValue/maxValue belong to the generic signature; the library applies
''' its own statistical normalization around -0.5..+0.5.
''' </summary>
Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
    ' No Reset function for RBPL algo.
    ' NOTE(review): assumes teacherPRBPL was already created by
    ' SetActivationFunctionOptimized when PRBPLAlgo is set - confirm.
    If PRBPLAlgo Then Me.teacherPRBPL.Reset(Me.learningRate)
    ' Randomly initialize the network
    ' Random between -0.5 and +0.5 with statistical normalization
    ' according to the network structure
    Dim x As New NguyenWidrow(Me.network)
    x.Randomize()
    ' Alternative: initialize the network with Gaussian weights
    ' Dim x As New GaussianWeights(Me.network, 0.1)
    ' x.Randomize()
    RoundWeights()
End Sub

''' <summary>Round every weight and bias (Threshold) to
''' clsMLPGeneric.nbRoundingDigits, to reproduce all tests exactly.</summary>
Public Overrides Sub RoundWeights()
    ' Round the weights (to reproduce all tests exactly)
    For i = 0 To Me.layerCount - 2
        Dim layer = Me.network.Layers(i)
        Dim nbNeurons = layer.Neurons.Count
        For j = 0 To nbNeurons - 1
            Dim neuron = CType(layer.Neurons(j), ActivationNeuron)
            Dim nbWeights = neuron.Weights.Count
            For k = 0 To nbWeights - 1
                Dim r = neuron.Weights(k)
                Dim rounded = Math.Round(r, clsMLPGeneric.nbRoundingDigits)
                neuron.Weights(k) = rounded
            Next k
            If Me.useBias Then
                Dim r = neuron.Threshold
                Dim rounded = Math.Round(r, clsMLPGeneric.nbRoundingDigits)
                neuron.Threshold = rounded
            Else
                neuron.Threshold = 0
            End If
        Next j
    Next i
End Sub

' Vectorized training loop over all samples
' (body continues on the next source line).
Public Overrides Sub TrainVector()
    Me.learningMode =
enumLearningMode.Vectorial
    Me.vectorizedLearningMode = True
    For iteration = 0 To Me.nbIterations - 1
        Me.numIteration = iteration
        TrainVectorOneIteration()
        If Me.printOutput_ Then PrintOutput(iteration)
    Next
    SetOuput1D()
    ComputeAverageError()
End Sub

''' <summary>
''' Run one full epoch over the whole (jagged Double) training set with
''' whichever teacher was instantiated in SetActivationFunctionOptimized.
''' </summary>
Public Overrides Sub TrainVectorOneIteration()
    Dim avgError#
    If Me.trainingAlgorithm = enumTrainingAlgorithm.RProp AndAlso Not PRBPLAlgo Then
        avgError = Me.teacherRBPL.RunEpoch(Me.inputJaggedDblArray, Me.targetJaggedDblArray)
    ElseIf Me.trainingAlgorithm = enumTrainingAlgorithm.RProp AndAlso PRBPLAlgo Then
        avgError = Me.teacherPRBPL.RunEpoch(Me.inputJaggedDblArray, Me.targetJaggedDblArray)
    Else
        avgError = Me.teacherBPL.RunEpoch(Me.inputJaggedDblArray, Me.targetJaggedDblArray)
    End If
    ' Does not work fine, too high!?
    'Me.averageError = avgError
End Sub

''' <summary>
''' Recompute the network output for every input sample and store it both
''' as a flat 1D Single array (lastOutputArray1DSingle) and as a 2D Double
''' array (output), one row per sample.
''' </summary>
Public Overrides Sub SetOuput1D()
    Dim nbInputs = Me.inputArray.GetLength(0)
    Dim nbTargets = Me.targetArray.GetLength(0)
    Dim lengthTot = nbTargets * Me.nbOutputNeurons
    Dim outputs1D#(lengthTot - 1)
    Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1)
    Dim k = 0
    For i = 0 To nbInputs - 1
        ' Fix: the original declared "Dim inputs#(1)", allocating a
        ' 2-element array that the next assignment immediately discarded;
        ' bind the jagged row directly instead.
        Dim inputs#() = Me.inputJaggedDblArray(i)
        Dim outputs#() = Me.network.Compute(inputs)
        For j = 0 To Me.nbOutputNeurons - 1
            outputs2D(i, j) = outputs(j)
            outputs1D(k) = outputs(j)
            k += 1
        Next
    Next
    Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs1D)
    Me.output = outputs2D
End Sub

''' <summary>
''' Train the network on a single (input, target) sample with the active
''' teacher (body continues on the next source line).
''' </summary>
Public Overrides Sub TrainOneSample(input!(), target!())
    Dim inputArrayDbl = clsMLPHelper.Convert1DArrayOfSingleToDouble(input)
    Dim targetArrayDbl = clsMLPHelper.Convert1DArrayOfSingleToDouble(target)
    Dim avgError#
    If Me.trainingAlgorithm = enumTrainingAlgorithm.RProp AndAlso Not PRBPLAlgo Then
        avgError = Me.teacherRBPL.Run(inputArrayDbl, targetArrayDbl)
    ElseIf Me.trainingAlgorithm = enumTrainingAlgorithm.RProp AndAlso PRBPLAlgo Then
        avgError = Me.teacherPRBPL.Run(inputArrayDbl, targetArrayDbl)
    Else
        avgError = Me.teacherBPL.Run(inputArrayDbl, targetArrayDbl)
    End If
    'Me.averageErrorOneSample = avgError
    ' Refresh the last-output cache after the single-sample update.
    Dim outputs#() = Me.network.Compute(inputArrayDbl)
    Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs)
End Sub

''' <summary>
''' Forward-propagate one input sample (no training) and store the result
''' in lastOutputArray1DSingle and in the 2D output member.
''' </summary>
Public Overrides Sub TestOneSample(input!())
    Dim inputsDbl#() = clsMLPHelper.Convert1DArrayOfSingleToDouble(input)
    Dim outputs#() = Me.network.Compute(inputsDbl)
    Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs)
    ' 20/11/2020
    Dim outputs2D#(0, Me.nbOutputNeurons - 1)
    clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputs, 0)
    Me.output = outputs2D
End Sub

''' <summary>
''' Read one weight of the Accord network. layer is 1-based; when the
''' weight index equals the neuron's weight count, the bias (Threshold)
''' is returned instead - mirroring InitializeWeights' layout.
''' </summary>
Public Overrides Function GetWeight#(layer%, neuron%, weight%)
    Dim layer_ = Me.network.Layers(layer - 1)
    Dim neuron_ = CType(layer_.Neurons(neuron), ActivationNeuron)
    Dim nbWeights = neuron_.Weights.Count
    Dim wd#
    If weight < nbWeights Then
        wd = neuron_.Weights(weight)
    Else
        ' One past the last weight addresses the bias.
        wd = neuron_.Threshold
    End If
    Return wd
End Function

''' <summary>Single-precision convenience wrapper around GetWeight.</summary>
Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%)
    Dim wd# = Me.GetWeight(layer, neuron, weight)
    Dim ws! = CSng(wd)
    Return ws
End Function

''' <summary>
''' Write one weight of the Accord network (same indexing convention as
''' GetWeight; one past the last weight addresses the bias).
''' Note: "weightWalue" is a historical typo kept for signature stability.
''' </summary>
Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#)
    Dim layer_ = Me.network.Layers(layer - 1)
    Dim neuron_ = CType(layer_.Neurons(neuron), ActivationNeuron)
    Dim nbWeights = neuron_.Weights.Count
    If weight < nbWeights Then
        neuron_.Weights(weight) = weightWalue
    Else
        neuron_.Threshold = weightWalue
    End If
End Sub

' Single-precision wrapper around SetWeight
' (body continues on the next source line).
Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!)
    Dim wd# = weightWalue
    SetWeight(layer, neuron, weight, wd)
End Sub

End Class

modMLPAccordTest.vb

' Console test harness for clsMLPAccord using the shared XOR fixtures.
Imports Perceptron.Utility ' Matrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode

Module modMLPAccordTest

    ''' <summary>Console entry point: run the classical XOR test and wait.</summary>
    Sub MainAccordMLP()
        Console.WriteLine("Accord MLP with the classical XOR test.")
        AccordMLPXorTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ''' <summary>
    ''' Train an Accord MLP on nbXor parallel XOR problems (1, 2 or 3),
    ''' using the module-level input/target fixtures and printing progress.
    ''' </summary>
    Public Sub AccordMLPXorTest(Optional nbXor% = 1)
        Dim mlp As New clsMLPAccord
        mlp.ShowMessage("Accord MLP Xor test")
        mlp.ShowMessage("-------------------")
        mlp.inputArray = m_inputArrayXOR
        mlp.targetArray = m_targetArrayXOR
        'mlp.nbIterations = 5000 ' Sigmoid: works
        mlp.nbIterations = 2000 ' Hyperbolic tangent: works fine
        'mlp.nbIterations = 20000 ' Stochastic
        mlp.Initialize(learningRate:=0.05!, weightAdjustment:=0.1!)
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        ' Select the fixture matching the requested number of XOR problems.
        If nbXor = 1 Then
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            'num_input:=2, num_hidden:={5}, num_output:=1
            'mlp.InitializeStruct({2, 5, 1}, addBiasColumn:=True)
            mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True)
            mlp.printOutputMatrix = True
        ElseIf nbXor = 2 Then
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True)
        End If
        'mlp.SetActivationFunctionOptimized(
        ' enumActivationFunctionOptimized.Sigmoid, gain:=2)
        mlp.SetActivationFunctionOptimized(
            enumActivationFunctionOptimized.HyperbolicTangent, gain:=2)
        mlp.Randomize()
        mlp.PrintWeights()
        WaitForKeyToStart()
        mlp.TrainVector() ' Works fine
        'mlp.Train() ' Works fine
        'mlp.Train(enumLearningMode.Systematic) ' Works
fine 'mlp.Train(enumLearningMode.SemiStochastic) ' Works 'mlp.Train(enumLearningMode.Stochastic) ' Works mlp.ShowMessage("Accord MLP Xor test: Done.") If nbXor > 1 Then Exit Sub WaitForKeyToContinue("Press a key to print MLP weights") mlp.PrintWeights() End Sub End Module clsMLPBrightWire.vb #If NET4 Then ' From https://github.com/jdermody/brightwire-v2 .Net 4.6 #ElseIf NETCORE Then ' From https://github.com/jdermody/brightwire .Net core (.Net 5 and .Net 6) Imports BrightData Imports BrightData.Numerics Imports BrightWire.Models #End If Imports System.Text Imports BrightWire ' CreateGraphFactory Imports BrightWire.ExecutionGraph ' GraphFactory Public Class clsMLPBrightWire : Inherits clsVectorizedMLPGeneric Const defaultBatchSize = 10 Dim m_testData, m_trainingData As IDataSource Dim m_errorMetric As IErrorMetric Dim m_graph As ExecutionGraph.GraphFactory Private m_weights!()() Private m_biases!()() #If NET4 Then Dim m_dataTableTest, m_dataTableTraining, m_dataTable As IDataTable Dim m_executionContext As IExecutionContext Dim m_bestGraph As Models.ExecutionGraph Dim m_engine As IGraphTrainingEngine Dim m_output As IReadOnlyList(Of Models.ExecutionResult) #ElseIf NETCORE Then Dim m_context As BrightDataContext Dim m_training, m_test As IRowOrientedDataTable Dim m_model, m_bestGraph As ExecutionGraphModel Dim m_executionEngine As IGraphExecutionEngine Dim m_graphTrainingEngine As IGraphTrainingEngine Dim m_output As List(Of ExecutionResult) #End If Public Overrides Function GetMLPType$() Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name End Function Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType Return enumActivationFunctionType.LibraryOptimized End Function Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean) Me.useBias = addBiasColumn If Not Me.useBias Then Throw New NotImplementedException( "useBias=False is not implemented for clsMLPBrightWire!") End If 
MyBase.InitializeStruct(neuronCount, addBiasColumn) Me.trainingAlgorithm = enumTrainingAlgorithm.RMSProp Me.weightAdjustment = 0 Me.minBatchSize = defaultBatchSize Me.nbIterationsBatch = defaultBatchSize ' Useful for testing other MLP tests not using learning rate If Me.learningRate = 0 Then Me.learningRate = 0.1 If IsNothing(Me.inputArray) Then Exit Sub If IsNothing(Me.m_actFunc) Then Exit Sub If Me.m_actFunc = enumActivationFunction.Undefined Then Exit Sub End Sub Public Overrides Sub SetActivationFunction( actFnc As enumActivationFunction, Optional gain! = 1, Optional center! = 0) center = 0 If actFnc = enumActivationFunction.Sigmoid Then gain = 1 If actFnc = enumActivationFunction.HyperbolicTangent Then gain = 2 MyBase.SetActivationFunction(actFnc, gain, center) BuildGraph() End Sub #If NET4 Then Private Sub BuildSamples(inputArray0(,) As Single, targetArray0(,) As Single) Dim builder = BrightWireProvider.CreateDataTableBuilder() builder.AddVectorColumn(Me.nbInputNeurons, "Input") builder.AddVectorColumn(Me.nbOutputNeurons, "Output", isTarget:=True) Me.nbSamples = Me.targetArray.GetUpperBound(0) + 1 For j = 0 To Me.nbSamples - 1 Dim fvi As New Models.FloatVector fvi.Data = clsMLPHelper.GetVector(inputArray0, j) Dim fvo As New Models.FloatVector fvo.Data = clsMLPHelper.GetVector(targetArray0, j) builder.Add(fvi, fvo) Next m_dataTable = builder.Build() Dim data0 = m_graph.CreateDataSource(m_dataTable) m_engine.Test(data0, m_errorMetric) Dim networkGraph = m_engine.Graph Dim executionEngine = m_graph.CreateEngine(networkGraph) m_output = executionEngine.Execute(data0) Me.averageError = m_output.Average(Function(o) o.CalculateError(m_errorMetric)) End Sub Private Sub BuildGraph() Dim lap = BrightWireProvider.CreateLinearAlgebra Dim builder = BrightWireProvider.CreateDataTableBuilder() builder.AddVectorColumn(Me.nbInputNeurons, "Input") builder.AddVectorColumn(Me.nbOutputNeurons, "Output", isTarget:=True) Me.nbSamples = Me.targetArray.GetUpperBound(0) + 1 For j = 0 
To Me.nbSamples - 1 Dim fvi As New Models.FloatVector fvi.Data = clsMLPHelper.GetVector(Me.inputArray, j) Dim fvo As New Models.FloatVector fvo.Data = clsMLPHelper.GetVector(Me.targetArray, j) builder.Add(fvi, fvo) Next m_dataTableTraining = builder.Build() m_dataTableTest = Nothing If Not IsNothing(Me.targetArrayTest) Then Dim nbTestSamples = Me.targetArrayTest.GetUpperBound(0) + 1 For j = 0 To nbTestSamples - 1 Dim fvi As New Models.FloatVector fvi.Data = clsMLPHelper.GetVector(Me.inputArrayTest, j) Dim fvo As New Models.FloatVector fvo.Data = clsMLPHelper.GetVector(Me.targetArrayTest, j) builder.Add(fvi, fvo) Next m_dataTableTest = builder.Build() End If ' Create the graph m_graph = New GraphFactory(lap) m_errorMetric = m_graph.ErrorMetric.Quadratic ' BinaryClassification: Binary classification rounds outputs to 1 or 0 ' and compares them against the target ' OneHotEncoding: Finds the single index of the highest activation ' and compares it to the target index ' Quadratic: Quadratic error ' CrossEntropy : Cross entropy error https://en.wikipedia.org/wiki/Cross_entropy Select Case Me.trainingAlgorithm Case enumTrainingAlgorithm.NesterovMomentum m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.NesterovMomentum) Case enumTrainingAlgorithm.AdaGrad m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.AdaGrad) Case enumTrainingAlgorithm.Adam m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.Adam) Case enumTrainingAlgorithm.Momentum m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.Momentum) Case enumTrainingAlgorithm.RMSProp m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.RmsProp) Case Else 'Throw New NotImplementedException("This training algorithm is not available!") ' Default training algorithm: RMSProp m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.RmsProp) End Select m_graph.CurrentPropertySet.Use(m_graph.WeightInitialisation.Gaussian) ' Create the engine m_trainingData = m_graph.CreateDataSource(m_dataTableTraining) If Not 
IsNothing(Me.targetArrayTest) Then m_testData = m_graph.CreateDataSource(m_dataTableTest) End If Dim batchSize = defaultBatchSize If Me.learningMode = enumLearningMode.VectorialBatch Then batchSize = Me.nbIterationsBatch m_engine = m_graph.CreateTrainingEngine(m_trainingData, learningRate:=Me.learningRate, batchSize:=batchSize) ' Create the network With m_graph.Connect(m_engine) Dim actFctOut As INode = Nothing ' Must be distinct from actFct For l = 1 To Me.layerCount - 2 Dim actFct As INode Select Case m_actFunc Case enumActivationFunction.Sigmoid actFct = m_graph.SigmoidActivation actFctOut = m_graph.SigmoidActivation Case enumActivationFunction.HyperbolicTangent actFct = m_graph.TanhActivation actFctOut = m_graph.TanhActivation Case enumActivationFunction.ReLu actFct = m_graph.ReluActivation actFctOut = m_graph.ReluActivation Case Else Throw New NotImplementedException( "This activation function is not available!") End Select ' Create a feed forward layer with the activation function .AddFeedForward(neuronCount(l)).Add(actFct) Next ' Create a second feed forward layer with the activation function Dim outputSize = m_engine.DataSource.OutputSize .AddFeedForward(outputSize).Add(actFctOut) ' Calculate the error and backpropagate the error signal .AddBackpropagation(m_errorMetric) End With ' Train the network m_executionContext = m_graph.CreateExecutionContext End Sub #ElseIf NETCORE Then Private Sub BuildSamples(inputArray0(,) As Single, targetArray0(,) As Single) Dim builder = m_context.BuildTable builder = m_context.BuildTable builder.AddColumn(BrightDataType.Vector, "Input") builder.AddColumn(BrightDataType.Vector, "Output").SetTarget(True) Dim nbTestSamples = targetArray0.GetUpperBound(0) + 1 For j = 0 To nbTestSamples - 1 Dim v_in0 = clsMLPHelper.GetVector(inputArray0, j) Dim v_out0 = clsMLPHelper.GetVector(targetArray0, j) Dim v_in = m_context.CreateVector(v_in0) Dim v_out = m_context.CreateVector(v_out0) builder.AddRow(v_in, v_out) Next m_test = 
builder.BuildRowOriented m_testData = m_graph.CreateDataSource(m_test) If Not (m_bestGraph Is Nothing) Then ' Create a new network to execute the learned network Dim executionEngine = m_graph.CreateExecutionEngine(m_bestGraph) m_output = executionEngine.Execute(m_testData).ToList End If End Sub Private Sub BuildGraph() m_context = New BrightDataContext(randomSeed:=Nothing) m_context.ResetRandom(seed:=Nothing) m_context.UseNumericsLinearAlgebra() m_context.UserNotifications = Nothing m_graph = m_context.CreateGraphFactory Dim hiddenLayerSize% = Me.neuronCount(1) Dim builder = m_context.BuildTable Me.nbSamples = Me.targetArray.GetUpperBound(0) + 1 builder.AddColumn(BrightDataType.Vector, "Input") builder.AddColumn(BrightDataType.Vector, "Output").SetTarget(True) For j = 0 To Me.nbSamples - 1 Dim v_in0 = clsMLPHelper.GetVector(Me.inputArray, j) Dim v_out0 = clsMLPHelper.GetVector(Me.targetArray, j) Dim v_in = m_context.CreateVector(v_in0) Dim v_out = m_context.CreateVector(v_out0) builder.AddRow(v_in, v_out) Next m_training = builder.BuildRowOriented m_test = Nothing If Not IsNothing(Me.targetArrayTest) Then builder = m_context.BuildTable builder.AddColumn(BrightDataType.Vector, "Input") builder.AddColumn(BrightDataType.Vector, "Output").SetTarget(True) Dim nbTestSamples = Me.targetArrayTest.GetUpperBound(0) + 1 For j = 0 To nbTestSamples - 1 Dim v_in0 = clsMLPHelper.GetVector(Me.inputArrayTest, j) Dim v_out0 = clsMLPHelper.GetVector(Me.targetArrayTest, j) Dim v_in = m_context.CreateVector(v_in0) Dim v_out = m_context.CreateVector(v_out0) builder.AddRow(v_in, v_out) Next m_test = builder.BuildRowOriented End If Dim batchSize = defaultBatchSize If Me.learningMode = enumLearningMode.VectorialBatch Then batchSize = Me.nbIterationsBatch ' BinaryClassification: Binary classification rounds outputs to 1 or 0 ' and compares them against the target ' OneHotEncoding: Finds the single index of the highest activation ' and compares it to the target index ' Quadratic: Quadratic 
error ' CrossEntropy : Cross entropy error https://en.wikipedia.org/wiki/Cross_entropy m_errorMetric = m_graph.ErrorMetric.Quadratic ' Create the property set m_graph.CurrentPropertySet. Use(m_graph.GaussianWeightInitialisation( zeroBias:=True, stdDev:=0.1F, varianceCalibration:=GaussianVarianceCalibration.SquareRoot2N)) Select Case Me.trainingAlgorithm Case enumTrainingAlgorithm.NesterovMomentum m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.NesterovMomentum) Case enumTrainingAlgorithm.AdaGrad m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.AdaGrad) Case enumTrainingAlgorithm.Adam m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.Adam) Case enumTrainingAlgorithm.Momentum m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.Momentum) Case enumTrainingAlgorithm.RMSProp m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.RmsProp) Case Else 'Throw New NotImplementedException("This training algorithm is not available!") ' Default training algorithm: RMSProp m_graph.CurrentPropertySet.Use(m_graph.GradientDescent.RmsProp) End Select m_trainingData = m_graph.CreateDataSource(m_training) m_testData = Nothing If Not IsNothing(Me.targetArrayTest) Then m_testData = m_graph.CreateDataSource(m_test) m_graphTrainingEngine = m_graph.CreateTrainingEngine(m_trainingData, m_errorMetric, learningRate:=Me.learningRate, batchSize:=CType(Me.nbIterationsBatch, UInt32)) With m_graph.Connect(m_graphTrainingEngine) Dim actFct As Node.NodeBase = Nothing Dim actFctOut As Node.NodeBase = Nothing For l = 1 To Me.layerCount - 2 Select Case m_actFunc Case enumActivationFunction.Sigmoid actFct = m_graph.SigmoidActivation actFctOut = m_graph.SigmoidActivation Case enumActivationFunction.HyperbolicTangent actFct = m_graph.TanhActivation actFctOut = m_graph.TanhActivation Case enumActivationFunction.ReLu actFct = m_graph.ReluActivation actFctOut = m_graph.ReluActivation Case Else Throw New NotImplementedException( "This activation function is not available!") End Select ' 
Create a feed forward layer with the activation function Dim nc = CType(neuronCount(l), UInteger) .AddFeedForward(nc).Add(actFct) Next ' Create a second feed forward layer with the activation function Dim outputSize = CType(m_trainingData.OutputSize, UInteger) .AddFeedForward(outputSize).Add(actFctOut) .AddBackpropagation() End With m_bestGraph = Nothing End Sub Public Overrides Sub TrainVector() TrainVectorBatch() End Sub Public Overrides Sub TrainVectorBatch() TrainVectorBatch(Me.nbIterationsBatch) ComputeAverageError() If Me.printOutput_ Then PrintOutput(Me.nbIterations - 1, force:=True) End Sub Public Overrides Sub TrainVectorBatch(nbIterationsBatch%) ' Default implementation: call TrainVectorOneIteration() Me.learningMode = enumLearningMode.VectorialBatch Me.vectorizedLearningMode = True ' Works only with testCadence = 1, but this is very slow 'For iteration = 0 To nbIterationsBatch - 1 ' TrainVectorOneIteration() 'Next ShowMessage("Training...") Dim nbIter = CType(Me.nbIterations, UInteger) Dim testCadence = Me.nbIterations m_graphTrainingEngine.Train( numIterations:=nbIter, testData:=m_trainingData, onImprovement:= Function(model) m_bestGraph = model.Graph Return m_bestGraph End Function, testCadence:=testCadence) If m_bestGraph Is Nothing Then m_bestGraph = m_graphTrainingEngine.Graph ShowMessage("Training: Done.") SetOuput1D() End Sub Public Overrides Sub TrainVectorOneIteration() Throw New NotImplementedException() ' Too slow: 'm_graphTrainingEngine.Train(numIterations:=1, testData:=m_trainingData, ' onImprovement:= ' Function(model) ' m_bestGraph = model.Graph ' Return m_bestGraph ' End Function, ' testCadence:=1) End Sub Public Overrides Sub SetOuput1D() If m_trainingData Is Nothing Then Exit Sub Dim nbTargets = Me.targetArray.GetLength(0) Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputs1D#(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1) If m_bestGraph Is Nothing 
Then m_bestGraph = m_graphTrainingEngine.Graph m_executionEngine = m_graph.CreateExecutionEngine(m_bestGraph) Dim output = m_executionEngine.Execute(m_trainingData).ToList Dim i% = 0 For Each item In output For Each index In item.MiniBatchSequence.MiniBatch.Rows Dim indexInt = CType(index, Integer) Dim result = item.Output(indexInt) For j = 0 To Me.nbOutputNeurons - 1 Dim rOutput! = result(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputs1D(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput outputs2D(i, j) = rOutput Next i += 1 Next Next Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs1D) Me.output = outputs2D End Sub Public Overrides Sub TrainOneSample(input() As Single, target() As Single) Throw New NotImplementedException() End Sub Public Overrides Sub TestAllSamples(inputs!(,), nbOutputs%) BuildSamples(inputs, Me.targetArray) InitializeTraining() Me.nbSamples = inputs.GetLength(0) Dim nbInputs = inputs.GetLength(1) Dim outputs!(0 To Me.nbSamples - 1, 0 To nbOutputs - 1) For i = 0 To Me.nbSamples - 1 Me.numSample = i TestOneSampleByIndex(i) Dim output!() = Me.lastOutputArray1DSingle For j = 0 To output.GetLength(0) - 1 outputs(i, j) = output(j) Next Next Me.output = outputs ComputeAverageError() End Sub Public Sub TestOneSampleByIndex(index%) Dim nbTargets = 1 Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim i% = 0 For Each item In m_output Dim result = item.Output(index) For j% = 0 To Me.nbOutputNeurons - 1 Dim rOutput! 
= result(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput Next Next Me.lastOutputArray1DSingle = outputs Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputsDbl, 0) Me.output = outputs2D End Sub Public Overrides Sub TestOneSample(input() As Single) If m_testData Is Nothing Then Exit Sub Dim output = m_executionEngine.Execute(m_testData).ToList Dim nbTargets = 1 Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim samplesFound% = 0 Dim i% = 0 For Each item In output For Each index In item.MiniBatchSequence.MiniBatch.Rows Dim indexInt = CType(index, Integer) Dim result = item.Output(indexInt) Dim row = m_test.Row(index) Dim fv = CType(row.Item(0), LinearAlgebra.Vector(Of Single)) Dim sampleFound = True For k = 0 To input.Length - 1 Dim xi! = CType(fv(k), Single) If xi <> input(k) Then sampleFound = False : Exit For Next If Not sampleFound Then Continue For samplesFound += 1 For j% = 0 To Me.nbOutputNeurons - 1 Dim rOutput! 
= result(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput Next Next Next If samplesFound <> 1 Then Stop Me.lastOutputArray1DSingle = outputs Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputsDbl, 0) Me.output = outputs2D End Sub Public Sub GetWeights() m_model = m_bestGraph If IsNothing(m_model) Then m_model = m_graphTrainingEngine.Graph Dim layer% = Me.layerCount - 2 For Each node0 In m_model.OtherNodes If node0.TypeName.Contains("FeedForward") Then Dim node1 = m_graphTrainingEngine.Start.FindById(node0.Id) node1.LoadParameters(m_graph, node0) Dim ff = CType(node1, IFeedForward) Dim l% = 0 Dim nbRows% = CType(ff.Weight.RowCount, Integer) Dim nbCols% = CType(ff.Weight.ColumnCount, Integer) Dim nbNeuronsPreviousLayer = Me.neuronCount(layer) Dim nbNeuronsLayer = Me.neuronCount(layer + 1) If nbRows <> nbNeuronsPreviousLayer Then Stop If nbCols <> nbNeuronsLayer Then Stop For j = 0 To nbRows - 1 Dim fm = ff.Weight.Data Dim fv = fm.Row(CType(j, UInteger)) For k% = 0 To nbCols - 1 Me.m_weights(layer)(l + k) = fv.Values(k) Next l += CType(ff.Weight.ColumnCount, Integer) Next nbRows = 1 For j = 0 To nbRows - 1 Dim fv = ff.Bias.Data For k% = 0 To CType(fv.Size, Integer) - 1 Me.m_biases(layer)(k) = fv.Values(k) Next Next layer -= 1 End If Next End Sub Public Sub SetWeights() m_model = m_bestGraph If IsNothing(m_model) Then m_model = m_graphTrainingEngine.Graph Dim layer% = Me.layerCount - 2 For Each node0 In m_model.OtherNodes If node0.TypeName.Contains("FeedForward") Then Dim node1 = m_graphTrainingEngine.Start.FindById(node0.Id) node1.LoadParameters(m_graph, node0) Dim ff = CType(node1, IFeedForward) Dim nbRowsUI = ff.Weight.RowCount Dim nbColsUI = ff.Weight.ColumnCount Dim nbRows% = CType(nbRowsUI, Integer) Dim nbCols% = CType(nbColsUI, Integer) Dim nbNeuronsPreviousLayer = Me.neuronCount(layer) Dim nbNeuronsLayer = Me.neuronCount(layer + 1) If nbRows <> nbNeuronsPreviousLayer Then 
Stop If nbCols <> nbNeuronsLayer Then Stop Dim m As Utility.Matrix = clsMLPHelper.TransformArrayTo2DArray(Me.m_weights(layer), nbRows, nbCols) Dim v2 = m_context.CreateMatrix(Of Single)(nbRowsUI, nbColsUI, Function(m1, m2) m.ItemUIntSng(m1, m2)) ff.Weight.Data = v2 Dim mb As Utility.Matrix = clsMLPHelper.TransformArrayTo2DArray(Me.m_biases(layer), 1, nbCols) Dim vb = m_context.CreateVector(Of Single)(nbColsUI, Function(m1) mb.ItemUIntSng(0, m1)) ff.Bias.Data = vb layer -= 1 End If Next End Sub #End If Public Overrides Sub InitializeWeights(layer%, weights#(,)) If layer = 1 Then ReDim Me.m_weights(0 To Me.layerCount - 2) ReDim Me.m_biases(0 To Me.layerCount - 2) End If Dim i = layer - 1 Dim nbNeuronsLayer = Me.neuronCount(i + 1) Dim nbWeights = nbNeuronsLayer * Me.neuronCount(i) ReDim Me.m_weights(i)(0 To nbWeights - 1) ReDim Me.m_biases(i)(0 To nbNeuronsLayer - 1) For j = 0 To nbNeuronsLayer - 1 Me.m_biases(i)(j) = 0 Next j Dim nbNeuronsPreviousLayer = Me.neuronCount(i) Dim l = 0 For j = 0 To nbNeuronsLayer - 1 For k = 0 To nbNeuronsPreviousLayer - 1 Dim r = weights(j, k) Me.m_weights(i)(l) = CSng(r) l += 1 Next k If Me.useBias Then Dim r = weights(j, nbNeuronsPreviousLayer) Me.m_biases(i)(j) = CSng(r) End If Next j If layer = Me.layerCount - 1 Then SetWeights() End If End Sub Public Overrides Sub Randomize(Optional minValue! = -0.5, Optional maxValue! = 0.5) ' Re-build the graphe to randomize again the network! 
BuildGraph() ' Round the weights (to reproduce all tests exactly) RoundWeights() End Sub Public Sub ReDimWeights() ReDim Me.m_weights(0 To Me.layerCount - 2) ReDim Me.m_biases(0 To Me.layerCount - 2) For i = 0 To Me.layerCount - 2 Dim nbNeuronsLayer = Me.neuronCount(i + 1) Dim nbWeights = nbNeuronsLayer * Me.neuronCount(i) ReDim Me.m_weights(i)(0 To nbWeights - 1) ReDim Me.m_biases(i)(0 To nbNeuronsLayer - 1) Next End Sub Public Overrides Sub RoundWeights() ReDimWeights() GetWeights() For i = 0 To Me.layerCount - 2 Dim nbNeuronsLayer = Me.neuronCount(i + 1) Dim nbWeights = nbNeuronsLayer * Me.neuronCount(i) For k = 0 To nbWeights - 1 Dim weight = Me.m_weights(i)(k) Dim rounded = Math.Round(weight, clsMLPGeneric.nbRoundingDigits) Me.m_weights(i)(k) = CSng(rounded) Next k If Me.useBias Then For k = 0 To nbNeuronsLayer - 1 Dim weightT = Me.m_biases(i)(k) Dim rounded = Math.Round(weightT, clsMLPGeneric.nbRoundingDigits) Me.m_biases(i)(k) = CSng(rounded) Next k End If Next SetWeights() End Sub Public Overrides Function ShowWeights$(Optional format$ = format2Dec) ReDimWeights() GetWeights() Dim sb As New StringBuilder If Me.learningMode = enumLearningMode.VectorialBatch Then _ sb.AppendLine("nb iterations batch=" & Me.nbIterationsBatch) Dim weightsBase = MyBase.ShowWeights(format) sb.Append(weightsBase) Dim weights = sb.ToString Return weights End Function Public Overrides Function GetWeight#(layer%, neuron%, weight%) Dim ws! = Me.GetWeightSingle(layer, neuron, weight) Dim wd# = ws Return wd End Function Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%) Dim nbNeuronsLayer = Me.neuronCount(layer - 1) If weight >= nbNeuronsLayer Then Dim l2% = weight - nbNeuronsLayer + neuron Dim biasValue = Me.m_biases(layer - 1)(l2) Return biasValue End If Dim l% = neuron * nbNeuronsLayer + weight Dim weightValue = Me.m_weights(layer - 1)(l) Return weightValue End Function Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#) Dim ws! 
= CSng(weightWalue) SetWeightSingle(layer, neuron, weight, ws) End Sub Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!) Dim nbNeuronsLayer = Me.neuronCount(layer - 1) If weight >= nbNeuronsLayer Then Dim l2% = weight - nbNeuronsLayer + neuron Me.m_biases(layer - 1)(l2) = weightWalue Exit Sub End If Dim l% = neuron * nbNeuronsLayer + weight Me.m_weights(layer - 1)(l) = weightWalue End Sub #If NET4 Then Public Overrides Sub TrainVector() Me.learningMode = enumLearningMode.Vectorial Me.vectorizedLearningMode = True For iteration = 0 To Me.nbIterations - 1 Me.numIteration = iteration TrainVectorOneIteration() If Me.printOutput_ Then PrintOutput(iteration) Next SetOuput1D() ComputeAverageError() End Sub Public Overrides Sub TrainVectorOneIteration() m_engine.Train(m_executionContext) End Sub Public Overrides Sub SetOuput1D() ' Create a new network to execute the learned network Dim networkGraph = m_engine.Graph Dim executionEngine = m_graph.CreateEngine(networkGraph) Dim output = executionEngine.Execute(m_trainingData) Dim nbInputs = Me.inputArray.GetLength(0) Dim nbTargets = Me.targetArray.GetLength(0) Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputs1D#(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1) Dim i% = 0 For Each item In output For Each index In item.MiniBatchSequence.MiniBatch.Rows Dim result = item.Output(index) For j = 0 To Me.nbOutputNeurons - 1 Dim rOutput! 
= result.Data(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputs1D(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput outputs2D(i, j) = rOutput Next i += 1 Next Next Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs1D) Me.output = outputs2D End Sub Public Overrides Sub TrainOneSample(input() As Single, target() As Single) m_engine.Train(m_executionContext) End Sub Public Overrides Sub TestOneSampleAndComputeError(input!(), target!()) TestOneSampleStatic(Me.numSample) Dim targetArray2D!(0, target.GetUpperBound(0)) clsMLPHelper.Fill2DArrayOfSingle(targetArray2D, target, 0) ComputeAverageErrorOneSample(targetArray2D) End Sub Public Sub TestOneSampleAndComputeErrorTest(input!(), target!()) TestOneSampleStaticTest(Me.numSample) Dim targetArray2D!(0, target.GetUpperBound(0)) clsMLPHelper.Fill2DArrayOfSingle(targetArray2D, target, 0) ComputeAverageErrorOneSample(targetArray2D) End Sub Private Sub TestOneSampleStatic(index0%) Static networkGraph As Models.ExecutionGraph Static executionEngine As IGraphEngine Static output As IReadOnlyList(Of Models.ExecutionResult) If Me.numSample = 0 Then m_engine.Test(m_trainingData, m_errorMetric) ' Create a new network to execute the learned network networkGraph = m_engine.Graph executionEngine = m_graph.CreateEngine(networkGraph) output = executionEngine.Execute(m_trainingData) Me.averageError = output.Average(Function(o) o.CalculateError(m_errorMetric)) End If Dim nbInputs = 1 Dim nbTargets = 1 Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim i% = 0, j% = 0 For Each item In output Dim size = item.Output.Count If index0 >= size Then Stop Exit For End If Dim result = item.Output(index0) Const verify = False If verify Then ' Seek the right sample into the graph Dim row = m_dataTableTraining.GetRow(index0) Dim fvobj = row.Data(0) Dim fv As Models.FloatVector = CType(fvobj, Models.FloatVector) Dim 
sampleFound = True For k = 0 To Me.nbInputNeurons - 1 Dim xi! = CType(fv.Data(k), Single) If xi <> Me.inputArray(index0, k) Then sampleFound = False : Exit For Next If Not sampleFound Then Stop End If For j = 0 To Me.nbOutputNeurons - 1 Dim rOutput! = result.Data(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput Next Next Me.lastOutputArray1DSingle = outputs Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputsDbl, 0) Me.output = outputs2D End Sub Private Sub TestOneSampleStaticTest(index0%) Static networkGraph As Models.ExecutionGraph Static executionEngine As IGraphEngine Static output As IReadOnlyList(Of Models.ExecutionResult) If Me.numSample = 0 Then m_engine.Test(m_testData, m_errorMetric) ' Create a new network to execute the learned network networkGraph = m_engine.Graph executionEngine = m_graph.CreateEngine(networkGraph) output = executionEngine.Execute(m_testData) Me.averageError = output.Average(Function(o) o.CalculateError(m_errorMetric)) End If Dim nbInputs = 1 Dim nbTargets = 1 Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim i% = 0, j% = 0 For Each item In output Dim size = item.Output.Count If index0 >= size Then Stop Exit For End If Dim result = item.Output(index0) Const verify = False If verify Then ' Seek the right sample into the graph Dim row = m_dataTableTest.GetRow(index0) Dim fvobj = row.Data(0) Dim fv As Models.FloatVector = CType(fvobj, Models.FloatVector) Dim sampleFound = True For k = 0 To Me.nbInputNeurons - 1 Dim xi! = CType(fv.Data(k), Single) If xi <> Me.inputArrayTest(index0, k) Then sampleFound = False : Exit For Next If Not sampleFound Then Stop End If For j = 0 To Me.nbOutputNeurons - 1 Dim rOutput! 
= result.Data(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput Next Next Me.lastOutputArray1DSingle = outputs Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputsDbl, 0) Me.output = outputs2D End Sub Public Overrides Sub TrainSystematic(inputs!(,), targets!(,), Optional learningMode As enumLearningMode = enumLearningMode.Defaut) BuildSamples(inputs, targets) MyBase.TrainSystematic(inputs, targets, learningMode) End Sub Public Overrides Sub TestAllSamples(inputs!(,), nbOutputs%) BuildSamples(inputs, Me.targetArray) InitializeTraining() Me.nbSamples = inputs.GetLength(0) Dim nbInputs = inputs.GetLength(1) Dim outputs!(0 To Me.nbSamples - 1, 0 To nbOutputs - 1) For i = 0 To Me.nbSamples - 1 Me.numSample = i TestOneSampleByIndex(i) Dim output!() = Me.lastOutputArray1DSingle For j = 0 To output.GetLength(0) - 1 outputs(i, j) = output(j) Next Next Me.output = outputs ComputeAverageError() End Sub Public Overrides Sub TestOneSample(input() As Single) Dim nbTargets = 1 Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim i% = 0, j% = 0 Dim samplesFound% = 0 For Each item In m_output For Each index In item.MiniBatchSequence.MiniBatch.Rows Dim indexMax = item.Output.Count If index >= indexMax Then Exit For Dim result = item.Output(index) ' Seek the right sample into the graph Dim row = m_dataTable.GetRow(index) Dim fvobj = row.Data(0) Dim fv As Models.FloatVector = CType(fvobj, Models.FloatVector) Dim sampleFound = True For k = 0 To input.Length - 1 Dim xi! = CType(fv.Data(k), Single) If xi <> input(k) Then sampleFound = False : Exit For Next If Not sampleFound Then Continue For samplesFound += 1 For j = 0 To Me.nbOutputNeurons - 1 Dim rOutput! 
= result.Data(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput Next Exit For Next If samplesFound > 0 Then Exit For Next If samplesFound <> 1 Then Stop Me.lastOutputArray1DSingle = outputs Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputsDbl, 0) Me.output = outputs2D End Sub Public Sub TestOneSampleByIndex(index%) Dim nbTargets = 1 Dim lengthTot = nbTargets * Me.nbOutputNeurons Dim outputs!(lengthTot - 1) Dim outputsDbl#(lengthTot - 1) Dim i% = 0 For Each item In m_output Dim result = item.Output(index) For j% = 0 To Me.nbOutputNeurons - 1 Dim rOutput! = result.Data(j) outputs(i * Me.nbOutputNeurons + j) = rOutput outputsDbl(i * Me.nbOutputNeurons + j) = rOutput Next Next Me.lastOutputArray1DSingle = outputs Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputsDbl, 0) Me.output = outputs2D End Sub Public Sub GetWeights() m_bestGraph = m_engine.Graph If m_bestGraph Is Nothing Then Exit Sub If m_bestGraph.OtherNodes Is Nothing Then Exit Sub Dim layer% = Me.layerCount - 2 For Each node0 In m_bestGraph.OtherNodes If node0.TypeName.Contains("FeedForward") Then Dim node1 = m_engine.Start.FindById(node0.Id) node1.LoadParameters(node0) Dim ff = CType(node1, IFeedForward) Dim nbRows = ff.Weight.RowCount Dim nbCols = ff.Weight.ColumnCount Dim nbNeuronsPreviousLayer = Me.neuronCount(layer) Dim nbNeuronsLayer = Me.neuronCount(layer + 1) If nbRows <> nbNeuronsPreviousLayer Then Stop If nbCols <> nbNeuronsLayer Then Stop Dim l = 0 For j = 0 To nbRows - 1 Dim fm = ff.Weight.Data Dim fv = fm.Row(j) For k = 0 To nbCols - 1 Me.m_weights(layer)(l + k) = fv.Data(k) Next l += ff.Weight.ColumnCount Next nbRows = 1 For j = 0 To nbRows - 1 Dim fv = ff.Bias.Data For k = 0 To fv.Count - 1 Me.m_biases(layer)(k) = fv.Data(k) Next Next layer -= 1 End If Next End Sub Public Sub SetWeights() If m_engine.Graph Is Nothing Then Exit Sub If m_engine.Graph.OtherNodes Is 
Nothing Then Exit Sub Dim layer% = Me.layerCount - 2 For Each node0 In m_engine.Graph.OtherNodes If node0.TypeName.Contains("FeedForward") Then Dim node1 = m_engine.Start.FindById(node0.Id) node1.LoadParameters(node0) Dim ff = CType(node1, IFeedForward) Dim nbRows = ff.Weight.RowCount Dim nbCols = ff.Weight.ColumnCount If layer < 0 Then layer = Me.layerCount - 2 : Stop ' ??? Dim nbNeuronsPreviousLayer = Me.neuronCount(layer) Dim nbNeuronsLayer = Me.neuronCount(layer + 1) If nbRows <> nbNeuronsPreviousLayer Then Stop If nbCols <> nbNeuronsLayer Then Stop Dim fm = ff.Weight.Data Dim l = 0 For j = 0 To nbRows - 1 Dim fv = fm.Row(j) For k = 0 To nbCols - 1 Dim w = Me.m_weights(layer)(l + k) fv.Data(k) = w Next fm.Row(j) = fv l += ff.Weight.ColumnCount Next nbRows = 1 Dim fvb = ff.Bias.Data For j = 0 To nbRows - 1 For k = 0 To fvb.Count - 1 Dim b = Me.m_biases(layer)(k) fvb.Data(k) = b Next Next ff.Weight.Data = fm ff.Bias.Data = fvb layer -= 1 End If Next m_engine = m_graph.CreateTrainingEngine(m_trainingData, m_engine.Graph, Me.learningRate, Me.nbIterationsBatch) End Sub #End If End Class modMLPBrightWireTest.vb Imports Perceptron.clsMLPGeneric ' enumLearningMode Module modMLPBrightWireTest Sub MainBrightWireMLP() Console.WriteLine("BrightWire.NET MLP with the classical XOR test.") BrightWireMLPXorTest() Console.WriteLine("Press a key to quit.") Console.ReadKey() End Sub Public Sub BrightWireMLPXorTest(Optional nbXor% = 1) Dim mlp As New clsMLPBrightWire mlp.ShowMessage("BrightWire.NET MLP test") mlp.ShowMessage("-----------------------") mlp.inputArray = m_inputArrayXOR mlp.targetArray = m_targetArrayXOR mlp.Initialize(learningRate:=0.1) mlp.printOutput_ = True mlp.printOutputMatrix = False If nbXor = 1 Then mlp.InitializeStruct(m_neuronCountXOR241, addBiasColumn:=True) mlp.printOutputMatrix = True mlp.inputArray = m_inputArrayXOR mlp.targetArray = m_targetArrayXOR ElseIf nbXor = 2 Then mlp.inputArray = m_inputArray2XOR mlp.targetArray = m_targetArray2XOR 
' NOTE(review): extracted listing — original line breaks were collapsed; the same
' token stream is re-formatted below. The opening statements continue the Sub
' BrightWireMLPXorTest started in the previous chunk.
            mlp.InitializeStruct({4, 9, 2}, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct({6, 12, 3}, addBiasColumn:=True)
        End If
        mlp.trainingAlgorithm = enumTrainingAlgorithm.RMSProp
        mlp.nbIterationsBatch = 20
        mlp.nbIterations = 2000 ' Sigmoid: works
        mlp.SetActivationFunction(enumActivationFunction.Sigmoid)
        'mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain:=2)
        'mlp.SetActivationFunction(enumActivationFunction.ReLu)
        mlp.Randomize()
        mlp.PrintWeights()
        WaitForKeyToStart()
        mlp.Train(learningMode:=enumLearningMode.VectorialBatch)
        mlp.ShowMessage("BrightWire.NET MLP test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub

End Module
clsMLPClassic.vb
' From:
' *********************************************************************
' * File : mlp.h (+ mlp.cpp)
' * Author: Sylvain BARTHELEMY
' * https://github.com/sylbarth/mlp
' * Date : 2000-08
' *********************************************************************
' http://patrice.dargenton.free.fr/ia/ialab/perceptron.html (french)
Imports System.Text ' StringBuilder
Imports Perceptron.Utility ' Matrix

' Classic (non-vectorized) multi-layer perceptron: explicit per-neuron
' forward propagation, back-propagation and gradient descent.
Public Class clsMLPClassic : Inherits clsMLPGeneric

#Region "Declarations"

    ' Clipping bounds used to keep signals and errors finite
    Public Const floatMax! = 99999
    Public Const floatMin! = -99999

    Structure TNeuron
        Dim signal!
        Dim err!
        Dim w!()
        Dim dw!() ' Weight adjustment
        Dim wCopy!() ' Weight copy, if the average error decreases
        Dim signalCopy! ' Signal copy used for some derivate
    End Structure

    Structure TLayer
        Dim nbNeurons%
        Dim nbWeights%
        Dim Neurons() As TNeuron
    End Structure

    Public biasType As TBias = TBias.WeightAdded
    Private Layers() As TLayer = Nothing
    Private minValue! = 0
    Private maxValue! = 1

#End Region

#Region "Initialization"

    Public Overrides Function GetMLPType$()
        Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name
    End Function

    Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType
        Return enumActivationFunctionType.Normal
    End Function

    ' Allocate the layer/neuron/weight arrays according to neuronCount().
    ' With TBias.NeuronAdded, hidden layers reserve their last neuron as the
    ' bias neuron (signal pinned to maxValue); with addBiasColumn an extra
    ' weight per neuron is allocated on non-output layers.
    Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
        MyBase.InitializeStruct(neuronCount, addBiasColumn)
        ReDim Me.Layers(Me.layerCount - 1)
        Dim nbWeights%
        For i = 0 To Me.layerCount - 1
            With Me.Layers(i)
                .nbNeurons = neuronCount(i)
                If Me.biasType >= TBias.NeuronAdded AndAlso i > 0 AndAlso i < Me.layerCount - 1 Then .nbNeurons -= 1
                .nbWeights = .nbNeurons
                If addBiasColumn AndAlso i < Me.layerCount - 1 Then .nbWeights += 1
                ReDim .Neurons(.nbNeurons - 1)
                For j = 0 To .nbNeurons - 1
                    .Neurons(j).signal = Me.minValue
                    .Neurons(j).err = 0.0!
                    If Me.biasType >= TBias.NeuronAdded AndAlso j = .nbNeurons - 1 Then .Neurons(j).signal = Me.maxValue
                    If i > 0 Then
                        nbWeights = Me.Layers(i - 1).nbWeights
                        ReDim .Neurons(j).w(nbWeights - 1)
                        ReDim .Neurons(j).dw(nbWeights - 1)
                        ReDim .Neurons(j).wCopy(nbWeights - 1)
                    End If
                Next j
            End With
        Next i
        'Me.nbInputNeurons = Me.Layers(0).nbNeurons
        'Me.nbOutputNeurons = Me.Layers(Me.layerCount - 1).nbNeurons
    End Sub

    ' Assign uniform random weights in [minValue, maxValue], rounded to
    ' nbRoundingDigits so test runs are reproducible.
    Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        For i = 1 To Me.layerCount - 1
            For j = 0 To Me.Layers(i).nbNeurons - 1
                For k = 0 To Me.Layers(i - 1).nbWeights - 1
                    With Me.Layers(i).Neurons(j)
                        Dim r# = rndShared.NextDouble(minValue, maxValue)
                        Dim rounded# = Math.Round(r, clsMLPGeneric.nbRoundingDigits)
                        .w(k) = CSng(rounded)
                        .dw(k) = 0
                        .wCopy(k) = 0
                    End With
                Next k
            Next j
        Next i
    End Sub

    Public Overrides Sub RoundWeights()
        ' Round the weights (to reproduce all tests exactly)
        For i As Integer = 1 To Me.layerCount - 1
            For j As Integer = 0 To Me.Layers(i).nbNeurons - 1
                For k As Integer = 0 To Me.Layers(i - 1).nbWeights - 1
                    With Me.Layers(i).Neurons(j)
                        Dim r! = .w(k)
                        Dim rounded# = Math.Round(r, clsMLPGeneric.nbRoundingDigits)
                        .w(k) = CSng(rounded)
                        .dw(k) = 0
                        .wCopy(k) = 0
                    End With
                Next k
            Next j
        Next i
    End Sub

    ' Load an explicit weight matrix into one layer (weights(j, k) = weight k
    ' of neuron j).
    Public Overrides Sub InitializeWeights(layer%, weights#(,))
        For j = 0 To Me.Layers(layer).nbNeurons - 1
            For k = 0 To Me.Layers(layer - 1).nbWeights - 1
                Me.Layers(layer).Neurons(j).w(k) = CSng(weights(j, k))
            Next k
        Next j
    End Sub

#End Region

#Region "Compute"

    ' Forward pass for one sample; publishes the result in
    ' lastOutputArray1DSingle, lastOutputArray1D and the 1-row matrix output.
    Public Overrides Sub TestOneSample(input!())
        Dim output1D!(Me.nbOutputNeurons - 1)
        TestOneSample(input, output1D)
        Me.lastOutputArray1DSingle = output1D
        Me.lastOutputArray1D = clsMLPHelper.Convert1DArrayOfSingleToDouble(Me.lastOutputArray1DSingle)
        Dim outputs2D#(0, Me.nbOutputNeurons - 1)
        clsMLPHelper.Fill2DArrayOfDouble(outputs2D, Me.lastOutputArray1D, 0)
        Me.output = outputs2D
    End Sub

    ' Forward pass for one sample, returning the raw output vector ByRef.
    Public Overrides Sub TestOneSample(input!(), ByRef output1D!())
        SetInputSignal(input)
        ForwardPropogateSignal()
        GetOutputSignal(output1D)
        Me.lastOutputArray1DSingle = output1D
        Me.lastOutputArray1D = clsMLPHelper.Convert1DArrayOfSingleToDouble(Me.lastOutputArray1DSingle)
        Dim outputs2D#(0, Me.nbOutputNeurons - 1)
        clsMLPHelper.Fill2DArrayOfDouble(outputs2D, Me.lastOutputArray1D, 0)
        Me.output = outputs2D
    End Sub

    ' One stochastic training step: forward pass, output error,
    ' back-propagation, then weight update.
    Public Overrides Sub TrainOneSample(input!(), target!())
        Dim output!(Me.nbOutputNeurons - 1)
        TestOneSample(input, output)
        Me.averageErrorOneSample = ComputeOutputError(target) ' 21/05/2021 Keep average error
        BackwardPropagateError()
        ComputeGradientAndAdjustWeights()
    End Sub

    Private Sub SetInputSignal(input!())
        For i = 0 To Me.nbInputNeurons - 1
            Me.Layers(0).Neurons(i).signal = input(i)
        Next i
    End Sub

    Private Sub ForwardPropogateSignal()
        ' Calculate and feedforward outputs from the first layer to the last
        For i = 1 To Me.layerCount - 1
            Dim nbNeurons = Me.Layers(i).nbNeurons
            For j = 0 To nbNeurons - 1
                Dim rSum! = 0
                For k = 0 To Me.Layers(i - 1).nbWeights - 1
                    ' Extra weight index (k >= nbNeurons) is the bias input,
                    ' fed with the constant maxValue
                    Dim signal! = Me.maxValue
                    If k < Me.Layers(i - 1).nbNeurons Then _
                        signal = Me.Layers(i - 1).Neurons(k).signal
                    rSum += Me.Layers(i).Neurons(j).w(k) * signal
                Next k
                ' Keep the pre-activation sum: used later for the derivative
                Me.Layers(i).Neurons(j).signalCopy = rSum
                If Not (Me.biasType = TBias.NeuronAdded AndAlso i < Me.layerCount - 1 AndAlso j = nbNeurons - 1) Then
                    Dim r# = Me.lambdaFnc.Invoke(rSum)
                    Me.Layers(i).Neurons(j).signal = CSng(r)
                End If
            Next j
        Next i
    End Sub

    ' Read the output layer signals, clipping non-finite or out-of-range
    ' values back into [minValue, maxValue].
    Private Sub GetOutputSignal(ByRef ouput!())
        For i = 0 To Me.nbOutputNeurons - 1
            ouput(i) = Me.Layers(Me.layerCount - 1).Neurons(i).signal
            If ouput(i) > floatMax Then ouput(i) = Me.maxValue
            If ouput(i) < floatMin Then ouput(i) = Me.minValue
            If Single.IsNaN(ouput(i)) Then ' -1.#IND
                ouput(i) = Me.minValue
            End If
            If Single.IsPositiveInfinity(ouput(i)) Then
                ouput(i) = Me.maxValue
            End If
            If Single.IsNegativeInfinity(ouput(i)) Then
                ouput(i) = Me.minValue
            End If
        Next i
    End Sub

    Public Sub BackwardPropagateError()
        ' Backward propagate error from the output layer through to the first layer
        For i = Me.layerCount - 2 To 0 Step -1
            For j = 0 To Me.Layers(i).nbNeurons - 1
                Dim sumError! = 0
                For k = 0 To Me.Layers(i + 1).nbNeurons - 1
                    sumError += Me.Layers(i + 1).Neurons(k).w(j) * Me.Layers(i + 1).Neurons(k).err
                Next k
                Dim signalCopy! = Layers(i).Neurons(j).signalCopy
                Dim deriv# = Me.lambdaFncD.Invoke(signalCopy)
                Me.Layers(i).Neurons(j).err = sumError * CSng(deriv)
            Next j
        Next i
    End Sub

    Public Sub ComputeGradientAndAdjustWeights()
        ' Gradient descend: Compute gradient and adjust weights
        ' Update weights for all of the neurons from the first to the last layer
        For i = 1 To Me.layerCount - 1
            For j = 0 To Me.Layers(i).nbNeurons - 1
                For k = 0 To Me.Layers(i - 1).nbWeights - 1
                    With Me.Layers(i).Neurons(j)
                        ' Bias input (k >= nbNeurons) contributes maxValue
                        Dim signal! = Me.maxValue
                        If k < Me.Layers(i - 1).nbNeurons Then _
                            signal = Me.Layers(i - 1).Neurons(k).signal
                        If .err > floatMax OrElse .err < floatMin Then
                            ' Diverging error: reset this weight
                            .w(k) = 0
                            .dw(k) = 0
                        Else
                            If signal = 0 AndAlso .dw(k) = 0 Then
                                .dw(k) = 0
                            Else
                                Dim adjust! = Me.learningRate * signal * .err
                                .w(k) += adjust '+ Me.weightAdjustment * .dw(k)
                                ' Momentum term, only when weightAdjustment is set
                                If Me.weightAdjustment <> 0 Then
                                    .w(k) += Me.weightAdjustment * .dw(k)
                                    .dw(k) = adjust
                                End If
                            End If
                        End If
                    End With
                Next k
            Next j
        Next i
    End Sub

#End Region

#Region "Error"

    ' Compute the output-layer error terms for one target vector. Returns the
    ' total absolute error and also updates averageErrorOneSample /
    ' averageErrorOneSampleSigned.
    Public Function ComputeOutputError!(target!())
        Dim totalErr! = 0
        Dim totalAbsErr! = 0
        Dim outputLayerIndex = Me.layerCount - 1
        For i = 0 To Me.nbOutputNeurons - 1
            Dim signal = Me.Layers(outputLayerIndex).Neurons(i).signal
            Dim delta = target(i) - signal
            Dim deriv#
            If Me.activFnc.DoesDerivativeDependOnOriginalFunction Then
                ' Optimization is possible in this case
                deriv = Me.lambdaFncDFOF.Invoke(signal)
            Else
                Dim signalCopy = Me.Layers(outputLayerIndex).Neurons(i).signalCopy
                deriv = Me.lambdaFncD.Invoke(signalCopy)
            End If
            Me.Layers(outputLayerIndex).Neurons(i).err = delta * CSng(deriv)
            totalAbsErr += Math.Abs(delta)
            totalErr += delta
        Next i
        Me.averageErrorOneSample = totalAbsErr / Me.nbOutputNeurons
        Me.averageErrorOneSampleSigned = totalErr / Me.nbOutputNeurons
        Return totalAbsErr
    End Function

#End Region

#Region "Get/Set weights"

    Public Overrides Function GetWeight#(layer%, neuron%, weight%)
        Dim ws! = Me.GetWeightSingle(layer, neuron, weight)
        Dim wd# = ws
        Return wd
    End Function

    Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%)
        Return Me.Layers(layer).Neurons(neuron).w(weight)
    End Function

    Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#)
        Dim ws! = CSng(weightWalue)
        SetWeightSingle(layer, neuron, weight, ws)
    End Sub

    Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!)
        Me.Layers(layer).Neurons(neuron).w(weight) = weightWalue
    End Sub

#End Region

End Class
modMLPClassicTest.vb
Imports Perceptron.Utility ' Matrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode

Module modMLPClassicTest

    ' Console entry point: run the XOR test, then wait for a key press.
    Sub MainClassicMLP()
        Console.WriteLine("Classic MLP with the classical XOR test.")
        ClassicMLPXorTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ' Classic MLP XOR test; nbXor selects 1, 2 or 3 parallel XOR problems.
    Public Sub ClassicMLPXorTest(Optional nbXor% = 1)
        Dim mlp As New clsMLPClassic
        mlp.ShowMessage("Classic MLP Xor test")
        mlp.ShowMessage("--------------------")
        mlp.Initialize(learningRate:=0.1!, weightAdjustment:=0.1!)
        Dim nbIterations%
        'mlp.SetActivationFunction(enumActivationFunction.Sigmoid)
        'nbIterations = 10000 ' Sigmoid: works
        mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent)
        nbIterations = 2000 ' Hyperbolic tangent: works fine
        'mlp.SetActivationFunction(enumActivationFunction.Gaussian)
        'nbIterations = 1000 ' Gaussian: works fine
        'mlp.SetActivationFunction(enumActivationFunction.Sinus)
        'nbIterations = 1000 ' Sinus: works fine
        'mlp.SetActivationFunction(enumActivationFunction.ArcTangent)
        'nbIterations = 1000 ' ArcTangent: works fine
        'mlp.SetActivationFunction(enumActivationFunction.ELU)
        'nbIterations = 2000 ' ELU: works
        'mlp.SetActivationFunction(enumActivationFunction.ReLu, gain:=0.9)
        'nbIterations = 1000 ' ReLU: works fine
        'mlp.SetActivationFunction(enumActivationFunction.ReLuSigmoid)
        'nbIterations = 10000 ' ReLUSigmoid: works?
' NOTE(review): extracted listing — original line breaks were collapsed; the same
' token stream is re-formatted below. The opening statements continue the Sub
' ClassicMLPXorTest started in the previous chunk.
'mlp.SetActivationFunction(enumActivationFunction.DoubleThreshold)
        'nbIterations = 10000 ' DoubleThreshold: works fine
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        mlp.nbIterations = nbIterations
        If nbXor = 1 Then
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True)
            mlp.printOutputMatrix = True
            mlp.nbIterations = 4000
        ElseIf nbXor = 2 Then
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True)
        End If
        mlp.Randomize()
        'mlp.Randomize(minValue:=-0.5, maxValue:=0.5)
        mlp.PrintWeights()
        WaitForKeyToStart()
        'mlp.InitWeights(1, {
        '    {0.28, 0.28, 0.76},
        '    {0.25, 0.88, 0.62}})
        'mlp.InitWeights(2, {
        '    {0.56, 0.92, 0.19}})
        mlp.Train()
        'mlp.Train(enumLearningMode.SemiStochastic)
        'mlp.Train(enumLearningMode.Stochastic)
        mlp.ShowMessage("Classic MLP Xor test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub

End Module
clsMLPEncog.vb
' From https://github.com/encog/encog-dotnet-core : C# -> VB .NET conversion
' https://www.nuget.org/packages/encog-dotnet-core
' <package id="encog-dotnet-core" version="3.4.0" targetFramework="net472" />
' https://www.heatonresearch.com/encog
' https://github.com/jeffheaton/encog-dotnet-core/issues/108 Porting to .Net Core #108
' The reason it is named "core" is because the library in Java, C#, C, and JavaScript that contain
' the core functions is named "core". (there is no relation to .Net core)
' Solution: disable NU1701 warning for the .Net core version of the project using .Net4.7 encog:
' (there is no .Net core version of encog, and the version 3.4.0 works fine with .Net core anyway)
' <PackageReference Include="encog-dotnet-core" Version="3.4.0">
' <NoWarn>NU1701</NoWarn>
' </PackageReference>
Imports Encog.Engine.Network.Activation
Imports Encog.ML.Data
Imports Encog.ML.Data.Basic
Imports Encog.ML.Train
Imports Encog.Neural.Networks
Imports Encog.Neural.Networks.Layers
Imports Encog.Neural.Networks.Training.Propagation.Resilient ' ResilientPropagation
Imports Encog.Neural.Networks.Training.Propagation.Back ' Backpropagation
Imports System.Text

' Vectorized MLP backed by the Encog library (BasicNetwork +
' ResilientPropagation or Backpropagation trainers).
Public Class clsMLPEncog : Inherits clsVectorizedMLPGeneric

    Private network As BasicNetwork
    Private trainingSet As IMLDataSet
    Private imlTrain As IMLTrain
    Public inputJaggedDblArray#()()
    Public targetJaggedDblArray#()()

    Public Overrides Function GetMLPType$()
        Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name
    End Function

    Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType
        Return enumActivationFunctionType.Library
    End Function

    ' Convert the Single 2D input/target arrays into the jagged Double arrays
    ' Encog expects; RProp is selected as the default training algorithm.
    Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
        MyBase.InitializeStruct(neuronCount, addBiasColumn)
        Me.trainingAlgorithm = enumTrainingAlgorithm.RProp
        If IsNothing(Me.inputArray) Then Exit Sub
        Dim inputArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.inputArray)
        Me.inputJaggedDblArray = clsMLPHelper.Transform2DArrayToJaggedArray(inputArrayDbl)
        Dim targetArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.targetArray)
        Me.targetJaggedDblArray = clsMLPHelper.Transform2DArrayToJaggedArray(targetArrayDbl)
    End Sub

    ' Build the whole Encog network (layers + activation) and the trainer.
    ' Note: this Sub both sets the activation function AND (re)creates the
    ' network/training set, so it must be called after InitializeStruct.
    Public Overrides Sub SetActivationFunction(actFnc As enumActivationFunction, Optional gain! = 1, Optional center! = 0)
        ' gain can only be 1 for Encog MLP
        gain = 1
        If actFnc = enumActivationFunction.HyperbolicTangent Then gain = 2
        center = 0
        If Me.trainingAlgorithm = enumTrainingAlgorithm.RProp Then
            Me.weightAdjustment = 0 ' Not used
            Me.learningRate = 0
            ' Learning rate is not use with ResilientPropagation:
            ' http://heatonresearch-site.s3-website-us-east-1.amazonaws.com/javadoc/encog-3.3/org/encog/neural/networks/training/propagation/resilient/ResilientPropagation.html
            ' One problem with the backpropagation algorithm is that the magnitude of the
            ' partial derivative is usually too large or too small. Further, the learning
            ' rate is a single value for the entire neural network. The resilient propagation
            ' learning algorithm uses a special update value (similar to the learning rate)
            ' for every neuron connection. Further these update values are automatically
            ' determined, unlike the learning rate of the backpropagation algorithm.
        End If
        MyBase.SetActivationFunction(actFnc, gain, center)
        If IsNothing(Me.inputJaggedDblArray) Then Exit Sub
        If Me.inputJaggedDblArray.Length = 0 Then Exit Sub
        Me.network = New BasicNetwork()
        ' Input layer
        Me.network.AddLayer(New BasicLayer(Nothing, Me.useBias, neuronCount:=Me.neuronCount(0)))
        'Dim act As Encog.Engine.Network.Activation.IActivationFunction
        Dim act As IActivationFunction
        Select Case actFnc
            Case enumActivationFunction.Sigmoid : act = New ActivationSigmoid()
            Case enumActivationFunction.HyperbolicTangent : act = New ActivationTANH()
            Case enumActivationFunction.Gaussian : act = New ActivationGaussian() ' Does not work?
            Case enumActivationFunction.Sinus : act = New ActivationSIN()
            Case Else
                Throw New NotImplementedException("This activation function is not available!")
        End Select
        For i = 0 To Me.layerCount - 3 ' Hidden layers
            Me.network.AddLayer(New BasicLayer(act, Me.useBias, Me.neuronCount(i + 1)))
        Next
        ' Output layer : no bias
        Me.network.AddLayer(New BasicLayer(act, hasBias:=False, neuronCount:=Me.neuronCount(Me.layerCount - 1)))
        Me.network.Structure.FinalizeStructure()
        ' Reset the weight matrix and the bias values: random weights to start
        Me.network.Reset()
        Me.trainingSet = New BasicMLDataSet(Me.inputJaggedDblArray, Me.targetJaggedDblArray)
        If Me.trainingAlgorithm = enumTrainingAlgorithm.RProp Then
            ' maxStep: The maximum that a delta can reach
            Me.imlTrain = New ResilientPropagation(Me.network, Me.trainingSet, initialUpdate:=0.1#, maxStep:=50.0#)
        Else
            Me.imlTrain = New Backpropagation(Me.network, Me.trainingSet, Me.learningRate, momentum:=Me.weightAdjustment)
        End If
    End Sub

    ' Load an explicit weight matrix into Encog layer numLayer (1-based);
    ' the last column of weights holds the bias when useBias is set.
    Public Overrides Sub InitializeWeights(numLayer%, weights#(,))
        Dim i = numLayer - 1
        Dim nbNeuronsLayer = Me.network.GetLayerNeuronCount(i + 1)
        Dim nbNeuronsPreviousLayer = Me.network.GetLayerNeuronCount(i)
        For j = 0 To nbNeuronsLayer - 1
            For k = 0 To nbNeuronsPreviousLayer - 1
                Dim r = weights(j, k)
                Me.network.SetWeight(i, k, j, r)
            Next k
            If Me.useBias Then
                Dim r = weights(j, nbNeuronsPreviousLayer)
                Me.network.SetWeight(i, nbNeuronsPreviousLayer, j, r)
            End If
        Next j
    End Sub

    ' NOTE(review): minValue/maxValue are accepted for interface compatibility
    ' but ignored — Encog's network.Reset() chooses its own random range.
    Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        ' Reset the weight matrix and the bias values: random weights to start
        Me.network.Reset()
        RoundWeights()
    End Sub

    Public Overrides Sub RoundWeights()
        ' Round the weights (to reproduce all tests exactly)
        For i = 1 To Me.layerCount - 1
            Dim nbNeuronsLayer = Me.network.GetLayerNeuronCount(i)
            Dim nbNeuronsPreviousLayer = Me.network.GetLayerNeuronCount(i - 1)
            For j = 0 To nbNeuronsLayer - 1
                For k = 0 To nbNeuronsPreviousLayer - 1
                    Dim weight = Me.network.GetWeight(i - 1, k, j)
                    Dim rounded = Math.Round(weight, clsMLPGeneric.nbRoundingDigits)
                    Me.network.SetWeight(i - 1, k, j, rounded)
                Next k
                If Me.useBias Then
                    Dim weightT = Me.network.GetWeight(i - 1, nbNeuronsPreviousLayer, j)
                    Dim rounded = Math.Round(weightT, clsMLPGeneric.nbRoundingDigits)
                    Me.network.SetWeight(i - 1, nbNeuronsPreviousLayer, j, rounded)
                End If
            Next j
        Next i
    End Sub

    ' Full vectorized training loop: nbIterations Encog iterations, then
    ' close the session and publish outputs and average error.
    Public Overrides Sub TrainVector()
        Me.learningMode = enumLearningMode.Vectorial
        Me.vectorizedLearningMode = True
        For iteration = 0 To Me.nbIterations - 1
            Me.numIteration = iteration
            TrainVectorOneIteration()
            If Me.printOutput_ Then PrintOutput(iteration)
        Next
        CloseTrainingSession() ' 21/11/2020
        SetOuput1D()
        'ComputeError()
        ComputeAverageError() ' 14/11/2020
    End Sub

    Public Overrides Sub CloseTrainingSession()
        ' Should be called once training is complete and no more iterations are
        ' needed. Calling iteration again will simply begin the training again, and
        ' require finishTraining to be called once the new training session is
        ' complete.
        ' It is particularly important to call finishTraining for multithreaded
        ' training techniques.
        Me.imlTrain.FinishTraining()
    End Sub

    Public Overrides Sub TrainVectorOneIteration()
        Me.imlTrain.Iteration()
        Me.averageError = Me.imlTrain.Error
    End Sub

    ' Run the network on the whole input set and publish the results as
    ' lastOutputArray1DSingle and the 2D output matrix.
    Public Overrides Sub SetOuput1D()
        Dim nbInputs = Me.inputArray.GetLength(0)
        Dim nbTargets = Me.targetArray.GetLength(0)
        Dim lengthTot = nbTargets * Me.nbOutputNeurons
        Dim outputs1D#(lengthTot - 1)
        Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1)
        Dim k = 0
        For i = 0 To nbInputs - 1
            Dim outputs#(Me.nbOutputNeurons - 1)
            Me.network.Compute(Me.inputJaggedDblArray(i), outputs)
            For j = 0 To Me.nbOutputNeurons - 1
                outputs2D(i, j) = outputs(j)
                outputs1D(k) = outputs(j)
                k += 1
            Next
        Next
        Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs1D)
        Me.output = outputs2D
    End Sub

    ' NOTE(review): the inputs/targets/learningMode parameters are ignored —
    ' vectorial training on the stored training set is the unique learning
    ' mode for this MLP (see the commented-out alternative below).
    Public Overrides Sub TrainSystematic(inputs!(,), targets!(,), Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
        'If learningMode = enumLearningMode.Vectorial Then
        ' This is the unique learning mode for this MLP
        TrainVector()
        ' Exit Sub
        'End If
        'Me.vectorizedLearningMode = False
        'TrainAllSamples(inputs, targets)
    End Sub

    Public Overrides Sub TrainStochastic(inputs!(,), targets!(,))
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TrainSemiStochastic(inputs!(,), targets!(,))
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TrainOneSample(input!(), target!())
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TestOneSample(input!())
        Dim inputArrayDbl = clsMLPHelper.Convert1DArrayOfSingleToDouble(input)
        Dim outputs#(Me.nbOutputNeurons - 1)
        Me.network.Compute(inputArrayDbl, outputs)
        Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs)
        Dim outputs2D#(0, Me.nbOutputNeurons - 1)
        clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputs, 0)
        Me.output = outputs2D
    End Sub

    ' Encog's GetWeight/SetWeight use (layer - 1) and swap the neuron/weight
    ' index order relative to this class's convention.
    Public Overrides Function GetWeight#(layer%, neuron%, weight%)
        Return Me.network.GetWeight(layer - 1, weight, neuron)
    End Function

    Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%)
        Dim wd# = Me.GetWeight(layer, neuron, weight)
        Dim ws! = CSng(wd)
        Return ws
    End Function

    Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#)
        Me.network.SetWeight(layer - 1, weight, neuron, weightWalue)
    End Sub

    Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!)
        Dim wd# = weightWalue
        SetWeight(layer, neuron, weight, wd)
    End Sub

End Class
modMLPEncogTest.vb
Imports Perceptron.Utility ' Matrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode

Module modMLPEncogTest

    ' Console entry point: run the XOR test, then wait for a key press.
    Sub MainEncogMLP()
        Console.WriteLine("Encog MLP with the classical XOR test.")
        EncogMLPXorTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ' Encog MLP XOR test; nbXor selects 1, 2 or 3 parallel XOR problems.
    Public Sub EncogMLPXorTest(Optional nbXor% = 1)
        Dim mlp As New clsMLPEncog
        mlp.ShowMessage("Encog MLP Xor test")
        mlp.ShowMessage("------------------")
        mlp.inputArray = m_inputArrayXOR
        mlp.targetArray = m_targetArrayXOR
        'mlp.nbIterations = 500 ' Sigmoid: works
        mlp.nbIterations = 2000 ' Hyperbolic tangent: works fine
        'mlp.nbIterations = 20000 ' Stochastic
        mlp.Initialize(learningRate:=0)
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        If nbXor = 1 Then
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True)
            mlp.printOutputMatrix = True
        ElseIf nbXor = 2 Then
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            'mlp.nbIterations = 20000
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True)
        End If
' NOTE(review): extracted listing — original line breaks were collapsed; the same
' token stream is re-formatted below. The opening statements continue the Sub
' EncogMLPXorTest started in the previous chunk.
'mlp.SetActivationFunction(enumActivationFunction.Sigmoid)
        mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain:=2)
        mlp.Randomize()
        mlp.PrintWeights()
        WaitForKeyToStart()
        mlp.TrainVector() ' Works fine
        mlp.ShowMessage("Encog MLP Xor test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub

End Module
clsMLPKeras.vb
' From https://github.com/SciSharp/Keras.NET : C# -> VB .NET conversion
' https://www.nuget.org/packages/Keras.NET Nuget install
' https://scisharp.github.io/SciSharp Other .NET Machine Learning projects
' https://scisharp.github.io/Keras.NET Documentation
' https://keras.io Documentation (not only for Keras.NET but Keras)
' Keras.NET -> packages added:
' <package id = "Keras.NET" version="3.8.5" targetFramework="net472" />
' <package id = "Microsoft.CSharp" version="4.7.0" targetFramework="net472" />
' <package id = "Numpy.Bare" version="3.8.1.25" targetFramework="net472" />
' <package id = "Python.Runtime.NETStandard" version="3.7.1" targetFramework="net472" />
' <package id = "System.Reflection.Emit" version="4.7.0" targetFramework="net472" />
' Python 3.8 is required at runtime:
' https://www.python.org/ftp/python/3.8.8/python-3.8.8-amd64.exe
' https://www.python.org/downloads
' For PowerShell installations, type:
' python -mpip install numpy : fix "No module named 'numpy'"
' python -mpip install keras : fix "No module named 'keras'"
' python -mpip install tensorflow : fix "Keras requires TensorFlow 2.2 or higher"
Imports Keras.Layers
Imports Keras.Models
Imports Keras.Optimizers
Imports Numpy
Imports System.Text

' Vectorized MLP backed by Keras.NET (functional model, SGD optimizer,
' binary_crossentropy loss); runs on an embedded Python engine.
Public Class clsMLPKeras : Inherits clsVectorizedMLPGeneric

    Private inputNDA, targetNDA As NDarray
    Private model As Model
    Dim weightsNDA As List(Of NDarray)
    Public Const batch_size% = 2
    Private nbHiddenNeurons%
    Public inputJaggedDblArray#()()
    Public targetJaggedDblArray#()()

    Public Overrides Function GetMLPType$()
        Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name
    End Function

    Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType
        Return enumActivationFunctionType.LibraryOptimized
    End Function

    ' Prepare jagged Double arrays and the NumPy train data. Note: useBias is
    ' forced to False here (bias handling is left to Keras' Dense layers).
    Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
        MyBase.InitializeStruct(neuronCount, addBiasColumn)
        Me.nbHiddenNeurons = Me.neuronCount(Me.layerCount - 2) ' lastHidden
        Me.useBias = False 'addBiasColumn
        Me.nbIterationsBatch = batch_size
        If IsNothing(Me.inputArray) Then Exit Sub
        Dim inputArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.inputArray)
        Me.inputJaggedDblArray = clsMLPHelper.Transform2DArrayToJaggedArray(inputArrayDbl)
        Dim targetArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.targetArray)
        Me.targetJaggedDblArray = clsMLPHelper.Transform2DArrayToJaggedArray(targetArrayDbl)
        ' 29/11/2020 Moved here
        ' Load train data
        Me.inputNDA = Me.inputArray
        Me.targetNDA = np.array(Me.targetArray, dtype:=np.float32)
        Me.exampleCount = Me.inputArray.GetLength(0)
    End Sub

    ' Build and compile the whole Keras functional model. Hidden layers are
    ' always "relu"; actFnc only selects the output layer activation.
    Public Overrides Sub SetActivationFunction(actFnc As enumActivationFunction, Optional gain! = 1, Optional center! = 0)
        gain = 1 ' gain can only be 1 for Keras MLP (sigmoid)
        If actFnc = enumActivationFunction.HyperbolicTangent Then gain = 2
        center = 0
        'Me.weightAdjustment = 0 ' Not used
        MyBase.SetActivationFunction(actFnc, gain, center)
        Me.ShowMessage(Now() & " Initializing python engine...")
        ' 29/11/2020 Moved above
        '' Load train data
        'Me.inputNDA = Me.inputArray
        'Me.targetNDA = np.array(Me.targetArray, dtype:=np.float32)
        'Me.exampleCount = Me.inputArray.GetLength(0)
        'batch_size = Me.exampleCount ' Does not work?
        ' Build functional model
        ' Minimal XOR example:
        'Dim hidden1 = New Dense(Me.nbHiddenNeurons, activation:="relu").Set(input)
        'Dim hidden2 = New Dense(Me.nbHiddenNeurons, activation:="relu").Set(hidden1)
        'Dim outputLayer = New Dense(Me.nbOutputNeurons, activation:="sigmoid").Set(hidden2)
        Dim input = New Input(shape:=New Keras.Shape(Me.nbInputNeurons))
        Dim prevHidden As BaseLayer = Nothing
        Dim lastHidden As BaseLayer = Nothing
        Dim nbNeurons = Me.nbHiddenNeurons
        For i = 0 To Me.layerCount - 3
            If i = 0 Then prevHidden = input
            lastHidden = New Dense(Me.nbHiddenNeurons, activation:="relu").Set(prevHidden)
            ' relu works better than sigmoid or tanh in hidden layer(s):
            'lastHidden = New Dense(Me.nbHiddenNeurons, activation:="sigmoid").Set(prevHidden)
            'Select Case Me.m_actFunc
            '    Case enumActivationFunction.Sigmoid
            '        lastHidden = New Dense(Me.nbHiddenNeurons,
            '            activation:="sigmoid").Set(prevHidden)
            '    Case enumActivationFunction.HyperbolicTangent
            '        lastHidden = New Dense(Me.nbHiddenNeurons,
            '            activation:="tanh").Set(prevHidden)
            '    Case Else
            '        Throw New NotImplementedException(
            '            "This activation function is not available!")
            'End Select
            prevHidden = lastHidden
        Next
        Dim outputLayer As BaseLayer = Nothing
        Select Case Me.m_actFunc
            Case enumActivationFunction.Sigmoid
                outputLayer = New Dense(Me.nbOutputNeurons, activation:="sigmoid").Set(lastHidden)
            Case enumActivationFunction.HyperbolicTangent
                outputLayer = New Dense(Me.nbOutputNeurons, activation:="tanh").Set(lastHidden)
            Case Else
                Throw New NotImplementedException("This activation function is not available!")
        End Select
        Me.model = New Keras.Models.Model(New Input() {input}, New BaseLayer() {outputLayer})
        ' Other optimizer:
        'Me.model.Compile(
        '    optimizer:=New Adam(lr:=Me.learningRate),
        '    loss:="binary_crossentropy",
        '    metrics:=New String() {"accuracy"})
        Me.model.Compile(optimizer:=New SGD(lr:=Me.learningRate, momentum:=Me.weightAdjustment), loss:="binary_crossentropy", metrics:=New String() {"accuracy"})
        ' Other possible loss: mse
        Me.ShowMessage(Now() & " Initializing python engine: Done.")
    End Sub

    ' Accumulate weight arrays layer by layer (kernel on odd numLayer, bias on
    ' even), then push the whole list to the model on the last call.
    Public Overrides Sub InitializeWeights(numLayer%, weights#(,))
        If numLayer = 1 Then Me.weightsNDA = New List(Of NDarray)
        Dim nbWeightLayers = (Me.layerCount - 1) * 2
        Dim nda As NDarray
        If numLayer Mod 2 = 0 Then
            Dim adbl = clsMLPHelper.Transform2DArrayToJaggedArray(weights)
            nda = adbl(0)
        Else
            nda = weights
        End If
        Me.weightsNDA.Add(nda)
        If numLayer = nbWeightLayers Then Me.model.SetWeights(Me.weightsNDA)
    End Sub

    ' NOTE(review): despite its name this does not re-randomize — it reads the
    ' model's current weights, rounds them to nbRoundingDigits and writes them
    ' back (for reproducible tests). minValue/maxValue are unused.
    Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        ' Round the weights (to reproduce all tests exactly)
        Dim weights = Me.model.GetWeights()
        Dim nbWeightLayers = (Me.layerCount - 1) * 2
        Dim ws(nbWeightLayers - 1)() As Single
        For i = 0 To nbWeightLayers - 1
            ws(i) = weights(i).GetData(Of Single)()
        Next
        For i = 1 To nbWeightLayers
            Dim wsi = ws(i - 1)
            Dim nbItems = wsi.GetUpperBound(0) + 1
            Dim nbNeuronsPreviousLayer = weights(i - 1).len
            Dim nbNeuronsLayer = nbItems \ nbNeuronsPreviousLayer
            Dim w2#(nbNeuronsPreviousLayer - 1, nbNeuronsLayer - 1)
            Dim l = 0
            For j = 0 To nbNeuronsPreviousLayer - 1
                For k = 0 To nbNeuronsLayer - 1
                    Dim weight = wsi(l)
                    Dim rounded = Math.Round(weight, clsMLPGeneric.nbRoundingDigits)
                    wsi(l) = CSng(rounded)
                    l += 1
                Next k
                If (i - 1) Mod 2 = 0 Then
                    clsMLPHelper.Fill2DArrayOfDoubleByArrayOfSingle2(w2, wsi, j)
                Else
                    w2(j, 0) = wsi(j)
                End If
            Next j
            If Not ((i - 1) Mod 2 = 0) Then w2 = clsMLPHelper.Swap2DArray(w2)
            InitializeWeights(i, w2)
        Next i
    End Sub

    Public Overrides Sub TrainVector()
        Throw New NotImplementedException("Use TrainVectorBatch(nbIterationsBatch)!")
    End Sub

    ' Fit the model for nbIterationsBatch epochs in one call.
    Public Overrides Sub TrainVectorBatch(nbIterationsBatch%)
        Me.learningMode = enumLearningMode.VectorialBatch
        Me.vectorizedLearningMode = True
        Dim history = Me.model.Fit(Me.inputNDA, Me.targetNDA, batch_size:=batch_size, epochs:=nbIterationsBatch, verbose:=0)
        Dim err = history.HistoryLogs("loss").GetValue(0)
        ' Does not work fine, too high!?
        'Me.averageError = CDbl(err)
    End Sub

    Public Sub ModelEvaluate()
        Me.model.Evaluate(Me.inputNDA, Me.targetNDA, batch_size:=batch_size, verbose:=1)
    End Sub

    Public Overrides Sub TrainVectorOneIteration()
        Throw New NotImplementedException("Use TrainVectorBatch(nbIterationsBatch)!")
    End Sub

    Public Overrides Sub SetOuput1D()
        ' ForwardPropogateSignal
        Dim score = model.Predict(Me.inputNDA, verbose:=0)
        Dim outputs!() = score.GetData(Of Single)()
        'Dim outputs#() = score.GetData(Of Double)() ' Does not work?
        Dim nbInputs = Me.inputArray.GetLength(0)
        Dim nbTargets = Me.targetArray.GetLength(0)
        Dim lengthTot = nbTargets * Me.nbOutputNeurons
        Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1)
        Dim k = 0
        For i = 0 To nbInputs - 1
            For j = 0 To Me.nbOutputNeurons - 1
                outputs2D(i, j) = outputs(i * Me.nbOutputNeurons + j)
                k += 1
            Next
        Next
        'Me.lastOutputArray1D = outputs
        'Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs)
        Me.lastOutputArray1DSingle = outputs
        Me.output = outputs2D
    End Sub

    ' Predict for the single sample currently loaded in inputNDA; throws when
    ' the model returns NaN (i.e. the python engine/model was not initialized).
    Public Sub SetOuput1DOneSample()
        ' ForwardPropogateSignal
        Dim score = model.Predict(Me.inputNDA, verbose:=0)
        'Dim score = model.Predict(Me.inputNDA, verbose:=1)
        Dim outputs!() = score.GetData(Of Single)()
        Dim o1 = outputs(0)
        If Single.IsNaN(o1) Then
            Debug.WriteLine("!")
            Throw New Exception("Keras MLP model is not initialized!")
        End If
        Dim nbInputs = 1
        Dim nbTargets = 1
        Dim lengthTot = nbTargets * Me.nbOutputNeurons
        Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1)
        Dim k = 0
        For i = 0 To nbInputs - 1
            For j = 0 To Me.nbOutputNeurons - 1
                outputs2D(i, j) = outputs(i * Me.nbOutputNeurons + j)
                k += 1
            Next
        Next
        Me.lastOutputArray1DSingle = outputs
        Me.output = outputs2D
    End Sub

    ' NOTE(review): TrainVectorBatch is called here without an argument while
    ' the override above takes nbIterationsBatch% — presumably the base class
    ' declares an overload/optional form; verify in clsVectorizedMLPGeneric.
    Public Overrides Sub TrainSystematic(inputs!(,), targets!(,), Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
        ' This is the unique learning mode for this MLP
        TrainVectorBatch()
    End Sub

    Public Overrides Sub TrainStochastic(inputs!(,), targets!(,))
        ' TrainStochastic requires TrainOneSample
        ' Possibility: shuffle:= True in model.fit()
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TrainSemiStochastic(inputs!(,), targets!(,))
        ' TrainSemiStochastic requires TrainOneSample
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TrainOneSample(input!(), target!())
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TestOneSample(input!())
        Dim inputsDble#(0, input.Length - 1)
        clsMLPHelper.Fill2DArrayOfDoubleByArrayOfSingle(inputsDble, input, 0)
        Me.inputNDA = inputsDble
        SetOuput1DOneSample()
    End Sub

    ' Render all weight arrays (kernel/bias pairs per layer) as text, using
    ' the given numeric format.
    Public Overrides Function ShowWeights$(Optional format$ = format2Dec)
        Dim sb As New StringBuilder
        If Me.learningMode = enumLearningMode.VectorialBatch Then _
            sb.AppendLine("nb iterations batch=" & Me.nbIterationsBatch)
        sb.Append(Me.ShowParameters())
        For i = 0 To Me.layerCount - 1
            sb.AppendLine("Neuron count(" & i & ")=" & Me.neuronCount(i))
        Next
        sb.AppendLine("")
        Dim weights = Me.model.GetWeights()
        Dim nbWeightLayers = (Me.layerCount - 1) * 2
        Dim ws(nbWeightLayers - 1)() As Single
        For i = 0 To nbWeightLayers - 1
            ws(i) = weights(i).GetData(Of Single)()
        Next
        For i = 1 To nbWeightLayers
            sb.AppendLine("W(" & i & ")={")
            Dim wsi = ws(i - 1)
            Dim nbItems = wsi.GetUpperBound(0) + 1
            Dim nbNeuronsPreviousLayer = weights(i - 1).len
            Dim nbNeuronsLayer = nbItems \ nbNeuronsPreviousLayer
            ' Even entries are 1D bias vectors, odd entries 2D kernels
            Dim oneDim = (i Mod 2 = 0)
            Dim l = 0
            For j = 0 To nbNeuronsPreviousLayer - 1
                sb.Append(" ")
                If Not oneDim Then sb.Append("{")
                For k = 0 To nbNeuronsLayer - 1
                    Dim weight = wsi(l)
                    Dim sVal$ = weight.ToString(format).ReplaceCommaByDot()
                    sb.Append(sVal)
                    If Me.useBias OrElse k < nbNeuronsLayer - 1 Then sb.Append(", ")
                    l += 1
                Next k
                If Not oneDim Then sb.Append("}")
                If j < nbNeuronsPreviousLayer - 1 Then
                    sb.Append(",")
                    If Not oneDim Then sb.Append(vbLf)
                End If
            Next j
            sb.Append("}" & vbLf)
            If i < nbWeightLayers Then sb.AppendLine()
        Next i
        Return sb.ToString()
    End Function

End Class
modMLPKerasTest.vb
Imports Perceptron.Utility ' Matrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode

Module modMLPKerasTest

    ' Console entry point: run the XOR test, then wait for a key press.
    Sub MainKerasMLP()
        Console.WriteLine("Keras MLP with the classical XOR test.")
        KerasMLPXorTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ' Keras MLP XOR test; nbXor selects 1, 2 or 3 parallel XOR problems.
    ' (This Sub continues past the end of this chunk.)
    Public Sub KerasMLPXorTest(Optional nbXor% = 1)
        Dim mlp As New clsMLPKeras
        mlp.ShowMessage("Keras MLP Xor test")
        mlp.ShowMessage("------------------")
        mlp.inputArray = m_inputArrayXOR
        mlp.targetArray = m_targetArrayXOR
        mlp.nbIterations = 1500 ' Sigmoid: works
        'mlp.nbIterations = 2500 ' Sigmoid: works
        'mlp.nbIterations = 2000 ' Hyperbolic tangent: works fine
        'mlp.Initialize(learningRate:=0.001!)
        'mlp.Initialize(learningRate:=0.01!)
        mlp.Initialize(learningRate:=0.02!)
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        If nbXor = 1 Then
            'mlp.nbIterations = 1000
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            'mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR291, addBiasColumn:=False)
            'mlp.InitializeStruct(m_neuronCountXOR2_10_1, addBiasColumn:=False)
            mlp.InitializeStruct(m_neuronCountXOR2_16_1, addBiasColumn:=False)
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2661, addBiasColumn:=False)
            'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True)
            mlp.printOutputMatrix = True
        ElseIf nbXor = 2 Then
            mlp.nbIterations = 1500
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            'mlp.InitializeStruct(m_neuronCount2XOR482, addBiasColumn:=False)
            mlp.InitializeStruct(m_neuronCount2XOR4_10_2, addBiasColumn:=False)
        ElseIf nbXor = 3 Then
            mlp.nbIterations = 1500
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            'mlp.InitializeStruct(m_neuronCount3XOR673,
addBiasColumn:=False) 'mlp.InitializeStruct(m_neuronCount3XOR683, addBiasColumn:=False) mlp.InitializeStruct(m_neuronCount3XOR6_10_3, addBiasColumn:=False) End If mlp.SetActivationFunction(enumActivationFunction.Sigmoid) 'mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain:=2) mlp.Randomize() mlp.PrintWeights() WaitForKeyToStart() mlp.Train(learningMode:=enumLearningMode.VectorialBatch) ' Works fine mlp.ShowMessage("Keras MLP Xor test: Done.") If nbXor > 1 Then Exit Sub WaitForKeyToContinue("Press a key to print MLP weights") mlp.PrintWeights() End Sub End Module clsMatrixMLP.vb ' From https://github.com/nlabiris/perceptrons : C# -> VB .NET conversion Imports Perceptron.Utility ' Matrix Imports Perceptron.MLP.ActivationFunction Imports System.Text ' StringBuilder ' Note: Me.weightAdjustment is not used in this implementation ''' <summary> ''' Multi-Layer Perceptron ''' </summary> Public Class clsMPLMatrix : Inherits clsMLPGeneric ''' <summary> ''' hidden x input weights matrix ''' </summary> Private weights_ih As Matrix ''' <summary> ''' ouput x hidden weights matrix ''' </summary> Private weights_ho As Matrix ''' <summary> ''' Hidden bias matrix ''' </summary> Private bias_h As Matrix ''' <summary> ''' Output bias matrix ''' </summary> Private bias_o As Matrix Private m_weights!()() Private m_biases!()() Private input, hidden As Matrix Private nbHiddenNeurons% Public Overrides Function GetMLPType$() Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name End Function Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType Return enumActivationFunctionType.Optimized End Function Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean) MyBase.InitializeStruct(neuronCount, addBiasColumn) Me.nbHiddenNeurons = Me.neuronCount(1) Me.weightAdjustment = 0 ' Not used If Me.layerCount <> 3 Then ' ToDo: declare and use Me.weights_ih2 to compute 2 hidden layers Throw New 
ArgumentException(
                "This Matrix implementation can only compute one hidden layer!")
            ' NOTE(review): unreachable - the Throw above exits first
            Me.layerCount = 3
        End If
        Me.nbOutputNeurons = neuronCount(Me.layerCount - 1)
        ' Allocate the weight matrices: hidden x input and output x hidden
        Dim dbleArray_ih#(Me.nbHiddenNeurons - 1, Me.nbInputNeurons - 1)
        Me.weights_ih = dbleArray_ih
        Dim dbleArray_ho#(Me.nbOutputNeurons - 1, Me.nbHiddenNeurons - 1)
        Me.weights_ho = dbleArray_ho
        Me.useBias = addBiasColumn
        If Me.useBias Then
            ' Single-column bias matrices for the hidden and output layers
            Dim dbleArray_bh#(Me.nbHiddenNeurons - 1, 0)
            Dim dbleArray_bo#(Me.nbOutputNeurons - 1, 0)
            Me.bias_h = dbleArray_bh
            Me.bias_o = dbleArray_bo
        End If
    End Sub

    ''' <summary>
    ''' Randomize weights
    ''' </summary>
    Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        Me.rnd = New Random()
        Me.weights_ih.Randomize(Me.rnd, minValue, maxValue)
        Me.weights_ho.Randomize(Me.rnd, minValue, maxValue)
        If Me.useBias Then
            Me.bias_h.Randomize(Me.rnd, minValue, maxValue)
            Me.bias_o.Randomize(Me.rnd, minValue, maxValue)
        End If
    End Sub

    ''' <summary>
    ''' Load the weights (with an optional trailing bias column) of one layer;
    ''' when the last layer arrives, rebuild the four internal matrices.
    ''' </summary>
    Public Overrides Sub InitializeWeights(layer%, weights#(,))
        If layer = 1 Then
            ReDim Me.m_weights(0 To Me.layerCount - 2)
            ReDim Me.m_biases(0 To Me.layerCount - 2)
        End If
        Dim i = layer - 1
        Dim nbNeuronsLayer = Me.neuronCount(i + 1)
        Dim nbWeights = nbNeuronsLayer * Me.neuronCount(i)
        ReDim Me.m_weights(i)(0 To nbWeights - 1)
        ReDim Me.m_biases(i)(0 To nbNeuronsLayer - 1)
        For j = 0 To nbNeuronsLayer - 1
            Me.m_biases(i)(j) = 0
        Next j
        Dim nbNeuronsPreviousLayer = Me.neuronCount(i)
        Dim l = 0
        For j = 0 To nbNeuronsLayer - 1
            For k = 0 To nbNeuronsPreviousLayer - 1
                Dim r = weights(j, k)
                Me.m_weights(i)(l) = CSng(r)
                l += 1
            Next k
            If Me.useBias Then
                ' Bias is stored in the extra trailing column of weights
                Dim r = weights(j, nbNeuronsPreviousLayer)
                Me.m_biases(i)(j) = CSng(r)
            End If
        Next j
        If layer = Me.layerCount - 1 Then
            ' All layers received: rebuild the matrix form
            Dim w1, w2, b1, b2 As Matrix
            Dim input = Me.nbInputNeurons
            Dim hidden = Me.neuronCount(1)
            Dim ouput = Me.nbOutputNeurons
            w1 = clsMLPHelper.TransformArrayTo2DArray(Me.m_weights(0), hidden, input)
            w2 = clsMLPHelper.TransformArrayTo2DArray(Me.m_weights(1), ouput, hidden)
            b1 = clsMLPHelper.TransformArrayTo2DArray(Me.m_biases(0), hidden, 1)
            b2 = clsMLPHelper.TransformArrayTo2DArray(Me.m_biases(1), ouput, 1)
            Me.weights_ih = w1
            Me.weights_ho = w2
            Me.bias_h = b1
            Me.bias_o = b2
        End If
    End Sub

    ''' <summary>
    ''' Test one sample
    ''' </summary>
    Public Overrides Sub TestOneSample(input!())
        ForwardPropogateSignal(input)
        Me.lastOutputArray1DSingle = Me.output.ToArrayOfSingle()
    End Sub

    ''' <summary>
    ''' Propagate the input signal into the MLP
    ''' </summary>
    Private Sub ForwardPropogateSignal(inputsArray!())
        ' Generating the Hidden Outputs
        Me.input = Matrix.FromArraySingle(inputsArray)
        If Me.useBias Then
            Me.hidden = Matrix.MultiplyAddAndMap(Me.weights_ih, Me.input, Me.bias_h, Me.lambdaFnc)
        Else
            Me.hidden = Matrix.MultiplyAndMap(Me.weights_ih, Me.input, Me.lambdaFnc)
        End If
        ' Generating the output's output!
        If Me.useBias Then
            Me.output = Matrix.MultiplyAddAndMap(Me.weights_ho, Me.hidden, Me.bias_o, Me.lambdaFnc)
        Else
            Me.output = Matrix.MultiplyAndMap(Me.weights_ho, Me.hidden, Me.lambdaFnc)
        End If
    End Sub

    Private Sub BackwardPropagateError()
        ' Calculate gradient
        ' Calculate hidden -> output delta weights
        ' Adjust the weights by deltas
        ' Calculate the hidden layer errors
        ' Me.weightAdjustment is not used in this implementation
        BackwardPropagateErrorComputeGradientAndAdjustWeights(
            Me.output, Me.lastError, Me.hidden, Me.learningRate,
            Me.weights_ho, Me.bias_o)
        ' Calculate the hidden layer errors
        Dim hidden_errors = Matrix.TransposeAndMultiply1(Me.weights_ho, Me.lastError)
        ' Calculate hidden gradient
        ' Calculate input -> hidden delta weights
        ' Adjust the bias by its deltas (which is just the gradients)
        BackwardPropagateErrorComputeGradientAndAdjustWeights(
            Me.hidden, hidden_errors, Me.input, Me.learningRate,
            Me.weights_ih, Me.bias_h)
    End Sub

    ''' <summary>
    ''' Train MLP with one sample
    ''' </summary>
    Public Overrides Sub TrainOneSample(inputsArray!(), targetsArray!())
        ForwardPropogateSignal(inputsArray)
        ' Calculate the error: ERROR = TARGETS - OUTPUTS
        ComputeErrorOneSampleSpecific(targetsArray)
        Me.averageErrorOneSample = ComputeAverageErrorFromLastError()
        Me.averageErrorOneSampleSigned = ComputeAverageSignedErrorFromLastError()
        BackwardPropagateError()
    End Sub

    ''' <summary>
    ''' Gradient descend: Compute gradient and adjust weights
    ''' </summary>
    Public Sub BackwardPropagateErrorComputeGradientAndAdjustWeights(
            final As Matrix, error_ As Matrix, original As Matrix,
            adjustment!, ByRef weight As Matrix, ByRef bias As Matrix)
        ' Calculate gradient
        Dim gradient = Matrix.Map(final, lambdaFncDFOF)
        gradient *= error_
        gradient *= adjustment
        ' Calculate original -> final delta weights
        Dim weight_deltas = Matrix.TransposeAndMultiply2(original, gradient)
        ' Adjust the weights by deltas
        weight += weight_deltas
        ' Adjust the bias by its deltas (which is just the gradients)
        If Me.useBias Then bias += gradient
    End Sub

    ''' <summary>
    ''' Compute error from output and target matrices
    ''' </summary>
    Private Sub ComputeErrorOneSampleSpecific(target!())
        ' Calculate the error: ERROR = TARGETS - OUTPUTS
        Me.lastError = Matrix.SubtractFromArraySingle(target, Me.output)
    End Sub

    Public Overrides Sub TrainSystematic(inputs!(,), targets!(,),
            Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
        MyBase.TrainSystematic(inputs, targets, learningMode)
    End Sub

    Public Overrides Function ShowWeights$(Optional format$ = format2Dec)
        ' Refresh m_weights/m_biases from the matrices, then delegate
        GetWeights()
        Dim weights = MyBase.ShowWeights(format)
        Return weights
    End Function

    ''' <summary> Copy the four matrices back into the flat jagged arrays. </summary>
    Private Sub GetWeights()
        ReDim Me.m_weights(0 To Me.layerCount - 2)
        ReDim Me.m_biases(0 To Me.layerCount - 2)
        Dim w1 As Double(,) = Me.weights_ih
        Me.m_weights(0) = clsMLPHelper.Transform2DArrayDoubleToArraySingle(w1)
        Dim w2 As Double(,) = Me.weights_ho
        Me.m_weights(1) = clsMLPHelper.Transform2DArrayDoubleToArraySingle(w2)
        Dim w3 As Double(,) = Me.bias_h
        Me.m_biases(0) = clsMLPHelper.Transform2DArrayDoubleToArraySingle2(w3)
        Dim w4 As Double(,) = Me.bias_o
        Me.m_biases(1) = clsMLPHelper.Transform2DArrayDoubleToArraySingle2(w4)
    End
Sub

    ''' <summary>
    ''' Return one weight (or bias) value as Double.
    ''' </summary>
    Public Overrides Function GetWeight#(layer%, neuron%, weight%)
        Dim ws! = Me.GetWeightSingle(layer, neuron, weight)
        Dim wd# = ws
        Return wd
    End Function

    ''' <summary>
    ''' Return one weight value as Single; a weight index at or past the
    ''' previous layer's neuron count addresses the bias array instead.
    ''' </summary>
    Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%)
        Dim nbNeuronsLayer = Me.neuronCount(layer - 1)
        If weight >= nbNeuronsLayer Then
            ' Bias value: fold the overflowing index back onto the neuron
            Dim l2% = weight - nbNeuronsLayer + neuron
            Dim biasValue = Me.m_biases(layer - 1)(l2)
            Return biasValue
        End If
        Dim l% = neuron * nbNeuronsLayer + weight
        Dim weightValue = Me.m_weights(layer - 1)(l)
        Return weightValue
    End Function

    Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#)
        Dim ws! = CSng(weightWalue)
        SetWeightSingle(layer, neuron, weight, ws)
    End Sub

    ''' <summary>
    ''' Set one weight value; same bias-index convention as GetWeightSingle.
    ''' </summary>
    Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!)
        Dim nbNeuronsLayer = Me.neuronCount(layer - 1)
        If weight >= nbNeuronsLayer Then
            Dim l2% = weight - nbNeuronsLayer + neuron
            Me.m_biases(layer - 1)(l2) = weightWalue
            Exit Sub
        End If
        Dim l% = neuron * nbNeuronsLayer + weight
        Me.m_weights(layer - 1)(l) = weightWalue
    End Sub
End Class

' ===== clsTrainData.vb =====
' From https://github.com/nlabiris/perceptrons : C# -> VB .NET conversion
''' <summary>
''' Container for a training set laid out as one 2D array whose first
''' columns are the inputs and whose last columns are the targets.
''' </summary>
Class ML_TrainingData

    Public data!(,)
    Private inputsLength%
    Private targetsLength%

    Public Sub New(inputsLength%, targetsLength%)
        Me.inputsLength = inputsLength
        Me.targetsLength = targetsLength
    End Sub

    ''' <summary> Fill the data array with the XOR truth table. </summary>
    Public Sub Create()
        ' XOR sample
        Me.data = New Single(,) {
            {1.0!, 0.0!, 1.0!},
            {0.0!, 0.0!, 0.0!},
            {0.0!, 1.0!, 1.0!},
            {1.0!, 1.0!, 0.0!}}
    End Sub

    ''' <summary> Return the first inputsLength columns of data. </summary>
    Public Function GetInputs() As Single(,)
        Dim rows = Me.data.GetLength(0)
        Dim cols = Me.inputsLength
        Dim inp!(rows - 1, cols - 1)
        For i = 0 To rows - 1
            For j = 0 To cols - 1
                inp(i, j) = Me.data(i, j)
            Next
        Next
        Return inp
    End Function

    ''' <summary> Return the last targetsLength columns of data. </summary>
    Public Function GetOutputs() As Single(,)
        Dim rows = Me.data.GetLength(0) ' 4
        Dim cols = Me.data.GetLength(1) ' 3
        Dim tgts = Me.targetsLength ' 1
        Dim tgt!(rows - 1, tgts - 1)
        For i = 0 To rows - 1
            Dim k = 0
            For j = cols - tgts To cols - 1
                tgt(i, k) = Me.data(i, j)
                ' Fix: advance the destination column; previously k was never
                ' incremented, so with more than one target only column 0 was
                ' written (and overwritten with the last target value)
                k += 1
            Next
        Next
        Return tgt
End Function
End Class

' ===== modMatrixMLPTest.vb =====
' Matrix-MultiLayerPerceptron: clsMPLMatrix
' From https://github.com/nlabiris/perceptrons : C# -> VB .NET conversion
'Imports Perceptron.MatrixMLP ' clsMPLMatrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode
Module modMatrixMLPTest

    ''' <summary> Console entry point: run the interactive menu then wait. </summary>
    Sub MainMatrixMLP()
        Console.WriteLine("Matrix MLP with the classical XOR test.")
        MatrixMLPTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ''' <summary> Interactive test menu; loops until "0" is chosen. </summary>
    Public Sub MatrixMLPTest()
Retry:
        Console.WriteLine("")
        Console.WriteLine("")
        Console.WriteLine("Matrix MLP Test, choose an option from the following list:")
        Console.WriteLine("0: Exit")
        Console.WriteLine("1: 1 XOR")
        Console.WriteLine("2: 2 XOR")
        Console.WriteLine("3: 3 XOR")
        Console.WriteLine("4: IRIS (Logical)")
        Console.WriteLine("5: IRIS (Analog)")
        Console.WriteLine("6: Sunspot")
        Dim k = Console.ReadKey
        Console.WriteLine("")
        Select Case k.KeyChar
            Case "0"c : Exit Sub
            Case "1"c : MatrixMLPXorTest(nbXor:=1)
            Case "2"c : MatrixMLPXorTest(nbXor:=2)
            Case "3"c : MatrixMLPXorTest(nbXor:=3)
            Case "4"c ' Three layers only, good results!
                MLPGenericIrisFlowerTest(New clsMPLMatrix,
                    "Matrix MLP Iris flower logical test",
                    nbIterations:=4000, threeLayers:=True)
            Case "5"c ' Three layers only, good results!
                MLPGenericIrisFlowerTestAnalog(New clsMPLMatrix,
                    "Matrix MLP Iris flower analog test",
                    nbIterations:=4000, threeLayers:=True)
            Case "6"c
                MLPGenericSunspotTest(New clsMPLMatrix, "Matrix MLP Sunspot test")
        End Select
        GoTo Retry
    End Sub

    ''' <summary>
    ''' Train the matrix MLP on 1, 2 or 3 parallel XOR problems.
    ''' </summary>
    Public Sub MatrixMLPXorTest(Optional nbXor% = 1)
        Dim mlp As New clsMPLMatrix()
        mlp.ShowMessage("Matrix MLP Xor test")
        mlp.ShowMessage("-------------------")
        Dim nbIterations%
        ' Works
        nbIterations = 5000 '100000
        'mlp.SetActivationFunctionOptimized(enumActivationFunctionOptimized.Sigmoid, center:=2)
        ' Sometimes works
        'nbIterations = 100000
        mlp.SetActivationFunctionOptimized(enumActivationFunctionOptimized.HyperbolicTangent, gain:=2)
        'mlp.Init(learningRate:=0.05, weightAdjustment:=0.05)
        ' Works
        'nbIterations = 10000
        'mlp.SetActivationFunctionOptimized(enumActivationFunctionOptimized.ELU, center:=-2)
        mlp.nbIterations = nbIterations
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        If nbXor = 1 Then
            'Dim nbOutput = 1
            'Dim training As New ML_TrainingData(inputsLength:=2, targetsLength:=nbOutput)
            'training.Create()
            'Dim inputs!(,) = training.GetInputs
            'Dim targets!(,) = training.GetOutputs
            'mlp.inputArray = inputs
            'mlp.targetArray = targets
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True)
            ' Not implemented:
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True)
            mlp.printOutputMatrix = True
        ElseIf nbXor = 2 Then
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            'mlp.nbIterations = 10000
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True)
        End If
        mlp.Initialize(learningRate:=0.1, weightAdjustment:=0.1)
        mlp.Randomize(-1, 2)
        mlp.PrintWeights()
        WaitForKeyToStart()
        mlp.Train()
        'mlp.Train(enumLearningMode.Stochastic)
        'mlp.TestAllSamples(inputs, nbOutput)
        'mlp.targetArray = targets
        'mlp.ComputeAverageError()
        mlp.ShowMessage("Matrix MLP Xor test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub
End Module

' ===== clsVecMatrixMLP.vb =====
Imports System.Runtime.InteropServices ' OutAttribute <Out>
Imports Perceptron.Utility ' Matrix
'Namespace VectorizedMatrixMLP
''' <summary>
''' Vectorized matrix MLP: all samples are propagated at once through
''' whole-matrix operations.
''' </summary>
Public Class clsVectorizedMatrixMLP : Inherits clsVectorizedMLPGeneric

    Private neuronCountWithBias%()
    ' m_input/m_target: whole training set; Zlast: last-layer pre-activation
    Private m_input, m_target, Zlast As Matrix
    ' Per-layer weights, errors, pre-activations (Z), activations (A), deltas
    Private w, error_, Z, A, delta As Matrix()

    Public Property input As Matrix
        Get
            Return Me.m_input
        End Get
        Set(value As Matrix)
            Me.m_input = value
        End Set
    End Property

    Public Property target As Matrix
        Get
            Return Me.m_target
        End Get
        Set(value As Matrix)
            Me.m_target = value
        End Set
    End Property

    Public Overrides Function GetMLPType$()
        Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name
    End Function

    Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType
        Return enumActivationFunctionType.Normal
    End Function

    ''' <summary>
    ''' Define the network structure and allocate the weight array.
    ''' </summary>
    Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
        MyBase.InitializeStruct(neuronCount, addBiasColumn)
        Me.input = Me.inputArray
        Me.target = Me.targetArray
        ReDim Me.neuronCount(0 To Me.layerCount - 1)
        ReDim Me.neuronCountWithBias(0 To Me.layerCount - 1)
        For i = 0 To Me.layerCount - 1
            Me.neuronCount(i) = neuronCount(i)
            Me.neuronCountWithBias(i) = neuronCount(i)
            ' Hidden layers get one extra (bias) neuron
            If Me.useBias AndAlso i > 0 AndAlso i < Me.layerCount - 1 Then _
                Me.neuronCountWithBias(i) += 1
        Next
        Me.exampleCount = Me.target.r
        Me.w = New Matrix(Me.layerCount - 1 - 1) {}
    End Sub

    Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        'Me.rnd = New Random(Seed:=1)
        Me.rnd = New Random()
        For i = 0 To Me.w.Length - 1
            Dim nbNeurons = Me.neuronCountWithBias(i)
            If Me.useBias Then nbNeurons += 1
            Dim nbNeuronsNextLayer = Me.neuronCountWithBias(i + 1)
            ' NOTE(review): the result is rescaled by * 2 - 1, so the effective
            ' range is wider than [minValue, maxValue] - confirm this is intended
            Me.w(i) = Matrix.Randomize(
                nbNeurons, nbNeuronsNextLayer, Me.rnd, minValue, maxValue) * 2 - 1
            'Me.w(i).Randomize(minValue, maxValue)
        Next
    End Sub

    Public Overrides Sub InitializeWeights(layer%, weights#(,))
        Me.w(layer - 1) = weights
    End Sub

    ''' <summary>
    ''' Train the whole sample set for nbIterations iterations.
    ''' </summary>
    Public Overrides Sub TrainVector()
        Me.learningMode = enumLearningMode.Vectorial
        Me.vectorizedLearningMode = True
        For iteration = 0 To Me.nbIterations - 1
            Me.numIteration = iteration
            TrainVectorOneIteration()
            If Me.printOutput_ Then PrintOutput(iteration)
        Next
        ComputeAverageError()
    End Sub

    ''' <summary>
    ''' Forward pass over all samples; strips the bias column from the
    ''' last-layer results when bias is enabled.
    ''' </summary>
    Public Sub ForwardPropagateSignal()
        Me.error_ = Nothing
        ForwardPropagateSignalInternal()
        Dim maxLayer = layerCount - 1
        Dim maxIndex = Me.A.Length - 1
        Me.Zlast = Me.Z(maxLayer)
        ' Cut first column for last layer
        Dim zr = Me.Z(maxLayer).r
        Dim zc = Me.Z(maxLayer).c
        If Me.useBias Then Zlast = Zlast.Slice(0, 1, zr, zc)
        Me.output = Me.A(maxIndex)
        ' Cut first column for last index of result matrix
        Dim ar = Me.A(maxIndex).r
        Dim ac = Me.A(maxIndex).c
        If Me.useBias Then Me.output = Me.output.Slice(0, 1, ar, ac)
    End Sub

    Public Sub ComputeErrorInternal()
        Me.error_ = New Matrix(Me.layerCount - 1) {}
        Me.error_(Me.layerCount - 1) = Me.output - Me.target
    End Sub

    Public Sub BackwardPropagateError()
        ComputeErrorInternal()
        BackwardPropagateErrorInternal()
        ComputeGradientAndAdjustWeights()
    End Sub

    Public Overrides Sub TrainVectorOneIteration()
        ForwardPropagateSignal()
        BackwardPropagateError()
    End Sub

    Private Sub ForwardPropagateSignalInternal()
        Me.Z = New Matrix(layerCount - 1) {}
        Me.A = New Matrix(layerCount - 1) {}
        Me.Z(0) = Me.m_input
        ' Column added with 1 for all examples
        If Me.useBias Then Me.Z(0) = Me.Z(0).AddColumn(Matrix.Ones(Me.exampleCount, 1))
        Me.A(0) = Me.Z(0)
        For i = 1 To layerCount - 1
            Me.Z(i) = Me.A(i - 1) * Me.w(i - 1)
            ' Column added with 1 for all examples
            If Me.useBias Then Me.Z(i) = Me.Z(i).AddColumn(Matrix.Ones(Me.exampleCount, 1))
            Me.A(i) = Matrix.Map(Me.Z(i), Me.lambdaFnc)
        Next
        ' How use Relu
        ' Change all sigmoid function, for relu function
        ' Last A must have no Nonlinear function Matrix, Last A must be Equal To Last Z;
        ' because of that Last Delta has not derivated Matrix "Last Delta = Last error Error * 1";
        ' The learning rate must be smaller, like 0.001
        ' Optionaly you can use a Softmax layer to make a classifier
        ' Use if Relu OR iregularized Values
        If Me.activFnc.IsNonLinear Then Me.A(Me.A.Length - 1) = Me.Z(Me.Z.Length - 1)
    End Sub

    Private Sub BackwardPropagateErrorInternal()
        Me.delta = New Matrix(Me.layerCount - 1) {}
        ' Output delta = error x f'(Zlast)
        Me.delta(Me.layerCount - 1) = Me.error_(Me.layerCount - 1) * Matrix.Map(Zlast, Me.lambdaFncD)
        For i = Me.layerCount - 2 To 0 Step -1
            Dim d = Me.delta(i + 1)
            Dim t = Me.w(i).T
            Me.error_(i) = d * t
            Me.delta(i) = Me.error_(i) * Matrix.Map(Z(i), Me.lambdaFncD)
            ' Cut first column
            If Me.useBias Then Me.delta(i) = Me.delta(i).Slice(0, 1, Me.delta(i).r, Me.delta(i).c)
        Next
    End Sub

    Private Sub ComputeGradientAndAdjustWeights()
        ' Gradient descend: Compute gradient and adjust weights
        For i = 0 To w.Length - 1
            Dim gradient = Me.A(i).T * Me.delta(i + 1)
            Me.w(i) -= gradient * Me.learningRate
            ' 30/05/2020 weightAdjustment
            If Me.weightAdjustment <> 0 Then Me.w(i) -= gradient * Me.weightAdjustment
        Next
    End Sub

    Public Sub SetLastError()
        Me.lastError = Me.error_(Me.layerCount - 1)
    End Sub

    Public Overrides Sub TrainOneSample(input!(), target!())
        SetInputOneSample(input)
        SetTargetOneSample(target)
        Me.exampleCount = 1
        TrainVectorOneIteration()
    End Sub

    Private Sub SetInputOneSample(input!())
        Dim inputDble#(0, input.Length - 1)
        clsMLPHelper.Fill2DArrayOfDoubleByArrayOfSingle(inputDble, input, 0)
        Dim matrixInput As Matrix = inputDble
        Me.input = matrixInput
    End Sub

    Private Sub SetTargetOneSample(target!())
        Dim targetsDble#(0, target.Length - 1)
        clsMLPHelper.Fill2DArrayOfDoubleByArrayOfSingle(targetsDble, target, 0)
        Me.target = targetsDble
    End Sub

    Public Overrides Sub SetOuput1D()
        Me.lastOutputArray1DSingle = Me.output.ToArrayOfSingle()
    End Sub

    Public Overrides Sub TestOneSample(input!())
        ' Resize output to one sample
        If Me.exampleCount > 1 Then
            Me.exampleCount = 1
            Dim nbOutputs% = Me.neuronCount(Me.layerCount - 1)
            Dim target!(nbOutputs - 1)
            SetTargetOneSample(target)
        End If
        SetInputOneSample(input)
        ForwardPropagateSignal()
        SetOuput1D()
    End Sub

    ''' <summary>
    ''' Build a printable description of the network structure and weights.
    ''' </summary>
    Public Overrides Function ShowWeights$(Optional format$ = format2Dec)
        Dim sb As New System.Text.StringBuilder
        sb.Append(Me.ShowParameters())
        For i = 0 To Me.layerCount - 1
            sb.AppendLine("Neuron count(" & i & ")=" & Me.neuronCount(i))
        Next
        sb.AppendLine("")
        For i = 0 To Me.w.Length - 1
            sb.AppendLine("W(" & i + 1 & ")=" & Me.w(i).ToString & vbLf)
        Next
        Return sb.ToString()
    End Function

    Public Overrides Function GetWeight#(layer%, neuron%, weight%)
        Return Me.w(layer - 1).GetValue(weight, neuron)
    End Function

    Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%)
        Dim wd# = Me.GetWeight(layer, neuron, weight)
        Dim ws! = CSng(wd)
        Return ws
    End Function

    Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#)
        Me.w(layer - 1).Item(weight, neuron) = weightWalue
    End Sub

    Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!)
        Dim wd# = weightWalue
        SetWeight(layer, neuron, weight, wd)
    End Sub

    Public Overrides Sub PrintOutput(iteration%, Optional force As Boolean = False)
        If force OrElse ShowThisIteration(iteration) Then
            If Not Me.vectorizedLearningMode Then
                TestAllSamples(Me.inputArray)
            Else
                ComputeAverageError()
            End If
            PrintSuccess(iteration)
            'For i = 0 To Me.LayerCount - 1
            ' msg &= "Error(" & i & ")=" & Me.error_(i).ToString() & vbLf
            ' msg &= "A(" & i & ")=" & A(i).ToString() & vbLf
            'Next
        End If
    End Sub
End Class

' ===== modVecMatrixMLPTest.vb =====
' Vectorized-MultiLayerPerceptron
' From https://github.com/HectorPulido/Vectorized-multilayer-neural-network : C# -> VB .NET conversion
'Imports Perceptron.VectorizedMatrixMLP
Imports Perceptron.clsMLPGeneric ' enumLearningMode
Module modMatrixVecMLPTest

    ''' <summary> Console entry point: run the interactive menu then wait. </summary>
    Sub MainVectorizedMatrixMLP()
        Console.WriteLine("Vectorized Matrix MLP with the classical XOR test.")
        VectorizedMatrixMLPTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ''' <summary> Interactive test menu; loops until "0" is chosen. </summary>
    Public Sub VectorizedMatrixMLPTest()
Retry:
        Console.WriteLine("")
        Console.WriteLine("")
        Console.WriteLine("Vectorized Matrix MLP Test, choose an option from the following list:")
        Console.WriteLine("0: Exit")
        Console.WriteLine("1: 1 XOR")
        Console.WriteLine("2: 2 XOR")
        Console.WriteLine("3: 3 XOR")
        Console.WriteLine("4: IRIS (Logical)")
        Console.WriteLine("5: IRIS (Analog)")
        Console.WriteLine("6: Sunspot")
        Dim k = Console.ReadKey
        Console.WriteLine("")
        Select Case k.KeyChar
            Case "0"c : Exit Sub
            Case "1"c : VectorizedMatrixMLPXorTest(nbXor:=1)
            Case "2"c : VectorizedMatrixMLPXorTest(nbXor:=2)
            Case "3"c : VectorizedMatrixMLPXorTest(nbXor:=3)
            Case "4"c ' Works only using sigmoid activation
                MLPGenericIrisFlowerTest(New clsVectorizedMatrixMLP,
                    "Vectorized Matrix MLP Iris flower logical test",
                    nbIterations:=1000, sigmoid:=True)
            Case "5"c ' Works only using sigmoid activation, poor results!
                MLPGenericIrisFlowerTestAnalog(New clsVectorizedMatrixMLP,
                    "Vectorized Matrix MLP Iris flower analog test", sigmoid:=True)
            Case "6"c ' Works only using sigmoid activation
                MLPGenericSunspotTest(New clsVectorizedMatrixMLP,
                    "Vectorized Matrix MLP Sunspot test", sigmoid:=True)
        End Select
        GoTo Retry
    End Sub

    ''' <summary>
    ''' Train the vectorized matrix MLP on 1, 2 or 3 parallel XOR problems.
    ''' </summary>
    Public Sub VectorizedMatrixMLPXorTest(Optional nbXor% = 1)
        Dim mlp As New clsVectorizedMatrixMLP
        mlp.ShowMessage("Vectorized Matrix MLP Xor test")
        mlp.ShowMessage("------------------------------")
        mlp.inputArray = m_inputArrayXOR
        mlp.targetArray = m_targetArrayXOR
        mlp.nbIterations = 2000 ' Sigmoid: works
        'mlp.nbIterations = 5000 ' Hyperbolic tangent: works
        'mlp.nbIterations = 1000 ' Gaussian: works fine
        'mlp.nbIterations = 500 ' Sinus: works fine
        'mlp.nbIterations = 1000 ' Arc tangent: works fine
        'mlp.nbIterations = 1000 ' ELU: works fine (but only one XOR)
        'mlp.nbIterations = 100000 ' ReLU: Does not work yet, but this next one yes:
        'mlp.nbIterations = 5000 ' ReLUSigmoid: works fine
        'mlp.nbIterations = 5000 ' Double threshold: works fine
        mlp.SetActivationFunction(enumActivationFunction.Sigmoid)
        'mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain:=2)
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        If nbXor = 1 Then
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True)
            mlp.printOutputMatrix = True
        ElseIf nbXor = 2 Then
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True)
        End If
        mlp.Initialize(learningRate:=0.1, weightAdjustment:=1)
        mlp.Randomize()
        mlp.PrintWeights()
        WaitForKeyToStart()
        mlp.TrainVector()
        'mlp.Train() ' Works fine
        'mlp.Train(enumLearningMode.Systematic) ' Works fine
        'mlp.Train(enumLearningMode.SemiStochastic) ' Works
        'mlp.Train(enumLearningMode.Stochastic) ' Works
        mlp.ShowMessage("Vectorized Matrix MLP Xor test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub
End Module

' ===== clsMLPNeuralNet.vb =====
' https://www.nuget.org/packages/NeuralNetwork.NET Nuget install
' https://github.com/Sergio0694/NeuralNetwork.NET
' https://scisharp.github.io/SciSharp Other .NET Machine Learning projects
' Install-Package NeuralNetwork.NET
' Fix SixLabors.ImageSharp FileLoadException (0x80131040) bug:
' Install-Package SixLabors.ImageSharp -Version 1.0.0-beta0007
' (bug starting from 1.0.0-rc0001 version)
Imports NeuralNetworkNET.APIs
Imports NeuralNetworkNET.APIs.Enums
Imports NeuralNetworkNET.APIs.Structs
Imports NeuralNetworkNET.Networks.Cost ' CostFunctionType
Imports System.Text
' Without GetWeights, SetWeights, functional tests will fails
#Const NuGetImplementation = 1 ' 0: Off, 1: On
' GetWeights, SetWeights, see there:
' https://github.com/PatriceDargenton/NeuralNetwork.NET/tree/get_set_weights
#Const GetWeightsImplementation = 0 ' 0: Off, 1: On
' Tuples are not available for Visual Studio 2013:
' https://github.com/PatriceDargenton/NeuralNetwork.NET/tree/training_input_target
#Const GetWeightsImplementationVS2013 = 0 ' 0: Off, 1: On
' The stable branch contains the get_set_weights and training_input_target branches:
' https://github.com/PatriceDargenton/NeuralNetwork.NET/tree/stable
''' <summary>
''' MLP backed by the NeuralNetwork.NET library.
''' </summary>
Public Class clsMLPNeuralNet : Inherits clsVectorizedMLPGeneric

    Private network As Interfaces.INeuralNetwork
    Private output2D!(,)
    Public inputJaggedArray!()()
    Public targetJaggedArray!()()
    ' Flat copies of the weights/biases, exchanged with the network
    Private m_weights!()()
    Private m_biases!()()
#If GetWeightsImplementationVS2013 Then
#ElseIf GetWeightsImplementation Or NuGetImplementation Then
    Private
m_trainingData As (n As Single(), w As Single())() #End If Private m_dataset As Interfaces.Data.ITrainingDataset Private m_nbIterationsBatchLast% Public Overrides Function GetMLPType$() Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name End Function Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType Return enumActivationFunctionType.Library End Function Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean) Me.useBias = addBiasColumn If Not Me.useBias Then Throw New NotImplementedException( "useBias=False is not implemented for clsMLPNeuralNet!") End If MyBase.InitializeStruct(neuronCount, addBiasColumn) Me.trainingAlgorithm = enumTrainingAlgorithm.RMSProp Me.learningRate = 0 Me.weightAdjustment = 0 Me.minBatchSize = 1 #If NuGetImplementation Then Me.minBatchSize = 10 #End If Me.nbIterationsBatch = 100 End Sub Public Overrides Sub SetActivationFunction( actFnc As enumActivationFunction, Optional gain! = 1, Optional center! 
= 0) center = 0 If actFnc = enumActivationFunction.Sigmoid Then gain = 1 If actFnc = enumActivationFunction.HyperbolicTangent Then gain = 2 MyBase.SetActivationFunction(actFnc, gain, center) Select Case actFnc Case enumActivationFunction.Sigmoid Case enumActivationFunction.HyperbolicTangent #If GetWeightsImplementation Or GetWeightsImplementationVS2013 Then Case enumActivationFunction.Mish #End If Case Else Throw New NotImplementedException( "This activation function is not available!") End Select If IsNothing(Me.inputArray) Then Exit Sub Me.inputJaggedArray = clsMLPHelper.Transform2DArrayToJaggedArraySingle(Me.inputArray) Me.targetJaggedArray = clsMLPHelper.Transform2DArrayToJaggedArraySingle(Me.targetArray) #If GetWeightsImplementationVS2013 Then ' Zip function compiled in .NET Standard 2.0 NeuralNetwork.NET.dll, available from VS2013 Me.m_dataset = DatasetLoader.Training2(Me.inputJaggedArray, Me.targetJaggedArray, size:=Me.nbIterationsBatch) #ElseIf GetWeightsImplementation Or NuGetImplementation Then ' Zip: just concatenate the input array with the target array like a zipper! 
''' <summary>
''' (Re)build the NeuralNetwork.NET sequential graph from the current layer
''' structure and activation function. Only 3-, 4- and 5-layer topologies
''' are supported; the output layer always uses a quadratic cost.
''' </summary>
Private Sub BuildGraph()
    ' Map the generic activation enum onto the library's ActivationType.
    Dim actFunc = ActivationType.Tanh
    Select Case Me.m_actFunc
        Case enumActivationFunction.Sigmoid : actFunc = ActivationType.Sigmoid
        Case enumActivationFunction.HyperbolicTangent : actFunc = ActivationType.Tanh
#If GetWeightsImplementation Or GetWeightsImplementationVS2013 Then
        Case enumActivationFunction.Mish : actFunc = ActivationType.Mish
#End If
        Case Else : Throw New NotImplementedException("This activation function is not available!")
    End Select

    ' One explicit case per supported depth (input + 1..3 hidden + output).
    Select Case Me.layerCount
        Case 3
            Me.network = NetworkManager.NewSequential(
                TensorInfo.Linear(Me.nbInputNeurons),
                NetworkLayers.FullyConnected(Me.neuronCount(1), actFunc),
                NetworkLayers.FullyConnected(Me.nbOutputNeurons, actFunc,
                    CostFunctionType.Quadratic))
        Case 4
            Me.network = NetworkManager.NewSequential(
                TensorInfo.Linear(Me.nbInputNeurons),
                NetworkLayers.FullyConnected(Me.neuronCount(1), actFunc),
                NetworkLayers.FullyConnected(Me.neuronCount(2), actFunc),
                NetworkLayers.FullyConnected(Me.nbOutputNeurons, actFunc,
                    CostFunctionType.Quadratic))
        Case 5
            Me.network = NetworkManager.NewSequential(
                TensorInfo.Linear(Me.nbInputNeurons),
                NetworkLayers.FullyConnected(Me.neuronCount(1), actFunc),
                NetworkLayers.FullyConnected(Me.neuronCount(2), actFunc),
                NetworkLayers.FullyConnected(Me.neuronCount(3), actFunc),
                NetworkLayers.FullyConnected(Me.nbOutputNeurons, actFunc,
                    CostFunctionType.Quadratic))
        Case Else : Throw New NotImplementedException("Wrong number of layers!")
    End Select
End Sub
''' <summary>
''' Load one layer's weights (and optional biases) from a 2D matrix into the
''' flat Single arrays the library expects. weights(j, k) is the weight from
''' previous-layer neuron k to neuron j; the extra column (when useBias) holds
''' the bias of neuron j. Once the last layer arrives, the graph is rebuilt
''' and the accumulated weights are pushed into the network.
''' </summary>
''' <param name="layer">1-based layer index (1 .. layerCount - 1).</param>
''' <param name="weights">Weight matrix for this layer (bias in last column).</param>
Public Overrides Sub InitializeWeights(layer%, weights#(,))
    ' First call: allocate one slot per connection layer.
    If layer = 1 Then
        ReDim Me.m_weights(0 To Me.layerCount - 2)
        ReDim Me.m_biases(0 To Me.layerCount - 2)
    End If

    Dim i = layer - 1
    Dim nbNeuronsLayer = Me.neuronCount(i + 1)
    Dim nbNeuronsPreviousLayer = Me.neuronCount(i)
    Dim nbWeights = nbNeuronsLayer * nbNeuronsPreviousLayer
    ReDim Me.m_weights(i)(0 To nbWeights - 1)
    ReDim Me.m_biases(i)(0 To nbNeuronsLayer - 1)

    ' Default every bias to zero; overwritten below when useBias is set.
    For j = 0 To nbNeuronsLayer - 1
        Me.m_biases(i)(j) = 0
    Next j

    ' Flatten the matrix row by row into the Single weight array.
    Dim flatIndex = 0
    For j = 0 To nbNeuronsLayer - 1
        For k = 0 To nbNeuronsPreviousLayer - 1
            Me.m_weights(i)(flatIndex) = CSng(weights(j, k))
            flatIndex += 1
        Next k
        If Me.useBias Then _
            Me.m_biases(i)(j) = CSng(weights(j, nbNeuronsPreviousLayer))
    Next j

    ' Last layer: everything is collected, rebuild and apply.
    If layer = Me.layerCount - 1 Then
        BuildGraph()
        Me.network.SetWeights(Me.m_weights, Me.m_biases)
    End If
End Sub
''' <summary>
''' Round every weight and bias of the network to nbRoundingDigits digits
''' (so that test runs are exactly reproducible), then write the rounded
''' values back into the network.
''' </summary>
Public Overrides Sub RoundWeights()
    ReDimWeights()
    GetWeights()
    For i = 0 To Me.network.Layers.Count - 1
        Dim nbNeuronsLayer = Me.neuronCount(i + 1)
        Dim nbWeights = nbNeuronsLayer * Me.neuronCount(i)
        ' Round the connection weights of this layer.
        For k = 0 To nbWeights - 1
            Me.m_weights(i)(k) = CSng(Math.Round(
                Me.m_weights(i)(k), clsMLPGeneric.nbRoundingDigits))
        Next k
        ' Round the biases as well when the network uses them.
        If Me.useBias Then
            For k = 0 To nbNeuronsLayer - 1
                Me.m_biases(i)(k) = CSng(Math.Round(
                    Me.m_biases(i)(k), clsMLPGeneric.nbRoundingDigits))
            Next k
        End If
    Next
    SetWeights()
End Sub
''' <summary>
''' Train the network in vectorized batch mode. The cached dataset is reused
''' when the requested batch size matches the one it was built with;
''' otherwise a fresh dataset is created from the cached training pairs.
''' </summary>
''' <param name="nbIterationsBatch">Desired dataset batch size.</param>
Public Overrides Sub TrainVectorBatch(nbIterationsBatch%)
    Me.vectorizedLearningMode = True
    Dim dataset As Interfaces.Data.ITrainingDataset
    If nbIterationsBatch = Me.m_nbIterationsBatchLast Then
        ' Same batch size as last time: reuse the cached dataset.
        dataset = Me.m_dataset
    Else
        ' size: The desired dataset batch size
        dataset = DatasetLoader.Training(Me.m_trainingData,
            size:=nbIterationsBatch)
    End If
    TrainNetwork(dataset)
End Sub
''' <summary>
''' Run a forward pass on the whole input set and publish the network output
''' twice: as a flat 1D Single array (lastOutputArray1DSingle) and as a 2D
''' Double matrix (output), one row per sample.
''' </summary>
''' <remarks>
''' FIX: the original looped over inputArray's row count while sizing both
''' output buffers from targetArray's row count — an IndexOutOfRange crash
''' whenever the two differ. Both the loop and the buffers now use the target
''' row count (the two counts are expected to be equal: one sample per row).
''' </remarks>
Public Overrides Sub SetOuput1D()
    Forward()
    Dim nbSamples = Me.targetArray.GetLength(0)
    Dim lengthTot = nbSamples * Me.nbOutputNeurons
    Dim outputs1D#(lengthTot - 1)
    Dim outputs2D#(nbSamples - 1, Me.nbOutputNeurons - 1)
    Dim k = 0
    For i = 0 To nbSamples - 1
        For j = 0 To Me.nbOutputNeurons - 1
            outputs2D(i, j) = Me.output2D(i, j)
            outputs1D(k) = Me.output2D(i, j)
            k += 1
        Next
    Next
    Me.lastOutputArray1DSingle =
        clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs1D)
    Me.output = outputs2D
End Sub
''' <summary>
''' Evaluate the network on a single input vector: wrap it as a one-row 2D
''' array, forward it through the network, and publish the result via
''' SetOuput1DOneSample.
''' </summary>
''' <param name="input">One input sample (one value per input neuron).</param>
Public Overrides Sub TestOneSample(input!())
    ' The library's Forward expects a 2D batch; build a batch of one.
    Dim singleRowBatch!(0, input.Length - 1)
    clsMLPHelper.Fill2DArrayOfSingle(singleRowBatch, input, 0)
    Me.output2D = Me.network.Forward(singleRowBatch)
    SetOuput1DOneSample()
End Sub
''' <summary>
''' Read one weight of the given layer as Single. Weight indices beyond the
''' previous layer's neuron count address the bias array instead of the
''' flattened weight array (mirrors SetWeightSingle's layout).
''' </summary>
''' <param name="layer">1-based layer index.</param>
''' <param name="neuron">Neuron index inside the layer.</param>
''' <param name="weight">Incoming-weight index; >= previous-layer size means bias.</param>
Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%)
    Dim previousLayerSize = Me.neuronCount(layer - 1)
    If weight >= previousLayerSize Then
        ' Bias slot: offset past the regular weights of this neuron.
        Dim biasIndex% = weight - previousLayerSize + neuron
        Return Me.m_biases(layer - 1)(biasIndex)
    End If
    ' Regular connection weight in the flattened per-layer array.
    Dim flatIndex% = neuron * previousLayerSize + weight
    Return Me.m_weights(layer - 1)(flatIndex)
End Function
Namespace Activation

    ''' <summary>
    ''' Sigmoid activation with an adjustable slope (Alpha) and an adjustable
    ''' output range: Evaluate maps R onto (OutputRange.Minimum,
    ''' OutputRange.Minimum + OutputRange.Delta).
    ''' </summary>
    Public Class AdjustableSigmoid : Inherits BaseActivation

        ''' <summary>Slope of the sigmoid.</summary>
        Public Property Alpha#

        ''' <summary>Default: Alpha = 1, output range (-1, 1).</summary>
        Public Sub New()
            Me.Alpha = 1
            Me.in_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
            Me.out_range = New Range(-1, 1)
        End Sub

        ''' <summary>Custom slope and output range.</summary>
        ''' <param name="alpha">Slope of the sigmoid.</param>
        ''' <param name="outputRange">Desired output range.</param>
        Public Sub New(alpha#, outputRange As Range)
            Me.Alpha = alpha
            Me.in_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
            ' FIX: the original assigned Me.OutputRange (a read of the
            ' still-uninitialized out_range field, i.e. Nothing) instead of
            ' the constructor parameter, so the custom range was lost.
            Me.out_range = outputRange
        End Sub

        ''' <summary>Not supported for this activation.</summary>
        Public Overrides Function AbstractedDerivative#(value#)
            Throw New NotImplementedException
        End Function

        ''' <summary>Derivative of Evaluate, scaled by the output range.</summary>
        Public Overrides Function Derivative#(value#)
            Dim exp = Math.Exp(Me.Alpha * value)
            Return Me.OutputRange.Delta * (Me.Alpha * exp) / ((exp + 1) * (exp + 1))
        End Function

        ''' <summary>Sigmoid rescaled into the configured output range.</summary>
        Public Overrides Function Evaluate#(value#)
            Return Me.OutputRange.Delta / (1 + Math.Exp(-Me.Alpha * value)) +
                Me.OutputRange.Minimum
        End Function

    End Class

End Namespace
Namespace Activation

    ''' <summary>
    ''' Exponential Linear Unit: identity for inputs above Center, and
    ''' Alpha * (exp(x - Center) - 1) below it.
    ''' </summary>
    Public Class ELU : Inherits BaseActivation

        ''' <summary>Scale of the negative branch.</summary>
        Public Property Alpha#

        ''' <summary>Default: Alpha = 1, Center = 0.</summary>
        Public Sub New()
            MyBase.New()
            Me.Alpha = 1
            Me.in_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
            Me.out_range = New Range(0, 1)
        End Sub

        ''' <summary>Custom scale and center.</summary>
        Public Sub New(alpha#, center#)
            Me.Alpha = alpha
            Me.Center = center
            Me.in_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
            Me.out_range = New Range(0, 1)
        End Sub

        ''' <summary>
        ''' Simplified derivative from the raw input.
        ''' NOTE(review): unlike Derivative, this branch ignores Center and
        ''' uses value + Alpha rather than Evaluate(value) + Alpha — presumably
        ''' intentional for the "abstracted" form, but worth confirming.
        ''' </summary>
        Public Overrides Function AbstractedDerivative#(value#)
            ' If gain < 0 the derivative is undefined.
            If Me.Alpha < 0 Then Return 0
            Return If(value >= 0, 1.0#, value + Me.Alpha)
        End Function

        ''' <summary>Exact derivative: 1 above Center, f(x) + Alpha below.</summary>
        Public Overrides Function Derivative#(value#)
            ' If gain < 0 the derivative is undefined.
            If Me.Alpha < 0 Then Return 0
            Dim centered# = value - Me.Center
            Return If(centered >= 0, 1.0#, Evaluate(value) + Me.Alpha)
        End Function

        ''' <summary>ELU value at the centered input.</summary>
        Public Overrides Function Evaluate#(value#)
            Dim centered# = value - Me.Center
            Return If(centered >= 0, centered, Me.Alpha * (Math.Exp(centered) - 1))
        End Function

    End Class

End Namespace
Namespace Activation

    ''' <summary>
    ''' Linear activation: Evaluate(x) = Slope * x, with a constant derivative
    ''' equal to Slope. Input and output ranges are unbounded.
    ''' </summary>
    Public Class Linear : Inherits BaseActivation

        ''' <summary>Multiplicative slope of the activation.</summary>
        Public Property Slope#

        ''' <summary>Default: Slope = 1.</summary>
        Public Sub New()
            Me.Slope = 1
            Me.in_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
            Me.out_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
        End Sub

        ''' <summary>Custom slope.</summary>
        Public Sub New(slope#)
            Me.Slope = slope
            Me.in_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
            Me.out_range = New Range(Double.NegativeInfinity, Double.PositiveInfinity)
        End Sub

        ''' <summary>Custom integer slope.</summary>
        ''' <remarks>
        ''' FIX: the original overload set only Slope, leaving in_range and
        ''' out_range Nothing (a latent NullReferenceException for any caller
        ''' reading InputRange/OutputRange). Chaining to the Double overload
        ''' initializes the ranges identically to the other constructors.
        ''' </remarks>
        Public Sub New(slope%)
            Me.New(CDbl(slope))
        End Sub

        ''' <summary>Derivative is the constant slope.</summary>
        Public Overrides Function AbstractedDerivative#(value#)
            Return Me.Slope
        End Function

        ''' <summary>Derivative is the constant slope.</summary>
        Public Overrides Function Derivative#(value#)
            Return Me.Slope
        End Function

        ''' <summary>Slope * value.</summary>
        Public Overrides Function Evaluate#(value#)
            Return Me.Slope * value
        End Function

    End Class

End Namespace
Namespace Data

    ''' <summary>
    ''' One supervised training sample: an input vector and the expected
    ''' output vector, both stored as mutable lists of Double.
    ''' </summary>
    Public Class Training

        ''' <summary>Input values of the sample.</summary>
        Public Property Input As List(Of Double)
        ''' <summary>Expected output values of the sample.</summary>
        Public Property Output As List(Of Double)

        ''' <summary>Create an empty sample (both lists allocated, empty).</summary>
        Public Sub New()
            Me.Input = New List(Of Double)
            Me.Output = New List(Of Double)
        End Sub

        ''' <summary>Create a sample by copying the given sequences.</summary>
        ''' <param name="input">Input values (copied).</param>
        ''' <param name="output">Expected output values (copied).</param>
        Public Sub New(input As IEnumerable(Of Double), output As IEnumerable(Of Double))
            Me.Input = New List(Of Double)(input)
            Me.Output = New List(Of Double)(output)
        End Sub

    End Class

End Namespace
''' <summary>
''' Rebuild this layer's incoming connections from a saved weight matrix.
''' weightsWithBias(i, j) holds the weight from parent neuron j to neuron i;
''' when useBias is set, the last column holds each neuron's bias weight.
''' Input neurons are skipped (they have no incoming weights).
''' </summary>
''' <param name="weightsWithBias">Saved weights, bias in the last column.</param>
''' <param name="useBias">Whether a bias column is present and to be wired.</param>
''' <param name="bias">The shared bias neuron to connect to.</param>
''' <param name="layerParent">The parent (previous) layer.</param>
Public Sub RestoreWeightsWithBias(weightsWithBias#(,), useBias As Boolean,
        bias As Neuron, layerParent As BaseLayer)

    Dim layerParentSize = weightsWithBias.GetUpperBound(1)
    Dim nbNeuronsParent = layerParent.Neurons.Count
    Dim weights#(Me.Neurons.Count - 1, layerParentSize)
    Dim biasWeights#(Me.Neurons.Count - 1)

    ' Split the combined matrix into plain weights and bias weights.
    ' (The original read the bias via the loop variable's post-loop value;
    ' the explicit nbNeuronsParent column index is equivalent and clearer.)
    Dim i = 0
    For Each target As Neuron In Me.Neurons
        If target.Type = NeuronType.Input Then Continue For
        For j = 0 To nbNeuronsParent - 1
            weights(i, j) = weightsWithBias(i, j)
        Next
        If useBias Then biasWeights(i) = weightsWithBias(i, nbNeuronsParent)
        i += 1
    Next

    ' Reset the connection lists, then wire weights and (optionally) biases.
    Me.Init()
    Me.ConnectParent2(layerParent, weights)
    If useBias Then Me.ConnectBias2(bias, biasWeights)
End Sub
Namespace Layers

    ''' <summary>
    ''' Final layer of the network: holds Output-type neurons, assigns the
    ''' per-neuron error against expected values, and aggregates error metrics.
    ''' </summary>
    Public Class OutputLayer : Inherits BaseLayer

        ''' <summary>Create the layer with Size output neurons.</summary>
        Public Sub New(Size%, Activation As BaseActivation)
            MyBase.New(Size, Activation)
            For x = 1 To Size
                Me.Neurons.Add(New Neuron(NeuronType.Output))
            Next
        End Sub

        ''' <summary>Store (expected - actual) into each neuron's ErrorDelta.</summary>
        Public Sub AssignErrors(expected As List(Of Double))
            For x = 0 To Me.Size - 1
                Me.Neurons(x).ErrorDelta = expected(x) - Me.Neurons(x).Output
            Next
        End Sub

        ''' <summary>Collect every neuron's Output into a list.</summary>
        Public Function ExtractOutputs() As List(Of Double)
            Dim results As New List(Of Double)
            For Each neuron In Me.Neurons
                results.Add(neuron.Output)
            Next
            Return results
        End Function

        ''' <summary>
        ''' Root of the sum of squared errors.
        ''' NOTE(review): despite the name, this returns Sqrt(sum of squares),
        ''' not the squared error (the original carried a ' sum / 2 remark);
        ''' kept as-is because callers accumulate this value.
        ''' </summary>
        Public Function CalculateSquaredError#()
            Dim total# = 0.0
            For Each neuron In Me.Neurons
                total += neuron.ErrorDelta * neuron.ErrorDelta
            Next
            Return Math.Sqrt(total) ' sum / 2
        End Function

        ''' <summary>Sum of absolute per-neuron errors.</summary>
        Public Function CalculateAbsError#()
            Dim total# = 0.0
            For Each neuron In Me.Neurons
                total += Math.Abs(neuron.ErrorDelta)
            Next
            Return total
        End Function

        ''' <summary>Signed sum of per-neuron errors (cancellations possible).</summary>
        Public Function CalculateSignedError#()
            Dim total# = 0.0
            For Each neuron In Me.Neurons
                total += neuron.ErrorDelta
            Next
            Return total
        End Function

    End Class

End Namespace
''' <summary>Console driver for the object-oriented MLP XOR tests.</summary>
Module Main

    ''' <summary>Entry point: run the XOR test, then wait for a key.</summary>
    Sub MainOOPMLP()
        Console.WriteLine("Object-oriented programming (OOP) MLP with the classical XOR test.")
        OOPMLPXorTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ''' <summary>
    ''' Train and test the OOP MLP on 1, 2 or 3 parallel XOR problems.
    ''' </summary>
    ''' <param name="nbXor">Number of parallel XOR problems (1, 2 or 3).</param>
    Public Sub OOPMLPXorTest(Optional nbXor% = 1)

        ' Weight initialization range for the randomizer.
        Const minValue# = -1
        Const maxValue# = 1
        Dim standard As New Randoms.Standard(
            New Range(minValue, maxValue), seed:=DateTime.Now.Millisecond)

        ' NOTE(review): the constructor receives a HyperbolicTangent activation
        ' that SetActivationFunction immediately replaces with Sigmoid —
        ' preserved as-is from the original test.
        Dim mlp As New clsMLPOOP(
            learning_rate:=0.5, momentum:=0.8,
            randomizer:=standard,
            activation:=New HyperbolicTangent(alpha:=0.5#))
        mlp.SetActivationFunction(enumActivationFunction.Sigmoid, gain:=0.5)

        mlp.ShowMessage("Object-oriented programming MLP Xor test")
        mlp.ShowMessage("----------------------------------------")
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        mlp.nbIterations = 2000

        ' Pick the dataset and topology for the requested problem size.
        If nbXor = 1 Then
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            mlp.printOutputMatrix = True
        ElseIf nbXor = 2 Then
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True)
        End If

        mlp.Randomize() ' See above: standard As New Randoms.Standard
        mlp.PrintWeights()
        WaitForKeyToStart()

        mlp.Train()
        'mlp.Train(enumLearningMode.SemiStochastic) ' Works
        'mlp.Train(enumLearningMode.Stochastic) ' Works

        mlp.ShowMessage("Object-oriented programming MLP Xor test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub

End Module
''' <summary>
''' Build the layer objects for the OOP MLP: optional bias neuron, one input
''' layer, hidden layers taken from neuronCount(1 .. layerCount - 2), and one
''' output layer; then wire all connections via WeightInitStruct.
''' </summary>
''' <param name="neuronCount">Neuron count per layer, input layer first.</param>
''' <param name="addBiasColumn">Whether to create and wire a bias neuron.</param>
Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
    MyBase.InitializeStruct(neuronCount, addBiasColumn)

    ' The bias is a pseudo input neuron with a constant output of 1.
    If addBiasColumn Then
        Me.Bias = New Neuron(NeuronType.Input)
        Me.Bias.Input = 1
        Me.Bias.Output = 1
    Else
        Me.Bias = Nothing
    End If

    ' Fresh layer collections.
    Me.Layers = New List(Of BaseLayer)
    Me.HiddenLayers = New List(Of HiddenLayer)

    ' Input layer first.
    Me.InputLayer = New InputLayer(Me.nbInputNeurons, Me.ActivationFunction)
    Me.Layers.Add(InputLayer)

    ' Hidden layers: every entry between the input and output counts.
    For idx = 1 To Me.layerCount - 2
        Dim hiddenLayer = New HiddenLayer(neuronCount(idx), Me.ActivationFunction)
        Me.HiddenLayers.Add(hiddenLayer)
        Me.Layers.Add(hiddenLayer)
    Next

    ' Output layer last, then refresh the effective layer count.
    Me.OutputLayer = New OutputLayer(Me.nbOutputNeurons, Me.ActivationFunction)
    Me.Layers.Add(OutputLayer)
    Me.layerCount = Me.Layers.Count

    WeightInitStruct()
End Sub
Me.Layers(x).ConnectChildInit(Layers(x + 1)) 'connecting bias If Me.useBias Then Me.Layers(x + 1).ConnectBiasInit(Bias) Next End Sub Public Overrides Sub InitializeWeights(layer%, weights#(,)) Me.Layers(layer).RestoreWeightsWithBias(weights, Me.useBias, Me.Bias, Me.Layers(layer - 1)) End Sub Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!) ' 06/06/2021 Me.Randomizer = New Randoms.Standard( New Utilities.Range(minValue, maxValue), seed:=DateTime.Now.Millisecond) For x = 0 To Me.Layers.Count - 2 Me.Layers(x).InitChild(Layers(x + 1), Me.Randomizer) If Me.useBias Then Me.Layers(x + 1).InitBias(Me.Bias, Me.Randomizer) Next End Sub Public Sub TrainOneSampleOOP(data As List(Of Training)) Me.Outputs = New List(Of List(Of Double)) Me.TotalAbsError = 0 Me.TotalSquaredError = 0 Me.TotalSignedError = 0 For Each item In data Me.InputLayer.SetInput(item.Input) ForwardPropogateSignal() Me.OutputLayer.AssignErrors(item.Output) BackwardPropogateErrorComputeGradientAndAdjustWeights() Me.TotalAbsError += Me.OutputLayer.CalculateAbsError() Me.TotalSquaredError += Me.OutputLayer.CalculateSquaredError() Me.TotalSignedError += Me.OutputLayer.CalculateSignedError() Me.Outputs.Add(Me.OutputLayer.ExtractOutputs) Next End Sub Public Sub TrainOneIteration(data As List(Of Training)) Me.Outputs = New List(Of List(Of Double)) Me.TotalSquaredError = 0 For Each item In data Me.InputLayer.SetInput(item.Input) ForwardPropogateSignal() Me.OutputLayer.AssignErrors(item.Output) BackwardPropogateErrorComputeGradientAndAdjustWeights() Me.TotalSquaredError += Me.OutputLayer.CalculateSquaredError() Me.Outputs.Add(Me.OutputLayer.ExtractOutputs) Next End Sub Private Sub SetInputOneSample(input!()) Dim inputDble#() = clsMLPHelper.Convert1DArrayOfSingleToDouble(input) Dim lst As List(Of Double) = inputDble.ToList Dim data As New Testing(lst) Me.InputLayer.SetInput(data.Input) End Sub Private Function SetInputAndTargetOneSample(input!(), target!()) As List(Of Training) Dim 
data As New List(Of Training) Dim inputDble#() = clsMLPHelper.Convert1DArrayOfSingleToDouble(input) Dim targetDble#() = clsMLPHelper.Convert1DArrayOfSingleToDouble(target) data.Add(New Training(inputDble, targetDble)) Return data End Function Public Overrides Sub TrainOneSample(input!(), target!()) Dim data = SetInputAndTargetOneSample(input, target) TrainOneSampleOOP(data) Me.averageErrorOneSample = Me.TotalSquaredError / target.GetLength(0) Me.averageErrorOneSampleSigned = Me.TotalSignedError / target.GetLength(0) SetOuput1D() End Sub Public Sub SetOuput1D() Dim lst = Me.OutputLayer.ExtractOutputs() Me.lastOutputArray1D = lst.ToArray() Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(Me.lastOutputArray1D) End Sub Private Sub SetOutput() ' 29/11/2020 Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, Me.lastOutputArray1D, 0) Me.output = outputs2D End Sub Public Overrides Sub TestOneSample(input!()) SetInputOneSample(input) ForwardPropogateSignal() SetOuput1D() SetOutput() ' 29/11/2020 End Sub Private Sub ForwardPropogateSignal() For x = 1 To Me.Layers.Count - 1 For Each node In Me.Layers(x).Neurons node.Input = 0.0 For Each w In node.WeightsToParent node.Input += w.Parent.Output * w.Value Next 'adding bias If Me.useBias Then node.Input += node.WeightToBias.Parent.Output * node.WeightToBias.Value If IsNothing(Me.lambdaFnc) AndAlso IsNothing(Layers(x).ActivationFunction) Then Throw New ArgumentException("Activation function undefined!") End If If IsNothing(Me.ActivationFunction) Then ' Generic activation function node.Output = Me.lambdaFnc.Invoke(node.Input) Else ' OOP activation function node.Output = Me.Layers(x).ActivationFunction.Evaluate(node.Input) End If Next Next End Sub Private Sub BackwardPropogateErrorComputeGradientAndAdjustWeights() ' Backward propagate error from the output layer through to the first layer ' Gradient descend: Compute gradient and adjust weights 'updating weights for all other 
layers For x = Me.Layers.Count - 1 To 1 Step -1 For Each node In Me.Layers(x).Neurons 'if not output layer, then errors need to be backpropogated from child layer to parent If node.Type <> NeuronType.Output Then node.ErrorDelta = 0.0 For Each w In node.WeightsToChild node.ErrorDelta += w.Value * w.Child.ErrorDelta * w.Child.Primed Next End If 'calculating derivative value of input 'node.Primed = Layers(x).ActivationFunction.AbstractedDerivative(node.Output) If IsNothing(Me.ActivationFunction) Then node.Primed = Me.lambdaFncD.Invoke(node.Input) Else node.Primed = Me.Layers(x).ActivationFunction.Derivative(node.Input) End If 'adjusting weight values between parent layer For Each w In node.WeightsToParent Dim adjustment = Me.learningRate * node.ErrorDelta * node.Primed * w.Parent.Output w.Value += adjustment '+ w.Previous * Me.weightAdjustment If Me.weightAdjustment <> 0 Then _ w.Value += w.Previous * Me.weightAdjustment w.Previous = adjustment Next 'adjusting weights between bias If Me.useBias Then Dim biasAdjustment = Me.learningRate * node.ErrorDelta * node.Primed * node.WeightToBias.Parent.Output node.WeightToBias.Value += biasAdjustment '+ 'node.WeightToBias.Previous * Me.weightAdjustment If Me.weightAdjustment <> 0 Then _ node.WeightToBias.Value += node.WeightToBias.Previous * Me.weightAdjustment node.WeightToBias.Previous = biasAdjustment End If Next Next End Sub Public Overrides Function GetWeight#(layer%, neuron%, weight%) Dim neuron_ = Me.Layers(layer).Neurons(neuron) Dim nbWeights = neuron_.WeightsToParent.Count If weight >= nbWeights Then Dim wB# = neuron_.WeightToBias.Value Return wB End If Dim wd# = neuron_.WeightsToParent(weight).Value Return wd End Function Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%) Dim wd# = Me.GetWeight(layer, neuron, weight) Dim ws! 
= CSng(wd)
        Return ws
    End Function

    ''' <summary>
    ''' Write one weight value; an index at or past the parent-weight count
    ''' addresses the bias weight of the neuron instead.
    ''' </summary>
    Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#)
        Dim targetNeuron = Me.Layers(layer).Neurons(neuron)
        If weight >= targetNeuron.WeightsToParent.Count Then
            targetNeuron.WeightToBias.Value = weightWalue
            Exit Sub
        End If
        targetNeuron.WeightsToParent(weight).Value = weightWalue
    End Sub

    ''' <summary>
    ''' Single-precision convenience wrapper around SetWeight.
    ''' </summary>
    Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!)
        Dim valueDbl# = weightWalue
        SetWeight(layer, neuron, weight, valueDbl)
    End Sub

End Class

Neuron.vb

Namespace Neurons

    ''' <summary>
    ''' One processing unit of the network, linked to its parent and child
    ''' layers (and optionally a bias neuron) through Weight objects.
    ''' </summary>
    Public Class Neuron

        Public Property NumericalFormat$ = "0.000"
        Public Property Input#
        Public Property Output#
        Public Property ErrorDelta#
        Public Property Primed#
        Public Property Type As NeuronType
        Public Property WeightsToChild As List(Of Weight)
        Public Property WeightsToParent As List(Of Weight)
        Public Property WeightToBias As Weight

        Public Sub New(type As NeuronType)
            Me.Input = 0
            Me.Output = 0
            Me.Primed = 0
            Me.ErrorDelta = 0
            Me.Type = type
            ' Only the weight lists relevant to the neuron's position are allocated:
            ' input neurons have no parents, output neurons have no children
            Select Case type
                Case NeuronType.Input
                    Me.WeightsToChild = New List(Of Weight)
                Case NeuronType.Hidden
                    Me.WeightsToChild = New List(Of Weight)
                    Me.WeightsToParent = New List(Of Weight)
                Case NeuronType.Output
                    Me.WeightsToParent = New List(Of Weight)
            End Select
        End Sub

        Public Overrides Function ToString$()
            Dim result = "Input = " & Me.Input.ToString(NumericalFormat) & vbCr
            result &= "Output = " & Me.Output.ToString(NumericalFormat) & vbCr
            result &= "Error = " & Me.ErrorDelta.ToString(NumericalFormat)
            Return result
        End Function

    End Class

End Namespace

NeuronEnum.vb

Namespace Neurons
    Public Module NeuronEnum
        Public Enum NeuronType
            Input
            Hidden
            Output
        End Enum
    End Module
End Namespace

Weight.vb

Namespace Neurons

    ''' <summary>
    ''' Directed connection between a parent neuron and a child neuron;
    ''' Previous keeps the last adjustment for the momentum term.
    ''' </summary>
    Public Class Weight

        Public Property Value#
        Public Property Previous#
        Public Property Child As Neuron
        Public Property Parent As Neuron

        Public Sub New(value#, parent_node As Neuron, child_node As Neuron)
            Me.Previous = 0
            Me.Value = value
            Me.Child = child_node
            Me.Parent = parent_node
        End Sub

    End Class

End Namespace
BaseRandom.vb

Imports Perceptron.Utilities

Namespace Randoms

    ''' <summary>
    ''' Base class for the random generators used to initialize weights.
    ''' </summary>
    Public MustInherit Class BaseRandom

        Public Property Range As Range

        Public Sub New(range As Range)
            Me.Range = range
        End Sub

        Public MustOverride Function Generate#()

    End Class

End Namespace

Standard.vb

Imports Perceptron.Utilities

Namespace Randoms

    ''' <summary>
    ''' Uniform random generator over [Range.Minimum, Range.Maximum].
    ''' </summary>
    Public Class Standard : Inherits BaseRandom

        Private m_random As Random

        Public Sub New(range As Range, seed%)
            MyBase.New(range)
            Me.m_random = New Random(seed)
        End Sub

        Public Overrides Function Generate#()
            Dim sample = Me.m_random.NextDouble() * Me.Range.Delta + Me.Range.Minimum
            ' Rounding keeps generated weights reproducible when printed/restored
            Dim rounded# = Math.Round(sample, clsMLPGeneric.nbRoundingDigits) ' 28/11/2020
            Return rounded
        End Function

    End Class

End Namespace

Range.vb

Namespace Utilities

    ''' <summary>
    ''' Closed numeric interval [Minimum, Maximum]; Delta is its width.
    ''' </summary>
    Public Class Range

        Public Property Minimum#
        Public Property Maximum#

        Public ReadOnly Property Delta#
            Get
                Return Me.Maximum - Me.Minimum
            End Get
        End Property

        Public Sub New(min#, max#)
            Me.Minimum = min
            Me.Maximum = max
        End Sub

    End Class

End Namespace

clsMLPRProp.vb

' From https://github.com/nokitakaze/ResilientBackProp : C# -> VB .NET conversion

Imports System.IO
Imports System.Text ' StringBuilder
Imports System.Threading ' Interlocked
Imports System.Threading.Tasks ' TaskFactory

''' <summary>
''' Resilient Back Propagation (RPROP)
''' </summary>
Public Class clsMLPRProp : Inherits clsVectorizedMLPGeneric

    Public multiThread As Boolean = False
    Public inputJaggedDblArray#()()
    Public targetJaggedDblArray#()()
    Dim m_trainData#()()
    Dim m_nbWeights%
    Public m_gnn As NeuralNetwork
    Public m_weights#()

    Public Overrides Function GetMLPType$()
        Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name
    End Function

    Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType
        Return enumActivationFunctionType.SpecificCodeOptimized
    End Function

    Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
        MyBase.InitializeStruct(neuronCount, addBiasColumn)
        Me.trainingAlgorithm =
enumTrainingAlgorithm.RProp
        ' Randomize weights between [0 - 1] instead of [-0.5 - 0.5] ?
        Me.useNguyenWidrowWeightsInitialization = False
        Me.learningRate = 0
        Me.weightAdjustment = 0
        Me.useBias = addBiasColumn
        Me.nbIterationsBatch = 10
        If Not Me.useBias Then
            Throw New NotImplementedException(
                "useBias=False is not implemented for clsMLPRProp!")
        End If
        If IsNothing(Me.inputArray) Then Exit Sub
        ' Convert the Single 2D samples into jagged Double arrays once, up front
        Dim inputArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.inputArray)
        Me.inputJaggedDblArray =
            clsMLPHelper.Transform2DArrayToJaggedArray(inputArrayDbl)
        Dim targetArrayDbl = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.targetArray)
        Me.targetJaggedDblArray =
            clsMLPHelper.Transform2DArrayToJaggedArray(targetArrayDbl)
        Me.exampleCount = Me.inputArray.GetLength(0)
        SetTrainData()
        m_gnn = New NeuralNetwork(neuronCount)
        m_nbWeights = m_gnn.GetWeightsCount()
        m_gnn.multiThread = multiThread
        m_gnn.softMaxForLastLayer = Me.classificationObjective
    End Sub

    ''' <summary>
    ''' Map a generic activation-function choice onto its optimized counterpart.
    ''' </summary>
    Public Overrides Sub SetActivationFunction(
        actFnc As enumActivationFunction, Optional gain! = 1, Optional center! = 0)

        Select Case actFnc
            Case enumActivationFunction.Sigmoid
                SetActivationFunctionOptimized(
                    enumActivationFunctionOptimized.Sigmoid, gain, center)
            Case enumActivationFunction.HyperbolicTangent
                SetActivationFunctionOptimized(
                    enumActivationFunctionOptimized.HyperbolicTangent, gain, center)
            Case enumActivationFunction.ELU
                SetActivationFunctionOptimized(
                    enumActivationFunctionOptimized.ELU, gain, center)
            Case Else
                Throw New NotImplementedException(
                    "This activation function is not available!")
        End Select
    End Sub

    Public Overrides Sub SetActivationFunctionOptimized(
        actFnc As enumActivationFunctionOptimized, Optional gain! = 1, Optional center!
= 0)
        MyBase.SetActivationFunctionOptimized(actFnc, gain, center)
        ' Hand the generic lambda functions down to the underlying network
        m_gnn.LambdaFnc = Me.lambdaFnc
        m_gnn.LambdaFncDFOF = Me.lambdaFncDFOF
        ' Mirror the optimized activation choice onto the generic enumeration
        Select Case actFnc
            Case enumActivationFunctionOptimized.Sigmoid
                Me.m_actFunc = enumActivationFunction.Sigmoid
            Case enumActivationFunctionOptimized.HyperbolicTangent
                Me.m_actFunc = enumActivationFunction.HyperbolicTangent
            Case enumActivationFunctionOptimized.ELU
                Me.m_actFunc = enumActivationFunction.ELU
            Case Else
                ' Bug fix: the original placed three reset statements
                ' (Me.activFnc / m_gnn.LambdaFnc / m_gnn.LambdaFncDFOF = Nothing)
                ' AFTER this Throw, where they could never execute; that
                ' unreachable dead code has been removed (behavior unchanged).
                Throw New NotImplementedException(
                    "This activation function is not available!")
        End Select
    End Sub

    ''' <summary>
    ''' Random (re)initialization of all weights and biases, either with the
    ''' Nguyen-Widrow scheme or uniformly within [minValue, maxValue].
    ''' </summary>
    Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        If Me.useNguyenWidrowWeightsInitialization Then
            InitializeWeightsNguyenWidrow()
        Else
            Dim rnd As Random = New Random()
            Dim weights = New Double(Me.m_nbWeights - 1) {} ' actually weights & biases
            For i = 0 To Me.m_nbWeights - 1
                'Dim r# = 20.0 * rnd.NextDouble() - 10.0
                'Dim r# = rnd.NextDouble(minValue, maxValue)
                Dim r# = rnd.NextDoubleGreaterThanZero(minValue, maxValue,
                    clsMLPGeneric.minRandomValue)
                Dim rounded# = Math.Round(r, clsMLPGeneric.nbRoundingDigits)
                weights(i) = rounded
            Next
            Me.m_gnn.SetWeights(weights)
        End If
    End Sub

    Private Sub InitializeWeightsNguyenWidrow()
        Const SmallStep! = 0.001
        Const VerySmallStep!
= clsMLPGeneric.minRandomValue ' 0.0001 Me.rnd = New Random() Dim weights = New Double(Me.m_nbWeights - 1) {} ' actually weights & biases Dim numWeight% = 0 For layer = 1 To Me.layerCount - 1 Dim size = Me.neuronCount(layer) Dim prev_size = Me.neuronCount(layer - 1) Dim layerNumWeights = (Me.neuronCount(layer - 1) + 1) * Me.neuronCount(layer) For node = 0 To size - 1 Dim biasIndice = numWeight + prev_size ' Me.Neurons(layer).Biases(node) Dim r# = Me.rnd.NextDouble() 'r = 1 ' Show the weights distribution weights(biasIndice) = (SmallStep - VerySmallStep) * r + VerySmallStep Dim vj# = 0 For i = 0 To prev_size - 1 Dim wIndice = numWeight + i ' Me.Neurons(layer).Weights(node)(i) r = Me.rnd.NextDouble() 'r = 1 ' Show the weights distribution weights(wIndice) = (SmallStep - VerySmallStep) * r + VerySmallStep Dim x = weights(wIndice) vj += x * x Next ' Nguyen-Widrow (1990) algorithm ' https://www.rdocumentation.org/packages/brnn/versions/0.8/topics/initnw ' p : Number of predictors ' n : Number of cases ' Scaling factor : teta = 0.7 * p ^ (1/n) ' (see the distribution examples at the end of this file) vj = 0.7 * Math.Pow(size, 1.0 / prev_size) / Math.Sqrt(vj) For i = 0 To prev_size - 1 Dim wIndice = numWeight + i ' Me.Neurons(layer).Weights(node)(i) weights(wIndice) *= vj Next numWeight += prev_size + 1 Next node Next layer For i = 0 To Me.m_nbWeights - 1 Dim r# = weights(i) Dim rounded# = Math.Round(r, 4) ' clsMLPGeneric.nbRoundingDigits = 2 weights(i) = rounded Next m_gnn.SetWeights(weights) End Sub Public Overrides Sub InitializeWeights(layer%, weights#(,)) Static s_weights#() Static l% = 0 If layer = 1 Then l = 0 s_weights = New Double(Me.m_nbWeights - 1) {} ' actually weights & biases End If Dim nbNeuronsLayer = Me.neuronCount(layer) Dim nbNeuronsPreviousLayer = Me.neuronCount(layer - 1) ' Bias weigths are included within main weights in this array: weights#(,) If Me.useBias Then nbNeuronsPreviousLayer += 1 For j = 0 To nbNeuronsLayer - 1 For k = 0 To 
nbNeuronsPreviousLayer - 1
                s_weights(l) = weights(j, k)
                l += 1
            Next k
        Next j
        ' Once the last layer has been copied, push the flat vector to the network
        If layer = Me.layerCount - 1 Then Me.m_gnn.SetWeights(s_weights)
    End Sub

    ''' <summary>
    ''' Build m_trainData: one row per sample, input values first, targets last.
    ''' </summary>
    Public Sub SetTrainData()
        Dim numInput = Me.nbInputNeurons
        Dim numOutput = Me.nbOutputNeurons
        Dim numRows = Me.exampleCount
        Dim result = New Double(numRows - 1)() {} ' allocate return-result matrix
        For i = 0 To numRows - 1
            result(i) = New Double(numInput + numOutput - 1) {} ' 1-of-N Y in last column
        Next
        Dim numTrainRows% = Me.exampleCount
        m_trainData = New Double(numTrainRows - 1)() {}
        For r = 0 To numRows - 1
            Dim inputs#() = Me.inputJaggedDblArray(r)
            Dim c% = 0 ' column into result[][]
            For i = 0 To numInput - 1
                result(r)(c) = inputs(i)
                c += 1
            Next
            For i = 0 To numOutput - 1
                result(r)(c) = Me.targetJaggedDblArray(r)(i)
                c += 1
            Next
            m_trainData(r) = result(r)
        Next r ' each row
    End Sub

    ''' <summary>
    ''' Vector training, one RPROP epoch per iteration.
    ''' </summary>
    Public Overrides Sub TrainVector()
        Me.learningMode = enumLearningMode.Vectorial
        Me.vectorizedLearningMode = True
        For iteration = 0 To Me.nbIterations - 1
            Me.numIteration = iteration
            TrainVectorOneIteration()
            If Me.printOutput_ Then PrintOutput(iteration)
        Next
        SetOuput1D()
        ComputeAverageError()
    End Sub

    Public Overrides Sub TrainVectorOneIteration()
        Dim maxEpochs% = 1
        Dim finalErr# = 0
        m_gnn.TrainRPROP(m_trainData, maxEpochs, finalErr)
        Me.averageError = finalErr
    End Sub

    ''' <summary>
    ''' Batch vector training: run nbIterationsBatch RPROP epochs in one call.
    ''' </summary>
    Public Overrides Sub TrainVectorBatch(nbIterationsBatch%)
        Me.learningMode = enumLearningMode.VectorialBatch
        Me.vectorizedLearningMode = True
        Dim maxEpochs% = nbIterationsBatch
        Dim finalErr# = 0
        m_gnn.TrainRPROP(m_trainData, maxEpochs, finalErr)
        Me.averageError = finalErr
    End Sub

    Public Overrides Sub SetOuput1D()
        Dim nbInputs = Me.inputArray.GetLength(0)
        Dim nbTargets = Me.targetArray.GetLength(0)
        Dim lengthTot = nbTargets * Me.nbOutputNeurons
        Dim outputs1D#(lengthTot - 1)
        Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1)
        Dim k = 0
        For i = 0 To nbInputs - 1
            Dim outputs#(Me.nbOutputNeurons - 1)
            outputs = m_gnn.ComputeOutputs(Me.inputJaggedDblArray(i))
            For j = 0 To
Me.nbOutputNeurons - 1 outputs2D(i, j) = outputs(j) outputs1D(k) = outputs(j) k += 1 Next Next Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs1D) Me.output = outputs2D End Sub Public Overrides Sub TrainSystematic(inputs!(,), targets!(,), Optional learningMode As enumLearningMode = enumLearningMode.Defaut) If learningMode = enumLearningMode.Vectorial Then TrainVector() ' Does not work fine Else TrainVectorBatch() ' This is the main learning mode for this MLP End If End Sub Public Overrides Sub TrainStochastic(inputs!(,), targets!(,)) ' TrainStochastic requires TrainOneSample Throw New NotImplementedException("There is no TrainOneSample() function!") End Sub Public Overrides Sub TrainSemiStochastic(inputs!(,), targets!(,)) ' TrainSemiStochastic requires TrainOneSample Throw New NotImplementedException("There is no TrainOneSample() function!") End Sub Public Overrides Sub TrainOneSample(input!(), target!()) Throw New NotImplementedException("There is no TrainOneSample() function!") End Sub Public Overrides Sub TestOneSample(input!()) Dim inputDble = clsMLPHelper.Convert1DArrayOfSingleToDouble(input) Dim outputs = m_gnn.ComputeOutputs(inputDble) Me.lastOutputArray1DSingle = clsMLPHelper.Convert1DArrayOfDoubleToSingle(outputs) Dim outputs2D#(0, Me.nbOutputNeurons - 1) clsMLPHelper.Fill2DArrayOfDouble(outputs2D, outputs, 0) Me.output = outputs2D End Sub Public Overrides Function ShowWeights$(Optional format$ = format2Dec) Me.m_weights = Me.m_gnn.GetWeights() Dim sb As New StringBuilder If Me.learningMode = enumLearningMode.VectorialBatch Then _ sb.AppendLine("nb iterations batch=" & Me.nbIterationsBatch) If Me.useNguyenWidrowWeightsInitialization Then format = format4Dec Dim weightsBase = MyBase.ShowWeights(format) sb.Append(weightsBase) Dim weights = sb.ToString Return weights End Function Public Overrides Function GetWeight#(layer%, neuron%, weight%) If IsNothing(Me.m_weights) Then Return 0.0! 
Dim l% = weight For i% = 1 To layer Dim nbNeuronsLayer = Me.neuronCount(i) Dim nbNeuronsPreviousLayer = Me.neuronCount(i - 1) If Me.useBias Then nbNeuronsPreviousLayer += 1 Dim mult = neuron If i < layer Then mult = nbNeuronsLayer l += nbNeuronsPreviousLayer * mult Next Return Me.m_weights(l) End Function Public Overrides Function GetWeightSingle!(layer%, neuron%, weight%) Dim wd# = Me.GetWeight(layer, neuron, weight) Dim ws! = CSng(wd) Return ws End Function Public Overrides Sub SetWeight(layer%, neuron%, weight%, weightWalue#) If IsNothing(Me.m_weights) Then Exit Sub Dim l% = weight For i% = 1 To layer Dim nbNeuronsLayer = Me.neuronCount(i) Dim nbNeuronsPreviousLayer = Me.neuronCount(i - 1) If Me.useBias Then nbNeuronsPreviousLayer += 1 Dim mult = neuron If i < layer Then mult = nbNeuronsLayer l += nbNeuronsPreviousLayer * mult Next Me.m_weights(l) = weightWalue End Sub Public Overrides Sub SetWeightSingle(layer%, neuron%, weight%, weightWalue!) Dim wd# = weightWalue SetWeight(layer, neuron, weight, wd) End Sub #Region "Console demo" Const lineLenGlob% = 10 Public Sub ConsoleDemo(ByRef trainAcc#, ByRef testAcc#, multiThread As Boolean, Optional trainAndTest As Boolean = False, Optional fastMode As Boolean = False) Console.WriteLine(vbLf & "Begin neural network with Resilient Back-Propagation (RPROP) training demo") Const numInput% = 4 ' number features Const numHidden% = 5 Const numOutput% = 3 ' number of classes for Y Const numRows% = 5000 '10000 Console.WriteLine(vbLf & "Generating " & numRows & " artificial data items with " & numInput & " features") Dim allData = MakeAllData(numInput, numHidden, numOutput, numRows) Console.WriteLine("Done") Console.WriteLine(vbLf & "Creating train (80%) and test (20%) matrices") Dim trainData As Double()() = Nothing Dim testData As Double()() = Nothing MakeTrainTest(allData, 0.8, trainData, testData) Console.WriteLine("Done") Console.WriteLine(vbLf & "Training data: " & vbLf) ShowData(trainData, 4, 2, True) 
Console.WriteLine("Test data: " & vbLf) ShowData(testData, 3, 2, True) Console.WriteLine("Creating a 4-5-3 neural network") Dim sizes%() = {numInput, numHidden, numOutput} Dim nn As NeuralNetwork = New NeuralNetwork(sizes) If nn.saveWeights Then nn.Save("before_test.dat") Const maxEpochs% = 1000 Console.WriteLine(vbLf & "Setting maxEpochs = " & maxEpochs) Console.WriteLine(vbLf & "Starting RPROP training") nn.multiThread = multiThread 'False nn.consoleDemo = True nn.debugConsoleDemo = False 'True nn.fastMode = fastMode nn.softMaxForLastLayer = True Dim weights#() If trainAndTest Then weights = nn.TrainRPROPAndTest(trainData, maxEpochs, testData) ' RPROP Else Dim finalErr# = 0 nn.TrainRPROP(trainData, maxEpochs, finalErr) ' RPROP nn.TestRPROP(testData) weights = nn.GetWeights End If If nn.saveWeights Then nn.Save("after_test.dat") nn.ShowMessage("Done") nn.ShowMessage(vbLf & "Final neural network model weights:" & vbLf) ShowVector(weights, 4, lineLenGlob, True) trainAcc = nn.Accuracy(trainData, weights) nn.ShowMessage(vbLf & "Accuracy on training data = " & trainAcc.ToString("F4")) testAcc = nn.Accuracy(testData, weights) nn.ShowMessage(vbLf & "Accuracy on test data = " & testAcc.ToString("F4")) nn.ShowMessage(vbLf & "End neural network with Resilient Propagation demo" & vbLf) End Sub ''' <summary> ''' Generate synthetic data ''' </summary> Private Function MakeAllData(numInput%, numHidden%, numOutput%, numRows%) As Double()() Dim rnd As Random = New Random() Dim numWeights = numInput * numHidden + numHidden + numHidden * numOutput + numOutput Const initWeights = True ' Note: There is allready a Nguyen-Widrow weights initialization Dim weights#() = New Double(numWeights - 1) {} ' actually weights & biases If initWeights Then For i = 0 To numWeights - 1 weights(i) = 20.0 * rnd.NextDouble() - 10.0 ' [-10.0 to -10.0] Next Console.WriteLine("Generating weights:") ShowVector(weights, 4, lineLenGlob, True) End If Dim result = New Double(numRows - 1)() {} ' allocate 
return-result matrix For i = 0 To numRows - 1 result(i) = New Double(numInput + numOutput - 1) {} ' 1-of-N Y in last column Next Dim sizes%() = {numInput, numHidden, numOutput} Dim gnn As NeuralNetwork = New NeuralNetwork(sizes) ' generating NN If initWeights Then gnn.SetWeights(weights) For r = 0 To numRows - 1 ' generate random inputs Dim inputs#() = New Double(numInput - 1) {} For i = 0 To numInput - 1 inputs(i) = 20.0 * rnd.NextDouble() - 10.0 ' [-10.0 to -10.0] Next ' compute outputs Dim outputs#() = gnn.ComputeOutputs(inputs) ' translate outputs to 1-of-N Dim oneOfN#() = New Double(numOutput - 1) {} ' all 0.0 Dim maxIndex% = 0 Dim maxValue# = outputs(0) For i = 0 To numOutput - 1 If Not outputs(i) > maxValue Then Continue For maxIndex = i maxValue = outputs(i) Next oneOfN(maxIndex) = 1.0 ' place inputs and 1-of-N output values into curr row Dim c% = 0 ' column into result[][] For i = 0 To numInput - 1 result(r)(c) = inputs(i) c += 1 Next For i = 0 To numOutput - 1 result(r)(c) = oneOfN(i) c += 1 Next Next r ' each row Return result End Function ' MakeAllData ''' <summary> ''' Put synthetic data to train and test ''' </summary> Private Sub MakeTrainTest(allData#()(), trainPct#, ByRef trainData#()(), ByRef testData#()()) Dim rnd As Random = New Random() Dim totRows = allData.Length Dim numTrainRows = CInt(totRows * trainPct) ' usually 0.80 Dim numTestRows = totRows - numTrainRows trainData = New Double(numTrainRows - 1)() {} testData = New Double(numTestRows - 1)() {} Dim copy = New Double(allData.Length - 1)() {} ' ref copy of all data For i = 0 To copy.Length - 1 copy(i) = allData(i) Next For i = 0 To copy.Length - 1 Dim r% = rnd.Next(i, copy.Length) ' use Fisher-Yates Dim tmp#() = copy(r) copy(r) = copy(i) copy(i) = tmp Next i For i = 0 To numTrainRows - 1 trainData(i) = copy(i) Next For i = 0 To numTestRows - 1 testData(i) = copy(i + numTrainRows) Next End Sub ' MakeTrainTest Private Shared Sub ShowData(data#()(), numRows%, decimals%, indices As Boolean) 
Dim len = data.Length.ToString().Length ' First rows to display For i = 0 To numRows - 1 If indices Then Console.Write("[" & i.ToString().PadLeft(len) & "] ") If IsNothing(data(i)) Then Console.WriteLine("") : Continue For For j = 0 To data(i).Length - 1 Dim v# = data(i)(j) If v >= 0.0 Then Console.Write(" ") ' '+' Console.Write(v.ToString("F" & decimals) & " ") Next Console.WriteLine("") Next Dim lastRow% = data.Length - 1 If numRows = lastRow + 1 Then Console.WriteLine(vbLf) : Exit Sub Console.WriteLine(". . .") If indices Then Console.Write("[" & lastRow.ToString().PadLeft(len) & "] ") ' Display last row For j = 0 To data(lastRow).Length - 1 Dim v# = data(lastRow)(j) If v >= 0.0 Then Console.Write(" ") ' '+' Console.Write(v.ToString("F" & decimals) & " ") Next Console.WriteLine(vbLf) End Sub Private Shared Sub ShowVector(vector#(), decimals%, lineLen%, newLine As Boolean) For i = 0 To vector.Length - 1 If i > 0 AndAlso i Mod lineLen = 0 Then Console.WriteLine("") If vector(i) >= 0 Then Console.Write(" ") Console.Write(vector(i).ToString("F" & decimals) & " ") Next If newLine Then Console.WriteLine("") End Sub #End Region #Region "Structures" Public Structure WeightComposite Public Weights#()() Public Biases#() End Structure Public Structure ThreadInputDatum Public trainDatum#()() Public allGradsAcc As WeightComposite() Public field#()() Public xValues#() Public tValues#() ''' <summary> ''' Sum ''' </summary> Public delim1# ''' <summary> ''' Average ''' </summary> Public delim2# Public sumSquaredErrors#() End Structure Public Structure RMSEThreadInputDatum Public trainDatum#()() Public xValues#() Public tValues#() Public field#()() ''' <summary> ''' Sum ''' </summary> Public delim1# ''' <summary> ''' Average ''' </summary> Public delim2# Public sumSquaredErrors#() End Structure #End Region Public Class NeuralNetwork : Inherits AbstractNeuralNetwork Public Sub New(sizes As IReadOnlyList(Of Integer)) MyBase.New(sizes) End Sub Protected Overrides Function 
ActivateFunction( rawValues As IReadOnlyList(Of Double), layerId%) As Double() If Me.softMaxForLastLayer AndAlso layerId >= Me.LayerCount - 1 Then Return Softmax(rawValues) Dim actfctDefined = Not IsNothing(Me.LambdaFnc) Dim values#() = New Double(rawValues.Count - 1) {} For i = 0 To rawValues.Count - 1 If actfctDefined Then Dim r# = rawValues(i) Dim v1 = Me.LambdaFnc.Invoke(r) values(i) = v1 'If debugActivationFunction Then ' Dim v2 = HyperTan(rawValues(i)) ' If Not clsMLPHelper.Compare(v1, v2, dec:=5) Then Stop 'End If Continue For End If values(i) = HyperTan(rawValues(i)) Next Return values End Function Protected Overrides Function CalculateGradTerms(rawValues#()(), tValues As IReadOnlyList(Of Double)) As Double()() Dim gradTerms = New Double(rawValues.Length - 1)() {} For layerId = Me.LayerCount - 1 To 0 + 1 Step -1 gradTerms(layerId) = If(layerId < Me.LayerCount - 1, CalculateGradTermsForNonLast( rawValues(layerId), Me.Neurons(layerId + 1).Weights, gradTerms(layerId + 1), Me.LambdaFncDFOF), CalculateGradTermsForLast(rawValues(layerId), tValues, Me.LambdaFncDFOF, Me.softMaxForLastLayer)) Next Return gradTerms End Function Private Shared Function CalculateGradTermsForLast(rawValues As IReadOnlyList(Of Double), tValues As IReadOnlyList(Of Double), lambdaFncDFOF As Func(Of Double, Double), softMaxForLastLayer As Boolean) As Double() Dim actfctDefined = Not IsNothing(lambdaFncDFOF) Dim gradTerms = New Double(rawValues.Count - 1) {} For i = 0 To rawValues.Count - 1 Dim value# = rawValues(i) Dim derivative# If softMaxForLastLayer OrElse Not actfctDefined Then ' derivative of softmax = (1 - y) * y (same as log-sigmoid) derivative = (1 - value) * value Else derivative = lambdaFncDFOF.Invoke(value) End If ' careful with O-T vs. 
T-O, O-T is the most usual Dim delta# = value - tValues(i) gradTerms(i) = derivative * delta Next Return gradTerms End Function Private Shared Function CalculateGradTermsForNonLast( rawValues As IReadOnlyList(Of Double), nextNeuronLayerWeights As IReadOnlyList(Of Double()), nextGradTerms As IReadOnlyList(Of Double), lambdaFncDFOF As Func(Of Double, Double)) As Double() Dim actfctDefined = Not IsNothing(lambdaFncDFOF) Dim gradTerms = New Double(rawValues.Count - 1) {} Dim i% = 0 For i = 0 To rawValues.Count - 1 Dim value# = rawValues(i) Dim derivative# If actfctDefined Then derivative = lambdaFncDFOF.Invoke(value) 'If debugActivationFunction Then ' Dim d2 = (1 - value) * (1 + value) ' If Not clsMLPHelper.Compare(derivative, d2, dec:=5) Then Stop 'End If Else ' derivative of tanh = (1 - y) * (1 + y) derivative = (1 - value) * (1 + value) End If ' double sum = nextGradTerms.Select((t, j) => t * nextNeuronLayerWeights[j][i]).Sum(); Dim sum# = Enumerable.Select(nextGradTerms, Function(t, j) t * nextNeuronLayerWeights(j)(i)).Sum() ' each hidden delta is the sum of this.sizes[2] terms gradTerms(i) = derivative * sum Next Return gradTerms End Function Protected Shared Function HyperTan#(x#) If x < -20.0 Then Return -1.0 ' approximation is correct to 30 decimals Return If(x > 20.0, 1.0, Math.Tanh(x)) End Function Protected Shared Function Softmax(oSums As IReadOnlyList(Of Double)) As Double() ' does all output nodes at once so scale doesn't have to be re-computed each time ' determine max output-sum Dim max# = oSums(0) max = Enumerable.Concat(oSums, {max}).Max() ' determine scaling factor -- sum of exp(each val - max) Dim scale# = oSums.Sum(Function(t) Math.Exp(t - max)) Dim result = New Double(oSums.Count - 1) {} For i = 0 To oSums.Count - 1 result(i) = Math.Exp(oSums(i) - max) / scale Next Return result ' now scaled so that xi sum to 1.0 End Function End Class Public MustInherit Class AbstractNeuralNetwork ''' <summary> ''' Lambda function for the activation function ''' 
</summary> Public LambdaFnc As Func(Of Double, Double) ''' <summary> ''' Lambda function for the derivative of the activation function, ''' from the original function ''' </summary> Public LambdaFncDFOF As Func(Of Double, Double) Public consoleDemo As Boolean = False Public debugConsoleDemo As Boolean = False ''' <summary> ''' softmax is an activation function allowing to estimate a probability at each ''' output (1 among N) in the objective of classification within a homogeneous group ''' </summary> Public softMaxForLastLayer As Boolean = False Public saveWeights As Boolean = False Public fastMode As Boolean = True Public multiThread As Boolean Const displayMod% = 100 ' 10 Const showProgress = False Const displayMod2% = 5 Const sumIndice% = 0 Const averageIndice% = 1 Protected ReadOnly Rnd As Random Protected LayerCount% Protected Sizes%() ''' <summary> ''' Values for layers ''' </summary> Protected Layers#()() Protected Neurons As WeightComposite() Const SmallErr! = 0.001 Const VerySmallErr! = 0.0001 Const SmallValue# = 0.01 ''' <summary> ''' Variable etaPlus is the factor used to increase a weight or bias delta when ''' the algorithm is moving in the correct direction. ''' </summary> Const EtaPlus# = 1.2 ' values are from the paper ''' <summary> ''' Variable etaMinus is the factor used to decrease a weight or bias delta when ''' the algorithm has overshot a minimum error. ''' </summary> Const EtaMinus# = 0.5 ''' <summary> ''' Variables deltaMax and deltaMin are used to prevent any weight or bias increase ''' or decrease factor from being too large or too small. 
''' </summary>
    Const DeltaMax# = 50.0
    Const DeltaMin# = 0.000001

    ' Number of worker tasks used by the multi-thread code paths (0 = not chosen yet).
    Private ThreadCount%

    ''' <summary>
    ''' Builds the layer/bias/weight structure from the given layer sizes and
    ''' initializes all weights and biases with the Nguyen-Widrow scheme.
    ''' </summary>
    Protected Sub New(sizes As IReadOnlyList(Of Integer))
        Me.LayerCount = sizes.Count
        Me.Sizes = New Integer(sizes.Count - 1) {}
        For i = 0 To sizes.Count - 1
            Me.Sizes(i) = sizes(i)
        Next
        Me.Layers = New Double(Me.LayerCount - 1)() {}
        Me.Neurons = New WeightComposite(Me.LayerCount - 1) {}
        For i = 0 To Me.LayerCount - 1
            Me.Layers(i) = New Double(Me.Sizes(i) - 1) {}
        Next
        ' Layer 0 is the input layer: it carries values but no biases/weights.
        For i = 1 To Me.LayerCount - 1
            Me.Neurons(i).Biases = New Double(Me.Sizes(i) - 1) {}
            Me.Neurons(i).Weights = MakeMatrix(Me.Sizes(i), Me.Sizes(i - 1), 0.0)
        Next
        Me.Rnd = New Random()
        InitializeWeightsNguyenWidrow() ' all weights and biases
    End Sub

    ''' <summary>Applies the activation function to a layer's raw sums.</summary>
    Protected MustOverride Function ActivateFunction(
        rawValues As IReadOnlyList(Of Double), layerId%) As Double()

    ''' <summary>Computes the backpropagation gradient terms for every layer.</summary>
    Protected MustOverride Function CalculateGradTerms(
        rawValues#()(), tValues As IReadOnlyList(Of Double)) As Double()()

    ' helper for Train
    Protected Shared Function MakeMatrix(rows%, cols%, v#) As Double()()
        Dim result = New Double(rows - 1)() {}
        For r = 0 To result.Length - 1
            result(r) = New Double(cols - 1) {}
        Next
        For i = 0 To rows - 1
            For j = 0 To cols - 1
                result(i)(j) = v
            Next
        Next
        Return result
    End Function

    Protected Shared Function MakeVector(len%, v#) As Double() ' helper for Train
        Dim result#() = New Double(len - 1) {}
        For i = 0 To len - 1
            result(i) = v
        Next
        Return result
    End Function

    ''' <summary>
    ''' Random weight/bias initialization followed by the Nguyen-Widrow (1990)
    ''' per-node rescaling of the weight vectors.
    ''' </summary>
    Protected Sub InitializeWeightsNguyenWidrow()
        Const SmallStep! = 0.001
        Const VerySmallStep! = 0.0001
        For layer = 1 To Me.LayerCount - 1
            Dim size = Me.Sizes(layer)
            Dim prev_size = Me.Sizes(layer - 1)
            For node = 0 To size - 1
                ' uniform random values in [VerySmallStep, SmallStep)
                Me.Neurons(layer).Biases(node) =
                    (SmallStep - VerySmallStep) * Me.Rnd.NextDouble() + VerySmallStep
                Me.Neurons(layer).Weights(node) = New Double(prev_size - 1) {}
                Dim vj# = 0
                For i = 0 To prev_size - 1
                    Me.Neurons(layer).Weights(node)(i) =
                        (SmallStep - VerySmallStep) * Me.Rnd.NextDouble() + VerySmallStep
                    'vj += Math.Pow(Me.Neurons(layer).Weights(node)(i), 2)
                    Dim x = Me.Neurons(layer).Weights(node)(i)
                    vj += x * x ' accumulate squared norm of the node's weight vector
                Next
                ' Nguyen-Widrow (1990) algorithm
                ' https://www.rdocumentation.org/packages/brnn/versions/0.8/topics/initnw
                ' p : Number of predictors
                ' n : Number of cases
                ' Scaling factor : teta = 0.7 * p ^ (1/n)
                ' (see the distribution examples at the end of this file)
                vj = 0.7 * Math.Pow(size, 1.0 / prev_size) / Math.Sqrt(vj)
                For i = 0 To prev_size - 1
                    Me.Neurons(layer).Weights(node)(i) *= vj
                Next
            Next
        Next
    End Sub

    ''' <summary>
    ''' Allocates and zeroes the three per-layer accumulators used by RPROP
    ''' (current gradients, previous gradients, previous deltas).
    ''' </summary>
    Private Sub InitializeGradients(
        allGradsAcc As WeightComposite(),
        prevGradsAcc As WeightComposite(),
        prevDeltas As WeightComposite())
        For i = 1 To Me.LayerCount - 1
            Dim size% = Me.Sizes(i)
            Dim prevSize% = Me.Sizes(i - 1)
            ' accumulated over all training data
            allGradsAcc(i).Biases = New Double(size - 1) {}
            allGradsAcc(i).Weights = MakeMatrix(size, prevSize, 0.0)
            ' accumulated, previous iteration
            prevGradsAcc(i).Biases = New Double(size - 1) {}
            prevGradsAcc(i).Weights = MakeMatrix(size, prevSize, 0.0)
            ' must save previous weight deltas
            prevDeltas(i).Biases = MakeVector(size, SmallValue)
            prevDeltas(i).Weights = MakeMatrix(size, prevSize, SmallValue)
        Next
    End Sub

    ''' <summary>
    ''' RPROP batch training with periodic evaluation on testData;
    ''' returns the final flattened weight array.
    ''' </summary>
    Public Function TrainRPROPAndTest(trainData#()(), maxEpochs%, testData#()()) As Double()
        ' Acc: Accumulated
        Dim allGradsAcc As WeightComposite() = New WeightComposite(Me.LayerCount - 1) {}
        Dim prevGradsAcc As WeightComposite() = New WeightComposite(Me.LayerCount - 1) {}
        Dim prevDeltas As WeightComposite() = New WeightComposite(Me.LayerCount - 1) {}
        InitializeGradients(allGradsAcc, prevGradsAcc, prevDeltas)
        If Me.multiThread AndAlso Me.ThreadCount = 0 Then
            Me.ThreadCount = Environment.ProcessorCount - 1
            ' Round ThreadCount to a pair value to reproduce exactly the tests ?
            If Me.ThreadCount Mod 2 > 0 Then Me.ThreadCount -= 1
        End If
        Dim timer1 As Stopwatch = New Stopwatch()
        Dim timer2 As Stopwatch = New Stopwatch()
        Dim timer3 As Stopwatch = New Stopwatch()
        If Me.consoleDemo Then
            Dim currWts1#() = GetWeights()
            Dim err1#() = RootMeanSquaredError(trainData, currWts1)
            Dim err_t1#() = RootMeanSquaredError(testData, currWts1)
            Console.WriteLine(vbLf & "epoch = pre; err = {0:F4} [{1:F4}]" & vbTab & "test err = {2:F4} [{3:F4}]", err1(0), err1(1), err_t1(0), err_t1(1))
            timer3.Start()
        End If
        Dim epoch% = 0
        While epoch < maxEpochs
            epoch += 1
            If Me.consoleDemo Then
                timer3.Stop()
                timer1.Start()
            End If
            ' 1. compute and accumulate all gradients
            For layer = 1 To Me.LayerCount - 1
                ' zero-out values from prev iteration
                ZeroOut(allGradsAcc(layer).Weights)
                ZeroOut(allGradsAcc(layer).Biases)
            Next
            ' 0: sumSquaredErrorItem, 1:sumSquaredError
            Dim err#() = ComputeGraduate(trainData, allGradsAcc)
            Dim finalErr = err(0)
            ' update all weights and biases (in any order)
            UpdateWeigtsAndBiases(allGradsAcc, prevGradsAcc, prevDeltas)
            If Me.consoleDemo Then
                timer1.Stop()
                timer3.Start()
                If showProgress Then Console.Write(".")
            End If
            If epoch Mod displayMod = 0 OrElse finalErr <= VerySmallErr Then
                If Me.consoleDemo Then
                    timer3.Stop()
                    timer2.Start()
                    Dim currWts#() = GetWeights()
                    Dim err_t#() = RootMeanSquaredError(testData, currWts)
                    Console.WriteLine(vbLf & "epoch = {0} err = {1:F4} [{2:F4}]" & vbTab & "test err = {3:F4} [{4:F4}]", epoch, finalErr, err(1), err_t(0), err_t(1))
                    timer2.Stop()
                    timer3.Start()
                End If
                If Me.saveWeights Then Save("epoch-" & epoch & ".dat")
                If Me.fastMode Then Exit While
                If finalErr <= SmallErr Then Exit While
            Else
                If Me.consoleDemo AndAlso showProgress AndAlso epoch Mod displayMod2 = 0 Then _
                    Console.Write(" ")
            End If
        End While
        If Me.consoleDemo Then
            timer3.Stop()
            Console.WriteLine("Elapsed time. Neuro = {0}, RMSE calculation = {1}, Other work = {2}", timer1.ElapsedMilliseconds / 1000, timer2.ElapsedMilliseconds / 1000, timer3.ElapsedMilliseconds / 1000)
        End If
        Dim wts#() = GetWeights()
        Return wts
    End Function

    ''' <summary>
    ''' RPROP batch training; finalErr receives the last epoch's error (err(0)).
    ''' </summary>
    Public Sub TrainRPROP(trainData#()(), maxEpochs%, ByRef finalErr#)
        finalErr = 0
        ' Acc: Accumulated
        Dim allGradsAcc As WeightComposite() = New WeightComposite(Me.LayerCount - 1) {}
        Dim prevGradsAcc As WeightComposite() = New WeightComposite(Me.LayerCount - 1) {}
        Dim prevDeltas As WeightComposite() = New WeightComposite(Me.LayerCount - 1) {}
        InitializeGradients(allGradsAcc, prevGradsAcc, prevDeltas)
        If Me.multiThread AndAlso Me.ThreadCount = 0 Then
            Me.ThreadCount = Environment.ProcessorCount - 1
            ' Round ThreadCount to a pair value to reproduce exactly the tests ?
            If Me.ThreadCount Mod 2 > 0 Then Me.ThreadCount -= 1
            'Me.ThreadCount = 1 ' multithread disabled
        End If
        If Me.consoleDemo Then
            Dim currWts1#() = GetWeights()
            Dim err1#() = RootMeanSquaredError(trainData, currWts1)
            Console.WriteLine(vbLf & "epoch = pre; err = {0:F4} [{1:F4}]", err1(0), err1(1))
        End If
        Dim timer1 As Stopwatch = New Stopwatch()
        Dim epoch% = 0
        While epoch < maxEpochs
            epoch += 1
            If Me.consoleDemo Then timer1.Start()
            ' 1. compute and accumulate all gradients
            For layer = 1 To Me.LayerCount - 1
                ' zero-out values from prev iteration
                ZeroOut(allGradsAcc(layer).Weights)
                ZeroOut(allGradsAcc(layer).Biases)
            Next
            ' 0: sumSquaredErrorItem, 1:sumSquaredError
            Dim err#() = ComputeGraduate(trainData, allGradsAcc)
            finalErr = err(0)
            If Me.debugConsoleDemo Then
                Me.ShowMessage("epoch " & epoch & " : err=" & finalErr.ToString("0.00000") & ", multithread = " & Me.multiThread)
            End If
            ' update all weights and biases (in any order)
            UpdateWeigtsAndBiases(allGradsAcc, prevGradsAcc, prevDeltas)
            If Me.consoleDemo Then
                timer1.Stop()
                If showProgress Then Console.Write(".")
            End If
            If epoch Mod displayMod = 0 OrElse finalErr <= VerySmallErr Then
                If Me.saveWeights Then Save("epoch-" & epoch & ".dat")
                If Me.fastMode Then Exit While
                If finalErr <= SmallErr Then Exit While
            Else
                If Me.consoleDemo AndAlso showProgress AndAlso epoch Mod displayMod2 = 0 Then _
                    Console.Write(" ")
            End If
        End While
        If Me.consoleDemo Then Console.WriteLine("Elapsed time. Neuro = {0}", timer1.ElapsedMilliseconds / 1000)
    End Sub

    ''' <summary>Evaluates the current weights on testData, printing the RMSE in demo mode.</summary>
    Public Sub TestRPROP(testData#()())
        Dim timer2 As Stopwatch = New Stopwatch()
        If Me.consoleDemo Then timer2.Start()
        Dim currWts#() = GetWeights()
        Dim err_t#() = RootMeanSquaredError(testData, currWts)
        If Me.consoleDemo Then
            Console.WriteLine(vbLf & "test err = {0:F4} [{1:F4}]", err_t(0), err_t(1))
            timer2.Stop()
        End If
    End Sub

    ''' <summary>Zeroes every cell of a jagged matrix.</summary>
    Protected Shared Sub ZeroOut(matrix#()())
        For Each t As Double() In matrix
            For i = 0 To t.Length - 1
                t(i) = 0.0
            Next
        Next
    End Sub

    Protected Shared Sub ZeroOut(array#()) ' helper for Train
        For i = 0 To array.Length - 1
            array(i) = 0.0
        Next
    End Sub

    ''' <summary>
    ''' WeightsCount
    ''' </summary>
    Public Function GetWeightsCount%()
        ' (+ 1) accounts for the bias of each target-layer node
        Dim numWeights% = 0
        For layerNum = 1 To Me.LayerCount - 1
            numWeights += (Me.Sizes(layerNum - 1) + 1) * Me.Sizes(layerNum)
        Next
        Return numWeights
    End Function

    ''' <summary>
    ''' Restores weights and biases from a flat array laid out layer by layer:
    ''' weights first, then biases (the exact inverse of GetWeights).
    ''' </summary>
    Public Sub SetWeights(weights#())
        ' copy weights and biases in weights[] array to i-h weights, i-h biases, h-o weights, h-o biases
        Dim numWeights% = GetWeightsCount()
        If weights.Length <> numWeights Then Throw New Exception("Bad weights array in SetWeights")
        Dim k% = 0 ' points into weights param
        For layerNum = 1 To Me.LayerCount - 1
            For i = 0 To Me.Sizes(layerNum) - 1
                For j = 0 To Me.Sizes(layerNum - 1) - 1
                    Me.Neurons(layerNum).Weights(i)(j) = weights(k)
                    k += 1
                Next
            Next
            For i = 0 To Me.Sizes(layerNum) - 1
                Me.Neurons(layerNum).Biases(i) = weights(k)
                k += 1
            Next
        Next
    End Sub

    ''' <summary>Flattens all weights and biases into one array (inverse of SetWeights).</summary>
    Public Function GetWeights() As Double()
        Dim numWeights% = GetWeightsCount()
        Dim result#() = New Double(numWeights - 1) {}
        Dim k% = 0
        For layerNum = 1 To Me.LayerCount - 1
            For i = 0 To Me.Sizes(layerNum) - 1
                For j = 0 To Me.Sizes(layerNum - 1) - 1
                    result(k) = Me.Neurons(layerNum).Weights(i)(j)
                    k += 1
                Next
            Next
            For i = 0 To Me.Sizes(layerNum) - 1
                result(k) = Me.Neurons(layerNum).Biases(i)
                k += 1
            Next
        Next
        Return result
    End Function

    ''' <summary>
    ''' Forward pass: propagates xValues through all layers and returns the
    ''' last layer's values. When outputLayers is supplied (multi-thread use),
    ''' intermediate values are stored there instead of in Me.Layers.
    ''' </summary>
    Public Function ComputeOutputs(xValues#(), Optional outputLayers#()() = Nothing) As Double()
        Dim field = If(outputLayers, Me.Layers)
        field(0) = xValues
        For layer = 1 To Me.LayerCount - 1
            ' start each node at its bias, then add the weighted inputs
            field(layer) = New Double(Me.Sizes(layer) - 1) {}
            Array.Copy(Me.Neurons(layer).Biases, field(layer), Me.Sizes(layer))
            For j = 0 To Me.Sizes(layer) - 1
                For i = 0 To Me.Sizes(layer - 1) - 1
                    field(layer)(j) += field(layer - 1)(i) * Me.Neurons(layer).Weights(j)(i)
                Next
            Next
            field(layer) = ActivateFunction(field(layer), layer)
        Next
        Return field(Me.LayerCount - 1)
    End Function

    ''' <summary>
    ''' Classification accuracy (winner-takes-all) of the given weights on testData.
    ''' </summary>
    Public Function Accuracy(testData#()(), weights#()) As Double
        SetWeights(weights)
        ' percentage correct using winner-takes all
        Dim numCorrect% = 0
        Dim numWrong% = 0
        Dim lastLayerId% = Me.LayerCount - 1
        Dim xValues#() = New Double(Me.Sizes(0) - 1) {} ' inputs
        Dim tValues#() = New Double(Me.Sizes(lastLayerId) - 1) {} ' targets
        For Each t As Double() In testData
            Array.Copy(t, xValues, Me.Sizes(0)) ' parse data into x-values and t-values
            Array.Copy(t, Me.Sizes(0), tValues, 0, Me.Sizes(lastLayerId))
            Dim yValues = ComputeOutputs(xValues) ' computed Y
            Dim maxIndex% = AbstractNeuralNetwork.MaxIndex(yValues) ' which cell in yValues has largest value?
            If tValues(maxIndex) = 1.0 Then ' ugly. consider AreEqual(double x, double y, double epsilon)
                numCorrect += 1
            Else
                numWrong += 1
            End If
        Next
        Return numCorrect / (numCorrect + numWrong) ' ugly 2 - check for divide by zero
    End Function

    ''' <summary>Dispatches to the single- or multi-thread RMSE implementation.</summary>
    Public Function RootMeanSquaredError(trainData#()(), weights#()) As Double()
        Return If(Me.multiThread,
            RootMeanSquaredErrorMultiThread(trainData, weights),
            RootMeanSquaredErrorSingleThread(trainData, weights))
    End Function

    ''' <summary>
    ''' Returns {Sqrt(per-item mean squared error), Sqrt(mean squared error)}.
    ''' </summary>
    Public Function RootMeanSquaredErrorSingleThread(trainData#()(), weights#()) As Double()
        SetWeights(weights) ' copy the weights to evaluate in
        Dim lastLayerId% = Me.LayerCount - 1
        Dim outputSize% = Me.Sizes(lastLayerId)
        Dim trainDataSize% = trainData.Length
        Dim xValues#() = New Double(Me.Sizes(0) - 1) {} ' inputs
        Dim tValues#() = New Double(outputSize - 1) {} ' targets
        Dim sumSquaredError# = 0.0
        Dim sumSquaredErrorItem# = 0.0
        For Each t As Double() In trainData
            ' following assumes data has all x-values first, followed by y-values!
            Array.Copy(t, xValues, Me.Sizes(0)) ' extract inputs
            Array.Copy(t, Me.Sizes(0), tValues, 0, outputSize) ' extract targets
            Dim yValues#() = ComputeOutputs(xValues)
            For j = 0 To outputSize - 1
                'Dim err# = Math.Pow(yValues(j) - tValues(j), 2)
                Dim delta = yValues(j) - tValues(j)
                Dim err# = delta * delta
                sumSquaredError += err / trainDataSize
                sumSquaredErrorItem += err / trainDataSize / outputSize
            Next
        Next
        Dim d#() = {Math.Sqrt(sumSquaredErrorItem), Math.Sqrt(sumSquaredError)}
        Return d
    End Function

    Private Shared Function MaxIndex%(vector As IReadOnlyList(Of Double))
        ' helper for Accuracy()
        ' index of largest value
        Dim bigIndex% = 0
        Dim biggestVal# = vector(0)
        For i = 0 To vector.Count - 1
            If vector(i) <= biggestVal Then Continue For
            biggestVal = vector(i)
            bigIndex = i
        Next
        Return bigIndex
    End Function

    ''' <summary>
    ''' Serializes layer count, layer sizes and the flattened weights to a binary file,
    ''' followed by the trailer values 0 and 4.
    ''' </summary>
    Public Sub Save(filename As String)
        Using fo As FileStream = File.Open(filename, FileMode.Create)
            Using writer As New BinaryWriter(fo)
                writer.Write(Me.LayerCount)
                For i = 0 To Me.LayerCount - 1
                    writer.Write(Me.Sizes(i))
                Next
                Dim weights#() = GetWeights()
                For Each t# In weights
                    writer.Write(t)
                Next
                writer.Write(0)
                writer.Write(4)
            End Using 'writer.Close()
        End Using 'fo.Close()
    End Sub

    ''' <summary>
    ''' Computes and accumulates all gradients for one batch
    ''' (dispatches to the single- or multi-thread implementation);
    ''' returns the two-element error array.
    ''' </summary>
    Protected Function ComputeGraduate(
        trainData#()(), allGradsAcc As WeightComposite()) As Double()
        Return If(Me.multiThread,
            ComputeGraduateMultiThread(trainData, allGradsAcc),
            ComputeGraduateSingleThread(trainData, allGradsAcc))
    End Function

    ''' <summary>
    ''' Computes and accumulates all gradients for one batch into allGradsAcc;
    ''' returns {mean squared error, per-item mean squared error} (no square root applied).
    ''' </summary>
    Protected Function ComputeGraduateSingleThread(trainData#()(), allGradsAcc As WeightComposite()) As Double()
        Dim lastLayerId% = Me.LayerCount - 1
        Dim outputSize% = Me.Sizes(lastLayerId)
        Dim xValues#() = New Double(Me.Sizes(0) - 1) {} ' inputs
        Dim tValues#() = New Double(outputSize - 1) {} ' target values
        Dim sumSquaredErrors#() = {0, 0}
        For Each t As Double() In trainData
            ' no need to visit in random order because all rows processed before any updates ('batch')
            Array.Copy(t, xValues, Me.Sizes(0)) ' get the inputs
            Array.Copy(t, Me.Sizes(0), tValues, 0, outputSize) ' get the target values
            ' copy xValues in, compute outputs using curr weights (and store outputs internally)
            Dim yValues#() = ComputeOutputs(xValues)
            Dim gradTerms = CalculateGradTerms(Me.Layers, tValues)
            For layer = lastLayerId To 0 + 1 Step -1
                ' add input to h-o component to make h-o weight gradients, and accumulate
                For j = 0 To Me.Sizes(layer) - 1
                    Dim grad# = gradTerms(layer)(j)
                    allGradsAcc(layer).Biases(j) += grad
                    For i = 0 To Me.Sizes(layer - 1) - 1
                        grad = gradTerms(layer)(j) * Me.Layers(layer - 1)(i)
                        allGradsAcc(layer).Weights(j)(i) += grad
                    Next
                Next
            Next
            For j = 0 To outputSize - 1
                Dim delta# = yValues(j) - tValues(j)
                Dim err# = delta * delta 'Math.Pow(delta, 2)
                sumSquaredErrors(sumIndice) += err / trainData.Length
                sumSquaredErrors(averageIndice) += err / trainData.Length / Me.Sizes(Me.LayerCount - 1)
            Next
        Next
        Return sumSquaredErrors
    End Function

    ''' <summary>
    ''' Calculating the gradient in multiple streams: the batch is split into
    ''' chunks, each processed by ComputeGraduateInThread, then the per-thread
    ''' accumulators are summed into allGradsAcc.
    ''' NOTE(review): unlike ComputeGraduateSingleThread this returns Sqrt(...)
    ''' of the accumulated errors - confirm callers expect this difference.
    ''' </summary>
    Protected Function ComputeGraduateMultiThread(trainData#()(), allGradsAcc As WeightComposite()) As Double()
        Dim taskFactory As TaskFactory = New TaskFactory()
        Dim tasks As Task() = New Task(Me.ThreadCount - 1) {}
        Dim threadInputData As ThreadInputDatum() = New ThreadInputDatum(Me.ThreadCount - 1) {}
        InitializeThreads(threadInputData, trainData)
        Dim innerTrainData As List(Of Double()) = New List(Of Double())(trainData)
        Dim innerTrainDataChunk As List(Of Double()) = New List(Of Double())()
        ' multithread version does not compute the same value using this:
        'Dim chunk_size% = CInt(innerTrainData.Count * 0.8 / Me.ThreadCount)
        'Dim rest% = CInt(innerTrainData.Count * 0.8) Mod Me.ThreadCount
        ' Warning: this solution will not always work:
        'Debug.WriteLine("ThreadCount=" & Me.ThreadCount)
        'Debug.WriteLine("innerTrainData.Count=" & innerTrainData.Count)
        Dim chunk_size% = CInt(innerTrainData.Count / Me.ThreadCount)
        'Debug.WriteLine("chunk_size=" & chunk_size)
        While innerTrainData.Count > 0
            ' find a free worker slot, or wait for one
            Dim currentThread% = -1
            For i = 0 To Me.ThreadCount - 1
                If tasks(i) Is Nothing OrElse tasks(i).IsCompleted Then
                    currentThread = i
                    Exit For
                End If
            Next
            If currentThread = -1 Then
                Thread.Sleep(20)
                Continue While
            End If
            ' carve the next chunk off the front of the remaining data
            innerTrainDataChunk.Clear()
            While innerTrainDataChunk.Count < chunk_size AndAlso innerTrainData.Count > 0
                innerTrainDataChunk.Add(innerTrainData(0))
                innerTrainData.RemoveAt(0)
            End While
            threadInputData(currentThread).trainDatum = innerTrainDataChunk.ToArray()
            For layer = 1 To Me.LayerCount - 1
                ' zero-out values from prev. iteration
                ZeroOut(threadInputData(currentThread).allGradsAcc(layer).Weights)
                ZeroOut(threadInputData(currentThread).allGradsAcc(layer).Biases)
            Next
            tasks(currentThread) = taskFactory.StartNew(
                New Action(Of Object)(AddressOf ComputeGraduateInThread),
                threadInputData(currentThread))
        End While
        For i = 0 To Me.ThreadCount - 1
            If tasks(i) IsNot Nothing Then
                tasks(i).Wait()
            End If
        Next
        ' All in allGradsAcc
        For i = 0 To Me.ThreadCount - 1
            For layer = 1 To Me.LayerCount - 1
                For size = 0 To Me.Sizes(layer) - 1
                    allGradsAcc(layer).Biases(size) += threadInputData(i).allGradsAcc(layer).Biases(size)
                    For prev_size = 0 To Me.Sizes(layer - 1) - 1
                        allGradsAcc(layer).Weights(size)(prev_size) += threadInputData(i).allGradsAcc(layer).Weights(size)(prev_size)
                    Next
                Next
            Next
        Next
        Dim sumSquaredErrorItem# = 0
        Dim sumSquaredError# = 0
        For i = 0 To Me.ThreadCount - 1
            sumSquaredError += threadInputData(i).sumSquaredErrors(sumIndice)
            sumSquaredErrorItem += threadInputData(i).sumSquaredErrors(averageIndice)
        Next
        Dim d#() = {Math.Sqrt(sumSquaredErrorItem), Math.Sqrt(sumSquaredError)}
        Return d
    End Function

    ''' <summary>
    ''' Allocates the per-thread working buffers (layer values, gradient
    ''' accumulators, input/target scratch arrays, error normalizers).
    ''' </summary>
    Private Sub InitializeThreads(threadInputData As ThreadInputDatum(), trainData#()())
        For i = 0 To Me.ThreadCount - 1
            threadInputData(i).field = New Double(Me.LayerCount - 1)() {}
            threadInputData(i).allGradsAcc = New WeightComposite(Me.LayerCount - 1) {}
            threadInputData(i).xValues = New Double(Me.Sizes(0) - 1) {} ' inputs
            threadInputData(i).tValues = New Double(Me.Sizes(Me.LayerCount - 1) - 1) {} ' targets
            ' pre-computed 1/N and 1/(N*outputSize) factors
            threadInputData(i).delim1 = 1.0 / trainData.Length
            threadInputData(i).delim2 = 1.0 / trainData.Length / Me.Sizes(Me.LayerCount - 1)
            threadInputData(i).sumSquaredErrors = New Double() {0, 0}
            For j = 0 To Me.LayerCount - 1
                threadInputData(i).field(j) = New Double(Me.Sizes(j) - 1) {}
                If j <= 0 Then Continue For ' layer 0 has no gradients
                threadInputData(i).allGradsAcc(j).Biases = New Double(Me.Sizes(j) - 1) {}
                threadInputData(i).allGradsAcc(j).Weights = MakeMatrix(Me.Sizes(j), Me.Sizes(j - 1), 0.0)
            Next
        Next
    End Sub
Public Sub ComputeGraduateInThread(input As Object)
        ' Worker body: accumulates gradients for one chunk into the
        ' thread-private buffers carried by the ThreadInputDatum argument.
        Dim inputDatum As ThreadInputDatum = CType(input, ThreadInputDatum)
        Dim lastLayerId% = Me.LayerCount - 1
        For Each t As Double() In inputDatum.trainDatum
            ' no need to visit in random order because all rows processed before
            ' any updates ('batch')
            Array.Copy(t, inputDatum.xValues, Me.Sizes(0)) ' get the inputs
            ' get the target values
            Array.Copy(t, Me.Sizes(0), inputDatum.tValues, 0, Me.Sizes(lastLayerId))
            ' copy xValues in, compute outputs using curr weights
            ' (and store outputs internally)
            Dim yValues = ComputeOutputs(inputDatum.xValues, inputDatum.field)
            Dim gradTerms = CalculateGradTerms(inputDatum.field, inputDatum.tValues)
            For layer = lastLayerId To 0 + 1 Step -1
                ' add input to h-o component to make h-o weight gradients, and accumulate
                For j = 0 To Me.Sizes(layer) - 1
                    Dim grad# = gradTerms(layer)(j)
                    inputDatum.allGradsAcc(layer).Biases(j) += grad
                    For i = 0 To Me.Sizes(layer - 1) - 1
                        grad = gradTerms(layer)(j) * inputDatum.field(layer - 1)(i)
                        inputDatum.allGradsAcc(layer).Weights(j)(i) += grad
                    Next
                Next
            Next
            For j = 0 To Me.Sizes(lastLayerId) - 1
                'Dim err# = Math.Pow(yValues(j) - inputDatum.tValues(j), 2)
                Dim delta = yValues(j) - inputDatum.tValues(j)
                Dim err# = delta * delta
                inputDatum.sumSquaredErrors(sumIndice) += err * inputDatum.delim1
                inputDatum.sumSquaredErrors(averageIndice) += err * inputDatum.delim2
            Next
        Next
    End Sub

    ''' <summary>
    ''' Multi-thread RMSE: splits the data into chunks processed by
    ''' ComputeRMSEInThread, then combines the per-thread error sums.
    ''' NOTE(review): the chunk size here uses a 0.8 factor that the comment in
    ''' ComputeGraduateMultiThread says gives non-reproducible values - confirm.
    ''' </summary>
    Public Function RootMeanSquaredErrorMultiThread(trainData#()(), weights#()) As Double()
        SetWeights(weights) ' copy the weights to evaluate in
        Dim taskFactory As New TaskFactory()
        Dim tasks As Task() = New Task(Me.ThreadCount - 1) {}
        Dim lastLayerId% = Me.LayerCount - 1
        Dim outputSize% = Me.Sizes(lastLayerId)
        Dim threadInputData = New RMSEThreadInputDatum(Me.ThreadCount - 1) {}
        InitializeThreads2(threadInputData, trainData, outputSize)
        Dim innerTrainData As List(Of Double()) = New List(Of Double())(trainData)
        Dim innerTrainDataChunk As List(Of Double()) = New List(Of Double())()
        Dim chunk_size% = CInt(innerTrainData.Count * 0.8 / Me.ThreadCount)
        While innerTrainData.Count > 0
            ' find a free worker slot, or wait for one
            Dim currentThread% = -1
            For i = 0 To Me.ThreadCount - 1
                If tasks(i) Is Nothing OrElse tasks(i).IsCompleted Then
                    currentThread = i
                    Exit For
                End If
            Next
            If currentThread = -1 Then
                Thread.Sleep(20)
                Continue While
            End If
            innerTrainDataChunk.Clear()
            While innerTrainDataChunk.Count < chunk_size AndAlso innerTrainData.Count > 0
                innerTrainDataChunk.Add(innerTrainData(0))
                innerTrainData.RemoveAt(0)
            End While
            threadInputData(currentThread).trainDatum = innerTrainDataChunk.ToArray()
            tasks(currentThread) = taskFactory.StartNew(
                New Action(Of Object)(AddressOf ComputeRMSEInThread),
                threadInputData(currentThread))
        End While
        For i = 0 To Me.ThreadCount - 1
            If tasks(i) IsNot Nothing Then
                tasks(i).Wait()
            End If
        Next
        Dim sumSquaredErrorItem# = 0
        Dim sumSquaredError# = 0
        For i = 0 To Me.ThreadCount - 1
            sumSquaredError += threadInputData(i).sumSquaredErrors(sumIndice)
            sumSquaredErrorItem += threadInputData(i).sumSquaredErrors(averageIndice)
        Next
        Dim d#() = {Math.Sqrt(sumSquaredErrorItem), Math.Sqrt(sumSquaredError)}
        Return d
    End Function

    ''' <summary>Allocates the per-thread buffers used by the multi-thread RMSE path.</summary>
    Private Sub InitializeThreads2(threadInputData As RMSEThreadInputDatum(), trainData#()(), outputSize%)
        For i = 0 To Me.ThreadCount - 1
            threadInputData(i).field = New Double(Me.LayerCount - 1)() {}
            threadInputData(i).xValues = New Double(Me.Sizes(0) - 1) {} ' inputs
            threadInputData(i).tValues = New Double(outputSize - 1) {} ' targets
            ' pre-computed 1/N and 1/(N*outputSize) factors
            threadInputData(i).delim1 = 1.0 / trainData.Length
            threadInputData(i).delim2 = 1.0 / trainData.Length / outputSize
            threadInputData(i).sumSquaredErrors = New Double(1) {}
            For j = 0 To Me.LayerCount - 1
                threadInputData(i).field(j) = New Double(Me.Sizes(j) - 1) {}
            Next
        Next
    End Sub

    ''' <summary>Worker body: accumulates squared errors for one chunk of rows.</summary>
    Public Sub ComputeRMSEInThread(input As Object)
        Dim threadInputDatum = CType(input, RMSEThreadInputDatum)
        Dim outputSize% = Me.Sizes(Me.LayerCount - 1)
        For Each t As Double() In threadInputDatum.trainDatum
            ' following assumes data has all x-values first, followed by y-values!
            Array.Copy(t, threadInputDatum.xValues, Me.Sizes(0)) ' extract inputs
            Array.Copy(t, Me.Sizes(0), threadInputDatum.tValues, 0, outputSize) ' extract targets
            Dim yValues#() = ComputeOutputs(threadInputDatum.xValues, threadInputDatum.field)
            For j = 0 To outputSize - 1
                'Dim err# = Math.Pow(yValues(j) - threadInputDatum.tValues(j), 2)
                Dim delta# = yValues(j) - threadInputDatum.tValues(j)
                Dim err# = delta * delta
                threadInputDatum.sumSquaredErrors(sumIndice) += err * threadInputDatum.delim1
                threadInputDatum.sumSquaredErrors(averageIndice) += err * threadInputDatum.delim2
            Next
        Next
    End Sub

    ''' <summary>
    ''' RPROP update step: adapts each weight/bias step size from the sign of
    ''' (previous gradient * current gradient) and applies the update.
    ''' </summary>
    Protected Sub UpdateWeigtsAndBiases(allGradsAcc As WeightComposite(), prevGradsAcc As WeightComposite(), prevDeltas As WeightComposite())
        ' update input-hidden weights
        For layer = 1 To Me.LayerCount - 1
            Dim size% = Me.Sizes(layer)
            Dim previousSize% = Me.Sizes(layer - 1)
            For i = 0 To previousSize - 1
                For j = 0 To size - 1
                    Dim delta# = prevDeltas(layer).Weights(j)(i)
                    Dim t# = prevGradsAcc(layer).Weights(j)(i) * allGradsAcc(layer).Weights(j)(i)
                    If t > 0 Then
                        ' no sign change, increase delta
                        delta *= EtaPlus ' compute delta
                        ' keep it in range
                        If delta > DeltaMax Then delta = DeltaMax
                        ' determine direction and magnitude
                        Dim tmp# = -Math.Sign(allGradsAcc(layer).Weights(j)(i)) * delta
                        Me.Neurons(layer).Weights(j)(i) += tmp ' update weights
                    ElseIf t < 0 Then
                        ' grad changed sign, decrease delta
                        delta *= EtaMinus ' the delta (not used, but saved for later)
                        ' keep it in range
                        If delta < DeltaMin Then delta = DeltaMin
                        ' revert to previous weight
                        Me.Neurons(layer).Weights(j)(i) -= prevDeltas(layer).Weights(j)(i)
                        ' forces next if-then branch, next iteration
                        allGradsAcc(layer).Weights(j)(i) = 0
                        ' this happens next iteration after 2nd branch above
                        ' (just had a change in gradient)
                    Else
                        ' no change to delta
                        ' no way should delta be 0 ...
                        ' determine direction
                        Dim tmp# = -Math.Sign(allGradsAcc(layer).Weights(j)(i)) * delta
                        Me.Neurons(layer).Weights(j)(i) += tmp ' update
                    End If
                    prevDeltas(layer).Weights(j)(i) = delta ' save delta
                    ' save the (accumulated) gradient
                    prevGradsAcc(layer).Weights(j)(i) = allGradsAcc(layer).Weights(j)(i)
                Next
            Next
            ' update (input-to-) hidden biases
            For i = 0 To size - 1
                Dim delta# = prevDeltas(layer).Biases(i)
                Dim t# = prevGradsAcc(layer).Biases(i) * allGradsAcc(layer).Biases(i)
                If t > 0 Then
                    ' no sign change, increase delta
                    delta *= EtaPlus ' compute delta
                    If delta > DeltaMax Then delta = DeltaMax
                    ' determine direction
                    Dim tmp# = -Math.Sign(allGradsAcc(layer).Biases(i)) * delta
                    Me.Neurons(layer).Biases(i) += tmp ' update
                ElseIf t < 0 Then
                    ' grad changed sign, decrease delta
                    delta *= EtaMinus ' the delta (not used, but saved later)
                    If delta < DeltaMin Then delta = DeltaMin
                    ' revert to previous weight
                    Me.Neurons(layer).Biases(i) -= prevDeltas(layer).Biases(i)
                    allGradsAcc(layer).Biases(i) = 0 ' forces next branch, next iteration
                    ' this happens next iteration after 2nd branch above
                    ' (just had a change in gradient)
                Else
                    If delta > DeltaMax Then
                        delta = DeltaMax
                    ElseIf delta < DeltaMin Then
                        delta = DeltaMin
                    End If
                    ' no way should delta be 0 . . .
                    ' determine direction
                    Dim tmp# = -Math.Sign(allGradsAcc(layer).Biases(i)) * delta
                    Me.Neurons(layer).Biases(i) += tmp ' update
                End If
                prevDeltas(layer).Biases(i) = delta
                prevGradsAcc(layer).Biases(i) = allGradsAcc(layer).Biases(i)
            Next
        Next
    End Sub

    ''' <summary>Writes msg to the console (console apps only) and to the debug output.</summary>
    Public Sub ShowMessage(msg$)
        If isConsoleApp() Then Console.WriteLine(msg)
        Debug.WriteLine(msg)
    End Sub

End Class ' NeuralNetwork

End Class

' Nguyen-Widrow weights distribution examples (the last column is the bias):
'
' neuron count={2, 2, 1}
'
' W(1)={
' {0.700, 0.700, 0.001},
' {0.700, 0.700, 0.001}}
'
' W(2)={
' {0.495, 0.495, 0.001}}
'
'
' neuron count={2, 2, 2, 1}
'
' W(1)={
' {0.700, 0.700, 0.001},
' {0.700, 0.700, 0.001}}
'
' W(2)={
' {0.700, 0.700, 0.001},
' {0.700, 0.700, 0.001}}
'
' W(3)={
' {0.495, 0.495, 0.001}}
'
'
' neuron count={4, 4, 2}
'
' W(1)={
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001}}
'
' W(2)={
' {0.416, 0.416, 0.416, 0.416, 0.001},
' {0.416, 0.416, 0.416, 0.416, 0.001}}
'
'
' neuron count={4, 4, 4, 2}
'
' W(1)={
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001}}
'
' W(2)={
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001},
' {0.495, 0.495, 0.495, 0.495, 0.001}}
'
' W(3)={
' {0.416, 0.416, 0.416, 0.416, 0.001},
' {0.416, 0.416, 0.416, 0.416, 0.001}}
'
'
' neuron count={6, 6, 3}
'
' W(1)={
' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001},
' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001},
' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001},
' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001},
' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001},
' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}}
'
' W(2)={
' {0.343, 0.343, 0.343, 0.343, 0.343, 0.343, 0.001},
' {0.343, 0.343, 0.343, 0.343, 0.343, 0.343, 0.001},
'
{0.343, 0.343, 0.343, 0.343, 0.343, 0.343, 0.001}} ' ' ' neuron count={6, 6, 6, 3} ' ' W(1)={ ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}} ' ' W(2)={ ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}, ' {0.385, 0.385, 0.385, 0.385, 0.385, 0.385, 0.001}} ' ' W(3)={ ' {0.343, 0.343, 0.343, 0.343, 0.343, 0.343, 0.001}, ' {0.343, 0.343, 0.343, 0.343, 0.343, 0.343, 0.001}, ' {0.343, 0.343, 0.343, 0.343, 0.343, 0.343, 0.001}} ' ' ' neuron count={4, 20, 3} ' ' W(1)={ ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}, ' {0.740, 0.740, 0.740, 0.740, 0.001}} ' ' W(2)={ ' {0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.001}, ' {0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 
0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.001}, ' {0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.165, 0.001}} ' ' ' neuron count={4, 16, 8, 3} ' ' W(1)={ ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}, ' {0.700, 0.700, 0.700, 0.700, 0.001}} ' ' W(2)={ ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}, ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}, ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}, ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}, ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}, ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}, ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}, ' {0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.199, 0.001}} ' ' W(3)={ ' {0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 0.001}, ' {0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 
0.284, 0.001},
' {0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 0.284, 0.001}}

modMLPRPropTest.vb

Imports Perceptron.Utility ' Matrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode

#Const TestConsoleDemo = 0 ' 0: Off, 1: On

' Console test harness for the Resilient Propagation (RProp) MLP.
Module modMLPRPROPTTest

    Sub MainRPropMLP()
        'Dim mlp As New clsMLPRProp
        'Dim trainAcc# = 0, testAcc# = 0
        'mlp.ConsoleDemo(trainAcc, testAcc, multiThread:=False)
        'Console.ReadLine()
        'Exit Sub
        Console.WriteLine("Resilient Propagation (RProp) MLP with the Resilient Propagation XOR test.")
        RPropMLPXorTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ''' <summary>
    ''' Trains an RProp MLP on 1, 2 or 3 parallel XOR problems
    ''' (nbXor selects the data set and network structure).
    ''' </summary>
    Public Sub RPropMLPXorTest(Optional nbXor% = 1)
        Dim mlp As New clsMLPRProp
        mlp.ShowMessage("Resilient Propagation MLP Xor test")
        mlp.ShowMessage("----------------------------------")
        mlp.Initialize(learningRate:=0!)
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        mlp.nbIterations = 1000
        If nbXor = 1 Then
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            'mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True)
            mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True)
            'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True)
            mlp.printOutputMatrix = True
        ElseIf nbXor = 2 Then
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR452, addBiasColumn:=True)
        ElseIf nbXor = 3 Then
            mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            'mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True)
            mlp.InitializeStruct(m_neuronCount3XOR673, addBiasColumn:=True)
        End If
        'mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain:=0.2!)
        'mlp.SetActivationFunction(enumActivationFunction.Sigmoid, gain:=0.2!)
        mlp.SetActivationFunction(enumActivationFunction.Sigmoid, gain:=0.7!)
        mlp.nbIterationsBatch = 10
        mlp.Randomize()
        'mlp.Randomize(minValue:=-10, maxValue:=10)
        mlp.PrintWeights()
        WaitForKeyToStart()
        'mlp.Train()
        mlp.TrainVectorBatch()
        mlp.ShowMessage("Resilient Propagation MLP Xor test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub

End Module

clsActivation.vb

Imports Perceptron.Utility ' Matrix

Namespace DLFramework.Layers.Activation

    ' Sigmoid activation with an optional horizontal shift (center).
    Public Class Sigmoid

        ''' <summary>
        ''' Applies 1 / (1 + exp(-(x - center))) element-wise; when input.AutoGrad
        ''' is set, the result carries a backward callback for autograd.
        ''' </summary>
        Public Shared Function Forward(input As Tensor, center!) As Tensor
            Dim output#(,) = input.Data
            Matrix.MatrixLoop(
                Sub(i, j)
                    Dim x# = output(i, j)
                    Dim xc# = x - center
                    output(i, j) = 1 / (1 + Math.Exp(-xc))
                End Sub, input.Data.r, input.Data.c)
            If input.AutoGrad Then
                Dim Creators = New List(Of Tensor)() From {input}
                Return New Tensor(data:=output, autoGrad:=True, creators:=Creators,
                    creationOperation:=TensorOperations.Other, arguments:=Nothing,
                    backwardCallback:=AddressOf Sigmoid.Backward)
            End If
            Return New Tensor(output)
        End Function

        ''' <summary>
        ''' Backward pass: multiplies the incoming gradient by y * (1 - y)
        ''' (sigmoid derivative expressed from the forward output in self).
        ''' </summary>
        Public Shared Sub Backward(self As Tensor, gradient As Tensor, creators As List(Of Tensor))
            ' NOTE(review): 'ones' appears unused here - candidate for removal.
            Dim ones = New Tensor(Matrix.Ones(gradient.Data.r, gradient.Data.c))
            Dim derivative#(,) = self.Data
            Matrix.MatrixLoop(
                Sub(i, j)
                    derivative(i, j) = derivative(i, j) * (1.0 - derivative(i, j))
                End Sub, self.Data.r, self.Data.c)
            Dim derivatives = New Tensor(derivative)
            creators(0).Backward(Tensor.Mul(gradient, derivatives))
        End Sub

    End Class

    ' Hyperbolic tangent activation with an optional horizontal shift (center).
    Public Class HyperbolicTangent

        ''' <summary>
        ''' Applies Tanh(x - center) element-wise; when input.AutoGrad is set,
        ''' the result carries a backward callback for autograd.
        ''' </summary>
        Public Shared Function Forward(input As Tensor, center!) As Tensor
            Dim output#(,) = input.Data
            Matrix.MatrixLoop(
                Sub(i, j)
                    Dim x# = output(i, j)
                    Dim xc# = x - center
                    output(i, j) = Math.Tanh(xc)
                End Sub, input.Data.r, input.Data.c)
            If input.AutoGrad Then
                Dim Creators = New List(Of Tensor)() From {input}
                Return New Tensor(data:=output, autoGrad:=True, creators:=Creators,
                    creationOperation:=TensorOperations.Other, arguments:=Nothing,
                    backwardCallback:=AddressOf HyperbolicTangent.Backward)
            End If
            Return New Tensor(output)
        End Function

        ''' <summary>
        ''' Backward pass: multiplies the incoming gradient by 1 - y^2
        ''' (tanh derivative expressed from the forward output in self).
        ''' </summary>
        Public Shared Sub Backward(self As Tensor, gradient As Tensor, creators As List(Of Tensor))
            ' NOTE(review): 'ones' appears unused here - candidate for removal.
            Dim ones = New Tensor(Matrix.Ones(gradient.Data.r, gradient.Data.c))
            Dim derivative#(,) = self.Data
            Matrix.MatrixLoop(
                Sub(i, j)
                    derivative(i, j) = 1.0 - derivative(i, j) * derivative(i, j)
                End Sub, self.Data.r, self.Data.c)
            Dim derivatives = New Tensor(derivative)
            creators(0).Backward(Tensor.Mul(gradient, derivatives))
        End Sub

    End Class

    Public Class ELU ' Exponential Linear Units

        Const gain# = 1

        Public Shared Function Forward(input As Tensor, center!)
As Tensor Dim output#(,) = input.Data Matrix.MatrixLoop( Sub(i, j) Dim x# = output(i, j) Dim xc# = x - center Dim y# If xc >= 0 Then y = xc Else y = gain * (Math.Exp(xc) - 1) End If output(i, j) = y End Sub, input.Data.r, input.Data.c) If input.AutoGrad Then Dim Creators = New List(Of Tensor)() From {input} Return New Tensor(data:=output, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Other, arguments:=Nothing, backwardCallback:=AddressOf ELU.Backward) End If Return New Tensor(output) End Function Public Shared Sub Backward(self As Tensor, gradient As Tensor, creators As List(Of Tensor)) Dim ones = New Tensor(Matrix.Ones(gradient.Data.r, gradient.Data.c)) Dim derivative#(,) = self.Data Matrix.MatrixLoop( Sub(i, j) Dim y# = 0 If gain > 0 Then Dim fx# = derivative(i, j) If fx >= 0 Then y = 1 Else y = fx + gain End If End If derivative(i, j) = y End Sub, self.Data.r, self.Data.c) Dim derivatives = New Tensor(derivative) creators(0).Backward(Tensor.Mul(gradient, derivatives)) End Sub End Class End Namespace clsMeanSquaredError.vb Imports Perceptron.Utility ' AxisZero Namespace DLFramework.Layers.Loss Public Class MeanSquaredError : Inherits Layer Public Overrides Function Forward(prediction As Tensor, target As Tensor) As Tensor Dim diff = Tensor.Substract(prediction, target) Dim tnsor = Tensor.Sum(Tensor.Mul(diff, diff), AxisZero.vertical) Return tnsor End Function 'Public Overrides Function Forward(prediction As Tensor, target As Tensor, ' useBias As Boolean) As Tensor ' If Not useBias Then ' ' Cut first column? 
' 'delta(i) = delta(i).Slice(0, 1, delta(i).x, delta(i).y) ' End If ' Dim diff = Tensor.Substract(prediction, target) ' Return Tensor.Sum(Tensor.Mul(diff, diff), AxisZero.vertical) 'End Function End Class End Namespace clsMLPTensor.vb ' From https://github.com/HectorPulido/Machine-learning-Framework-Csharp : C# -> VB .NET conversion Imports Perceptron.DLFramework ' Tensor Imports Perceptron.DLFramework.Layers ' Linear, Sequential Imports Perceptron.DLFramework.Layers.Loss ' MeanSquaredError Imports Perceptron.DLFramework.Optimizers ' StochasticGradientDescent Imports Perceptron.Utility ' Matrix Imports System.Text ' StringBuilder Public Class clsMLPTensor : Inherits clsVectorizedMLPGeneric Private input, target, pred, loss As Tensor Private weights As List(Of Tensor) Private seq As Sequential Private mse As MeanSquaredError Private sgd As StochasticGradientDescent Private nbHiddenNeurons% Private nbHiddenNeuronsTensor% Private nbHiddenNeuronsTensorWithBias% Public Overrides Function GetMLPType$() Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name End Function Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType Return enumActivationFunctionType.SpecificCodeOptimized End Function Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean) MyBase.InitializeStruct(neuronCount, addBiasColumn) Me.nbHiddenNeurons = Me.neuronCount(1) If Not Me.useBias Then Throw New NotImplementedException( "useBias=False is not implemented for clsTensorMLP!") End If ' 03/10/2020 If Me.nbInputNeurons <> Me.nbHiddenNeurons Then Throw New NotImplementedException( "nbHiddenNeurons must be identical to nbInputNeurons for clsTensorMLP!") End If Me.nbHiddenNeuronsTensor = Me.nbHiddenNeurons + Me.nbInputNeurons Me.nbHiddenNeuronsTensorWithBias = Me.nbHiddenNeuronsTensor If Me.useBias Then Me.nbHiddenNeuronsTensorWithBias += 1 Me.weights = New List(Of Tensor) For i = 1 To Me.layerCount - 1 Dim nbNodes1 = Me.nbHiddenNeurons 
Dim nbNodes2 = Me.nbHiddenNeurons If Me.useBias AndAlso i > 1 Then nbNodes1 += 1 If Me.useBias AndAlso i < Me.layerCount - 1 Then nbNodes2 += 1 If i = 1 Then nbNodes1 = Me.nbInputNeurons If i = Me.layerCount - 1 Then nbNodes2 = Me.nbOutputNeurons 'Debug.WriteLine("W" & i & " : " & nbNodes1 & " x " & nbNodes2) Me.weights.Add(New Tensor( Matrix.Zeros(nbNodes1, nbNodes2), autoGrad:=True)) Next InitializeSequential() Me.mse = New MeanSquaredError() End Sub Private Sub InitializeSequential() Me.rnd = New Random Me.seq = New Sequential() For i = 0 To Me.layerCount - 1 'Dim nbNodes1 = Me.nbHiddenNeuronsTensorWithBias 'Dim nbNodes2 = Me.nbHiddenNeuronsTensorWithBias 'If i = 0 Then nbNodes1 = Me.nbInputNeurons 'If i = Me.layerCount - 1 Then nbNodes2 = Me.nbOutputNeurons ' 06/12/2020 Dim nbNodes1 = Me.nbInputNeurons Dim nbNodes2 = Me.nbHiddenNeurons + Me.nbInputNeurons If Me.useBias Then nbNodes2 += 1 If i > 0 Then nbNodes1 = nbNodes2 If i = Me.layerCount - 1 Then nbNodes2 = Me.nbOutputNeurons 'Debug.WriteLine("WL" & i + 1 & " : " & nbNodes1 & " x " & nbNodes2) 'Me.seq.Layers.Add(New Linear(nbNodes1, nbNodes2, Me.useBias)) Me.seq.Layers.Add(New Linear(nbNodes1, nbNodes2, Me.rnd, Me.useBias)) ' 06/12/2020 AddLayerWithActivationFunction() Next InitializeGradient() End Sub Private Sub AddLayerWithActivationFunction() Select Case Me.m_actFunc Case enumActivationFunction.Sigmoid : Me.seq.Layers.Add( New SigmoidLayer(Me.m_center)) Case enumActivationFunction.HyperbolicTangent : Me.seq.Layers.Add( New HyperbolicTangentLayer(Me.m_center)) Case enumActivationFunction.ELU : Me.seq.Layers.Add( New ELULayer(Me.m_center)) Case Else Throw New ArgumentException("Activation function undefined!") End Select End Sub Public Overrides Sub InitializeWeights(layer%, weights#(,)) Dim wMatrix As Matrix = weights Me.weights(layer - 1) = New Tensor(wMatrix, autoGrad:=True) End Sub Public Sub WeightInitLayerLinear(layer%, weights#(,), Optional addBias As Boolean = True, Optional bias#(,) = Nothing) 
Dim wMatrix As Matrix = weights Dim i = layer - 1 Dim nbNodes1 = Me.nbHiddenNeuronsTensorWithBias Dim nbNodes2 = Me.nbHiddenNeuronsTensorWithBias If i = 0 Then nbNodes1 = Me.nbInputNeurons If i = Me.layerCount - 1 Then nbNodes2 = Me.nbOutputNeurons Dim j% Dim biasMatrix As Matrix If addBias Then If IsNothing(bias) Then biasMatrix = Matrix.Zeros(1, nbNodes2) ' For functionnal tests Else biasMatrix = bias ' To save and restore weights End If j = i * 2 Me.seq.Layers(j) = New Linear(nbNodes1, nbNodes2, wMatrix, biasMatrix) Else j = i Me.seq.Layers(j) = New Linear(nbNodes1, nbNodes2, wMatrix, addBias:=False) End If 'Debug.WriteLine("WL" & j + 1 & " : " & nbNodes1 & " x " & nbNodes2) End Sub Public Sub InitializeGradient() Me.sgd = New StochasticGradientDescent( Me.seq.Parameters, Me.learningRate, Me.weightAdjustment) 'Debug.WriteLine("seq.prm=" & Me.seq.ParametersToString) End Sub Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!) Me.weights = New List(Of Tensor) Me.rnd = New Random For i = 1 To Me.layerCount - 1 Dim nbNodes1 = Me.nbHiddenNeurons Dim nbNodes2 = Me.nbHiddenNeurons If i = 1 Then nbNodes1 = Me.nbInputNeurons If Me.useBias AndAlso i > 1 Then nbNodes1 += 1 If Me.useBias AndAlso i < Me.layerCount - 1 Then nbNodes2 += 1 If i = Me.layerCount - 1 Then nbNodes2 = Me.nbOutputNeurons 'Debug.WriteLine("W" & i & " : " & nbNodes1 & " x " & nbNodes2) Dim t = New Tensor(Matrix.Randomize( nbNodes1, nbNodes2, Me.rnd), autoGrad:=True) Me.weights.Add(t) Next Me.InitializeSequential() End Sub Private Sub SetInputOneSample(input!()) Dim inputsDble#(0, input.Length - 1) clsMLPHelper.Fill2DArrayOfDoubleByArrayOfSingle(inputsDble, input, 0) Dim inputMatrix As Matrix = inputsDble Me.input = New Tensor(inputMatrix, autoGrad:=True) End Sub Public Sub SetInputAllSamples() Dim inputArrayDble#(,) = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.inputArray) Dim inputMatrix As Matrix = inputArrayDble Me.input = New Tensor(inputMatrix, autoGrad:=True) 
End Sub Private Sub SetTargetOneSample(target!()) Dim targetsDble#(0, target.Length - 1) clsMLPHelper.Fill2DArrayOfDoubleByArrayOfSingle(targetsDble, target, 0) Dim targetMatrix As Matrix = targetsDble Me.target = New Tensor(targetMatrix, autoGrad:=True) End Sub Public Sub SetTargetAllSamples() Dim targetArrayDble#(,) = clsMLPHelper.Convert2DArrayOfSingleToDouble(Me.targetArray) Dim targetMatrix As Matrix = targetArrayDble Me.target = New Tensor(targetMatrix, autoGrad:=True) End Sub Private Sub SetOuput1DOneSample() Dim output As Matrix = Me.pred.Data Me.output = Me.pred.Data ' 20/11/2020 Me.lastOutputArray1DSingle = output.ToArrayOfSingle() End Sub Public Overrides Sub SetOuput1D() Dim output As Matrix = Me.pred.Data Me.output = Me.pred.Data ' 20/11/2020 Me.lastOutputArray1DSingle = output.ToArrayOfSingle() End Sub Private Sub SetOuputAllSamples() Me.output = Me.pred.Data End Sub Public Overrides Sub TrainVector() Me.learningMode = enumLearningMode.Vectorial Me.vectorizedLearningMode = True ' 20/09/2020 Code moved here SetInputAllSamples() SetTargetAllSamples() For iteration = 0 To Me.nbIterations - 1 Me.numIteration = iteration TrainVectorOneIteration() If Me.printOutput_ Then PrintOutput(iteration) Next Me.output = Me.pred.Data ComputeAverageError() ' 14/11/2020 End Sub Public Overrides Sub TrainVectorOneIteration() 'SetInputAllSamples() ' 20/09/2020 See above ForwardPropogateSignal() 'SetTargetAllSamples() ' 20/09/2020 See above ComputeErrorInternal() BackwardPropagateError() End Sub Public Overrides Sub TestOneSample(input!()) SetInputOneSample(input) ForwardPropogateSignal() SetOuput1DOneSample() End Sub Public Overrides Sub TrainOneSample(input!(), target!()) Me.vectorizedLearningMode = False ' 10/10/2020 TestOneSample(input) SetTargetOneSample(target) ComputeErrorInternal() BackwardPropagateError() Me.averageErrorOneSample = ComputeAverageErrorFromLastError() Me.averageErrorOneSampleSigned = ComputeAverageSignedErrorFromLastError() End Sub Private Sub 
ForwardPropogateSignal() Me.pred = Me.seq.Forward(Me.input) End Sub Public Sub BackwardPropagateError() Me.loss.Backward(New Tensor(Matrix.Ones(Me.loss.Data.r, Me.loss.Data.c))) Me.sgd.Step_(zero:=False) End Sub Private Sub ComputeErrorInternal() ' Calculate the error: ERROR = TARGETS - OUTPUTS Me.loss = Me.mse.Forward(Me.pred, Me.target) Me.lastError = Me.loss.Data End Sub 'Public Overrides Function ComputeAverageError#() ' ' Calculate the error: ERROR = TARGETS - OUTPUTS ' Dim m As Matrix = Me.targetArray ' Me.lastError = m - Me.output ' ComputeSuccess() ' Me.averageError = ComputeAverageErrorFromLastError() ' Me.averageErrorSigned = Me.ComputeAverageSignedErrorFromLastError() ' Return Me.averageError 'End Function Public Overrides Function ShowWeights$(Optional format$ = format2Dec) Dim sb As New StringBuilder sb.Append(Me.ShowParameters()) For i = 0 To Me.layerCount - 1 sb.AppendLine("Neuron count(" & i & ")=" & Me.neuronCount(i)) Next sb.AppendLine("") Dim j = 0 For Each w In Me.weights sb.AppendLine("W(" & j + 1 & ")=" & w.ToString & vbLf) j += 1 Next sb.AppendLine("") j = 0 For Each layer In Me.seq.Layers Dim k = 0 For Each tensr In layer.Parameters Dim m As Matrix = tensr.Data sb.AppendLine("Layer(" & j + 1 & "," & k + 1 & ").W=" & m.ToString & vbLf) k += 1 Next If layer.Parameters.Count > 0 Then j += 1 Next Return sb.ToString() End Function Public Overrides Sub PrintOutput(iteration%, Optional force As Boolean = False) If force OrElse ShowThisIteration(iteration) Then If Not Me.vectorizedLearningMode Then 'Dim nbTargets = Me.targetArray.GetLength(1) TestAllSamples(Me.inputArray) ', nbOutputs:=nbTargets) Else SetOuputAllSamples() End If ComputeAverageError() PrintSuccess(iteration) 'ShowMessage("pred=" & Me.pred.ToString) 'ShowMessage("loss=" & Me.loss.ToString) 'ShowMessage("weights=" & Me.weights.ToString) End If End Sub End Class clsStochasticGradientDescent.vb Imports System.Text Imports Perceptron.Utility Namespace DLFramework.Optimizers Public Class 
StochasticGradientDescent Private m_weightAdjustment! ' (alpha coefficient) Private m_learningRate! Private m_parameters As List(Of Tensor) Public Property Parameters As List(Of Tensor) Get Return m_parameters End Get Set(value As List(Of Tensor)) m_parameters = value End Set End Property Public ReadOnly Property LearningRate! Get Return m_learningRate End Get End Property Public ReadOnly Property WeightAdjustment! Get Return m_weightAdjustment End Get End Property Public Sub New(parameters As List(Of Tensor), Optional learningRate! = 0.1!, Optional weightAdjustment! = 0) Me.m_parameters = parameters Me.m_learningRate = learningRate Me.m_weightAdjustment = weightAdjustment End Sub Public Sub Step_(zero As Boolean) For Each parameter In Parameters Dim m As Matrix = parameter.Gradient.Data * Me.LearningRate parameter.Data -= m If zero OrElse Me.WeightAdjustment <> 0 Then _ parameter.Gradient.Data *= Me.WeightAdjustment Next End Sub Public Function ParametersToString$() Dim sb As New StringBuilder() Dim numPrm% = 0 For Each parameter In Parameters numPrm += 1 sb.AppendLine("prm n°" & numPrm & "=" & parameter.ToString()) Next Return sb.ToString End Function End Class End Namespace clsTensor.vb Imports Perceptron.Utility ' Matrix Namespace DLFramework Public Enum TensorOperations Addition Negation Substraction Multiplication Sumatory Transpose MatrixMultiplication Expand Other None End Enum Public Class Tensor Private Shared idCount% = 0 Private m_data As Matrix Private m_creators As List(Of Tensor) Private m_childrens As Dictionary(Of Integer, Integer) Private m_creationOperation As TensorOperations Private m_gradient As Tensor Private m_autoGrad As Boolean Private m_id% Private m_arguments As List(Of Object) Private m_backwardCallback As Action(Of Tensor, Tensor, List(Of Tensor)) Public Property Data As Matrix Get Return Me.m_data End Get Set(value As Matrix) Me.m_data = value End Set End Property Public ReadOnly Property Creators As List(Of Tensor) Get Return 
Me.m_creators End Get End Property Public ReadOnly Property CreationOperation As TensorOperations Get Return Me.m_creationOperation End Get End Property Public Property Gradient As Tensor Get Return Me.m_gradient End Get Set(value As Tensor) Me.m_gradient = value End Set End Property Public ReadOnly Property AutoGrad As Boolean Get Return Me.m_autoGrad End Get End Property Public ReadOnly Property Id% Get Return Me.m_id End Get End Property Public Property Childrens As Dictionary(Of Integer, Integer) Get Return Me.m_childrens End Get Set(value As Dictionary(Of Integer, Integer)) Me.m_childrens = value End Set End Property Public ReadOnly Property Arguments As List(Of Object) Get Return Me.m_arguments End Get End Property Public Sub New(data As Matrix, Optional autoGrad As Boolean = False, Optional creators As List(Of Tensor) = Nothing, Optional creationOperation As TensorOperations = TensorOperations.None, Optional arguments As List(Of Object) = Nothing, Optional backwardCallback As Action( Of Tensor, Tensor, List(Of Tensor)) = Nothing) Me.m_data = data Me.m_autoGrad = autoGrad Me.m_gradient = Nothing Me.m_arguments = arguments Me.m_backwardCallback = backwardCallback Me.m_id = idCount idCount += 1 Me.m_creators = creators Me.m_creationOperation = creationOperation Me.m_childrens = New Dictionary(Of Integer, Integer)() If Me.Creators IsNot Nothing Then For Each creator In Me.Creators If creator.m_childrens.ContainsKey(Id) Then creator.m_childrens(Id) += 1 Else creator.m_childrens.Add(Id, 1) End If Next End If End Sub Private Function allChildrenGradsAccountedFor() As Boolean For Each child In m_childrens If child.Value <> 0 Then Return False Next Return True End Function Public Sub Backward(gradient As Tensor, Optional gradientOrigin As Tensor = Nothing) If Not Me.m_autoGrad Then Return If gradient Is Nothing Then _ gradient = New Tensor(Matrix.Ones(m_data.r, m_data.c)) If gradientOrigin IsNot Nothing Then If m_childrens(gradientOrigin.Id) = 0 Then _ Throw New 
ArgumentException("Cannot backprop more than once") m_childrens(gradientOrigin.Id) -= 1 End If If Me.m_gradient Is Nothing Then Me.m_gradient = gradient Else Me.m_gradient = Tensor.Add(Me.m_gradient, gradient) End If If Me.m_creators IsNot Nothing AndAlso (allChildrenGradsAccountedFor() OrElse gradientOrigin Is Nothing) Then Select Case m_creationOperation Case TensorOperations.None Case TensorOperations.Addition AdditionTensorOperation() Case TensorOperations.Negation NegationTensorOperation() Case TensorOperations.Substraction SubstractionTensorOperation() Case TensorOperations.Multiplication MultiplicationTensorOperation() Case TensorOperations.Sumatory SumatoryTensorOperation() Case TensorOperations.Transpose TransposeTensorOperation() Case TensorOperations.MatrixMultiplication MatrixMultiplicationTensorOperation() Case TensorOperations.Expand ExpandTensorOperation() Case TensorOperations.Other m_backwardCallback(Me, Me.m_gradient, Me.m_creators) Case Else Throw New ArgumentException( "Invalid Creation operation: {CreationOperation}") End Select End If End Sub Public Overrides Function ToString$() Return Me.m_data.ToString() End Function Private Sub CheckCreatorsThrow(creatorNumber%) If Me.m_creators Is Nothing Then _ Throw New ArgumentException("Creators can not be null") If Me.m_creators.Count <> creatorNumber Then _ Throw New ArgumentException("Creator count must be 2 not {Creators.Count}") End Sub Private Sub CheckArgumentsThrow(argumentsNumber%) If Me.m_arguments Is Nothing Then Throw New ArgumentException("Arguments are null") End If If Me.m_arguments.Count <> argumentsNumber Then Throw New ArgumentException( "Number of arguments must be {argumentsNumber}") End If End Sub Private Function CheckCreators(creatorNumber%) As Boolean If Me.m_creators Is Nothing Then Return False If Me.m_creators.Count <> creatorNumber Then Return False Return True End Function Private Sub SumatoryTensorOperation() CheckCreatorsThrow(1) CheckArgumentsThrow(1) Dim dimension = 
CType(m_arguments(0), AxisZero) Dim copies% = 0 If dimension = AxisZero.horizontal Then copies = CInt(Me.m_creators(0).m_data.c) Else copies = CInt(Me.m_creators(0).m_data.r) End If Me.m_creators(0).Backward(Tensor.Expand(Me.m_gradient, dimension, copies)) End Sub Private Sub MatrixMultiplicationTensorOperation() Me.m_creators(0).Backward( Tensor.MatMult(Me.m_gradient, Tensor.Transp(Me.m_creators(1)))) Me.m_creators(1).Backward( Tensor.Transp(Tensor.MatMult(Tensor.Transp(Me.m_gradient), Me.m_creators(0)))) End Sub Private Sub TransposeTensorOperation() CheckCreatorsThrow(1) Me.m_creators(0).Backward(Tensor.Transp(Me.m_gradient)) End Sub Private Sub MultiplicationTensorOperation() CheckCreatorsThrow(2) Me.m_creators(0).Backward(Tensor.Mul(Me.m_gradient, Me.m_creators(1)), Me) Me.m_creators(1).Backward(Tensor.Mul(Me.m_gradient, Me.m_creators(0)), Me) End Sub Private Sub SubstractionTensorOperation() CheckCreatorsThrow(2) Me.m_creators(0).Backward(Me.m_gradient, Me) Me.m_creators(1).Backward(Tensor.Neg(Me.m_gradient), Me) End Sub Private Sub NegationTensorOperation() CheckCreatorsThrow(1) Me.m_creators(0).Backward(Tensor.Neg(Me.m_gradient), Me) End Sub Private Sub AdditionTensorOperation() CheckCreatorsThrow(2) Me.m_creators(0).Backward(Me.m_gradient, Me) Me.m_creators(1).Backward(Me.m_gradient, Me) End Sub Private Sub ExpandTensorOperation() CheckCreatorsThrow(1) CheckArgumentsThrow(1) Dim dimension = CType(Me.m_arguments(0), AxisZero) Me.m_creators(0).Backward(Tensor.Sum(Me.m_gradient, dimension)) End Sub Public Shared Function Expand(A As Tensor, axis0 As AxisZero, copies%) As Tensor Dim m As New Matrix() If axis0 = AxisZero.horizontal Then m = Matrix.Zeros(A.m_data.r, copies) Matrix.MatrixLoop(Sub(i, j) m(i, j) = A.m_data(i, 0) End Sub, A.m_data.r, copies) ElseIf axis0 = AxisZero.vertical Then m = Matrix.Zeros(copies, A.m_data.c) Matrix.MatrixLoop(Sub(i, j) m(i, j) = A.m_data(0, j) End Sub, copies, A.m_data.c) End If If A.m_autoGrad Then Dim Creators = New List(Of 
Tensor)() From {A} Dim Argument = New List(Of Object)() From {axis0} Return New Tensor(m, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Expand, arguments:=Argument) End If Return New Tensor(m) End Function Public Shared Function Neg(A As Tensor) As Tensor If A.m_autoGrad Then Dim Creators = New List(Of Tensor)() From {A} Return New Tensor(A.m_data * -1.0!, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Negation) End If Return New Tensor(A.m_data * -1.0!) End Function Public Shared Function Add(A As Tensor, B As Tensor) As Tensor If A.m_autoGrad AndAlso B.m_autoGrad Then Dim Creators = New List(Of Tensor)() From {A, B} Return New Tensor(A.m_data + B.m_data, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Addition) End If Return New Tensor(A.m_data + B.m_data) End Function Public Shared Function Substract(A As Tensor, B As Tensor) As Tensor If A.m_autoGrad AndAlso B.m_autoGrad Then Dim Creators = New List(Of Tensor)() From {A, B} Return New Tensor(A.m_data - B.m_data, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Substraction) End If Return New Tensor(A.m_data - B.m_data) End Function Public Shared Function Mul(A As Tensor, B As Tensor) As Tensor If A.m_autoGrad AndAlso B.m_autoGrad Then Dim Creators = New List(Of Tensor)() From {A, B} 'Return New Tensor(Matrix.DeltaMult(A.m_data, B.m_data), ' autoGrad:=True, creators:=Creators, ' creationOperation:=TensorOperations.Multiplication) Return New Tensor(A.m_data * B.m_data, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Multiplication) End If 'Return New Tensor(Matrix.DeltaMult(A.m_data, B.m_data)) Return New Tensor(A.m_data * B.m_data) End Function Public Shared Function MatMult(A As Tensor, B As Tensor) As Tensor If A.m_autoGrad AndAlso B.m_autoGrad Then Dim Creators = New List(Of Tensor)() From {A, B} 'Return New Tensor(Matrix.MatMult(A.m_data, B.m_data), ' autoGrad:=True, creators:=Creators, ' 
creationOperation:=TensorOperations.MatrixMultiplication) Return New Tensor(A.m_data * B.m_data, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.MatrixMultiplication) End If 'Return New Tensor(Matrix.MatMult(A.m_data, B.m_data)) Return New Tensor(A.m_data * B.m_data) End Function Public Shared Function Sum(A As Tensor, axis0 As AxisZero) As Tensor If A.m_autoGrad Then Dim Argument = New List(Of Object)() From {axis0} Dim Creators = New List(Of Tensor)() From {A} Return New Tensor(A.m_data.Sumatory(axis0), autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Sumatory, arguments:=Argument) End If Return New Tensor(A.m_data.Sumatory(axis0)) End Function Public Shared Function Transp(A As Tensor) As Tensor If A.m_autoGrad Then Dim Creators = New List(Of Tensor)() From {A} Return New Tensor(A.m_data.T, autoGrad:=True, creators:=Creators, creationOperation:=TensorOperations.Transpose) End If Return New Tensor(A.m_data.T) End Function End Class End Namespace clsELULayer.vb Imports Perceptron.DLFramework.Layers.Activation Namespace DLFramework.Layers Public Class ELULayer : Inherits Layer Private m_center! Public ReadOnly Property Center! Get Return m_center End Get End Property Public Sub New(center!) Me.m_center = center End Sub Public Overrides Function Forward(input As Tensor) As Tensor Return ELU.Forward(input, Me.Center) End Function End Class End Namespace clsHyperbolicTangentLayer.vb Imports Perceptron.DLFramework.Layers.Activation Namespace DLFramework.Layers Public Class HyperbolicTangentLayer : Inherits Layer Private m_center! Public ReadOnly Property Center! Get Return m_center End Get End Property Public Sub New(center!) 
Me.m_center = center End Sub Public Overrides Function Forward(input As Tensor) As Tensor Return HyperbolicTangent.Forward(input, Me.Center) End Function End Class End Namespace clsLayer.vb Namespace DLFramework.Layers Public Class Layer Protected m_parameters As List(Of Tensor) Public Overridable Property Parameters As List(Of Tensor) Get Return m_parameters End Get Set(value As List(Of Tensor)) m_parameters = value End Set End Property Public Sub New() m_parameters = New List(Of Tensor)() End Sub Public Overridable Function Forward(input As Tensor) As Tensor Return Nothing End Function Public Overridable Function Forward(input As Tensor, target As Tensor) As Tensor Return Nothing End Function 'Public Overridable Function Forward(input As Tensor, target As Tensor, ' useBias As Boolean) As Tensor ' Return Nothing 'End Function End Class End Namespace clsLinear.vb Imports Perceptron.Utility ' Matrix Namespace DLFramework.Layers Public Class Linear : Inherits Layer Private m_addBias As Boolean = False Public Sub New(input%, output%, w As Matrix, addBias As Boolean) Dim weights = New Tensor(w, autoGrad:=True) Parameters.Add(weights) Me.m_addBias = addBias End Sub Public Sub New(input%, output%, w As Matrix, bias0 As Matrix) Dim weights = New Tensor(w, autoGrad:=True) Dim bias = New Tensor(bias0, autoGrad:=True) Parameters.Add(weights) Parameters.Add(bias) Me.m_addBias = True End Sub Public Sub New(input%, output%, addBias As Boolean) Dim w = Matrix.Zeros(input, output) - 1 Dim weights = New Tensor(w, autoGrad:=True) Parameters.Add(weights) Me.m_addBias = addBias If Not Me.m_addBias Then Exit Sub Dim bias = New Tensor(Matrix.Zeros(1, output), autoGrad:=True) Parameters.Add(bias) End Sub Public Sub New(input%, output%, r As Random, addBias As Boolean) Dim w = (Matrix.Randomize(input, output, r) * 2) - 1 Dim weights = New Tensor(w, autoGrad:=True) Parameters.Add(weights) Me.m_addBias = addBias If Not Me.m_addBias Then Exit Sub Dim bias = New Tensor(Matrix.Zeros(1, 
output), autoGrad:=True) Parameters.Add(bias) End Sub Public Overrides Function Forward(input As Tensor) As Tensor If Me.m_addBias Then Dim bias = Tensor.Expand(Parameters(1), AxisZero.vertical, input.Data.r) Dim tnsor = Tensor.Add(Tensor.MatMult(input, Parameters(0)), bias) Return tnsor Else Return input End If End Function End Class End Namespace clsSequential.vb Imports System.Collections.Generic Imports System.Text Namespace DLFramework.Layers Public Class Sequential : Inherits Layer Private m_layers As List(Of Layer) Public ReadOnly Property Layers As List(Of Layer) Get Return m_layers End Get End Property Public Overrides Property Parameters As List(Of Tensor) Get Return GetParameters() End Get Set(value As List(Of Tensor)) m_parameters = value End Set End Property Public Sub New(layers As List(Of Layer)) Me.m_layers = layers End Sub Public Sub New() Me.m_layers = New List(Of Layer)() End Sub Public Overrides Function Forward(input As Tensor) As Tensor 'Dim numLayer% = 0 For Each layer In Me.m_layers input = layer.Forward(input) 'numLayer += 1 'Debug.WriteLine("Layer n°" & numLayer & ": " & input.ToString()) Next Return input End Function Public Function GetParameters() As List(Of Tensor) Dim temp As List(Of Tensor) = New List(Of Tensor)() For Each layer In Me.m_layers temp.AddRange(layer.Parameters) Next Return temp End Function Public Function ParametersToString$() Dim sb As New StringBuilder() For Each parameter In Parameters sb.Append(parameter.ToString()) Next Return sb.ToString End Function End Class End Namespace clsSigmoidLayer.vb Imports Perceptron.DLFramework.Layers.Activation Namespace DLFramework.Layers Public Class SigmoidLayer : Inherits Layer Private m_center! Public ReadOnly Property Center! Get Return m_center End Get End Property Public Sub New(center!) 
Me.m_center = center End Sub Public Overrides Function Forward(input As Tensor) As Tensor Return Sigmoid.Forward(input, Me.Center) End Function End Class End Namespace modMLPTensorTest.vb Imports Perceptron.DLFramework ' Tensor Imports Perceptron.DLFramework.Layers ' Linear, Sequential Imports Perceptron.DLFramework.Layers.Loss ' MeanSquaredError Imports Perceptron.DLFramework.Optimizers ' StochasticGradientDescent Imports Perceptron.Utility ' Matrix Imports Perceptron.clsMLPGeneric ' enumLearningMode Module modMLPTensorTest Sub MainTensorMLP() 'SimpleTest() Console.WriteLine("Tensor MultiLayerPerceptron with the classical XOR test.") TensorMLPXorTest() Console.WriteLine("Press a key to quit.") Console.ReadKey() End Sub Public Sub SimpleTest() Dim r As New Random ' Works with an easy test: 'Dim sourceDble As Double(,) = {{0, 0}, {0, 1}, {1, 0}, {1, 1}} 'Dim targetDble As Double(,) = {{0}, {1}, {0}, {1}} ' Does not work with the classical and difficult XOR test: Dim sourceDble As Double(,) = {{1, 0}, {0, 0}, {0, 1}, {1, 1}} Dim targetDble As Double(,) = {{1}, {0}, {1}, {0}} Dim sourceMatrix As Matrix = sourceDble Dim data As New Tensor(sourceMatrix, autoGrad:=True) Dim targetMatrix As Matrix = targetDble Dim target As New Tensor(targetMatrix, autoGrad:=True) Dim seq As New Sequential() seq.Layers.Add(New Linear(2, 3, r, addBias:=True)) seq.Layers.Add(New Linear(3, 1, r, addBias:=True)) Dim sgd As New StochasticGradientDescent(seq.Parameters, 0.1!) Dim mse As New MeanSquaredError() For i = 0 To 20 Dim pred = seq.Forward(data) Dim loss = mse.Forward(pred, target) loss.Backward(New Tensor(Matrix.Ones(loss.Data.c, loss.Data.r))) sgd.Step_(zero:=True) Debug.WriteLine("Epoch: {" & i & "} Loss: { " & loss.ToString() & "}") Next End Sub Public Sub TensorMLPXorTest(Optional nbXor% = 1) Dim mlp As New clsMLPTensor mlp.ShowMessage("Tensor MLP Xor test") mlp.ShowMessage("-------------------") mlp.Initialize(learningRate:=0.1!, weightAdjustment:=0.05!) 
Dim nbIterations% mlp.SetActivationFunctionOptimized( enumActivationFunctionOptimized.Sigmoid) 'mlp.SetActivationFunctionOptimized( ' enumActivationFunctionOptimized.HyperbolicTangent, gain:=2) ' Works only for 1 XOR, and 2 XOR in not vectorized learning mode: 'mlp.SetActivationFunctionOptimized( ' enumActivationFunctionOptimized.ELU) ', center:=0.4) nbIterations = 2000 mlp.printOutput_ = True mlp.printOutputMatrix = False mlp.nbIterations = nbIterations If nbXor = 1 Then mlp.inputArray = m_inputArrayXOR mlp.targetArray = m_targetArrayXOR mlp.InitializeStruct(m_neuronCountXOR, addBiasColumn:=True) 'mlp.InitializeStruct(m_neuronCountXOR231, addBiasColumn:=True) 'mlp.InitializeStruct(m_neuronCountXOR4Layers2331, addBiasColumn:=True) 'mlp.InitializeStruct(m_neuronCountXOR5Layers23331, addBiasColumn:=True) mlp.printOutputMatrix = True ElseIf nbXor = 2 Then mlp.inputArray = m_inputArray2XOR mlp.targetArray = m_targetArray2XOR 'mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=True) mlp.InitializeStruct(m_neuronCount2XOR, addBiasColumn:=True) ElseIf nbXor = 3 Then mlp.inputArray = m_inputArray3XOR mlp.targetArray = m_targetArray3XOR mlp.InitializeStruct(m_neuronCount3XOR, addBiasColumn:=True) End If mlp.Randomize() mlp.PrintWeights() WaitForKeyToStart() mlp.TrainVector() ' Works fine 'mlp.Train() 'mlp.Train(enumLearningMode.Systematic) ' Works fine 'mlp.Train(enumLearningMode.SemiStochastic) ' Works 'mlp.Train(enumLearningMode.Stochastic) ' Works mlp.ShowMessage("Tensor MLP Xor test: Done.") If nbXor > 1 Then Exit Sub WaitForKeyToContinue("Press a key to print MLP weights") mlp.PrintWeights() End Sub End Module clsMLPTensorFlow.vb ' From https://github.com/SciSharp/SciSharp-Stack-Examples/blob/master/src/TensorFlowNET.Examples/NeuralNetworks/NeuralNetXor.cs : C# -> VB .NET conversion ' https://www.nuget.org/packages/TensorFlow.NET ' https://www.nuget.org/packages/Microsoft.ML.TensorFlow.Redist ' Install-Package TensorFlow.NET -Version 0.14.1 ' Install-Package 
' Microsoft.ML.TensorFlow.Redist
' https://github.com/SciSharp/SciSharp-Stack-Examples
' https://github.com/SciSharp/TensorFlow.NET
' https://tensorflownet.readthedocs.io/en/latest

Imports NumSharp
Imports Tensorflow
Imports Tensorflow.Binding
Imports System.Text

' Tuples are not available for Visual Studio 2013: Set 0: Off
#Const Implementation = 1 ' 0: Off, 1: On

''' <summary>
''' MLP backed by the TensorFlow.NET library:
''' a single ReLU hidden layer and a tanh output, trained by gradient descent
''' </summary>
Public Class clsMLPTensorFlow : Inherits clsVectorizedMLPGeneric

    ' When True, the score is recomputed sample by sample instead of vectorized
    Const computeScoreOneByOne = False
    ' Expected version of the Microsoft.ML.TensorFlow.Redist native dll
    Const versionRedist$ = "0.14.0"

    Private sess As Session            ' TensorFlow session
    Private data As NDArray            ' input samples fed to the graph
    Private target As NDArray          ' target values fed to the graph
    Private targetArray1D!()
    Private features, labels As Tensor ' graph placeholders
    Dim outputs1D#()                   ' last predictions, flattened row by row
    Dim hiddenWeights#(), outputWeights#()
    Private nbHiddenNeurons%

#If Implementation Or TENSORFLOW Then
    ' Handles on the graph nodes returned by makeGraph
    Private graphTuple As (
        trainOperation As Operation, loss As Tensor, globalStep As Tensor,
        prediction As Tensor, hw As Tensor, ow As Tensor)
#End If

    Public Overrides Function GetMLPType$()
        Return System.Reflection.MethodBase.GetCurrentMethod().DeclaringType.Name
    End Function

    Public Overrides Function GetActivationFunctionType() As enumActivationFunctionType
        Return enumActivationFunctionType.LibraryOptimized
    End Function

    ''' <summary>
    ''' Define the network structure (only input-hidden-output is supported,
    ''' without bias) and build the TensorFlow graph
    ''' </summary>
    Public Overrides Sub InitializeStruct(neuronCount%(), addBiasColumn As Boolean)
        MyBase.InitializeStruct(neuronCount, addBiasColumn)
        Me.nbHiddenNeurons = Me.neuronCount(1)
        ReDim Me.hiddenWeights(Me.nbInputNeurons * Me.nbHiddenNeurons - 1)
        ReDim Me.outputWeights(Me.nbHiddenNeurons * Me.nbOutputNeurons - 1)
        If Me.useBias Then
            Throw New NotImplementedException(
                "useBias is not implemented for clsMLPTensorFlow!")
        End If
        If Me.layerCount <> 3 Then
            MsgBox("This TensorFlow implementation can only compute one hidden layer!",
                MsgBoxStyle.Exclamation)
            Me.layerCount = 3
        End If
        InitializeTensorFlow()
    End Sub

#If Implementation Or TENSORFLOW Then

    ''' <summary>
    ''' Locate (and copy if needed) the native tensorflow.dll, then build the graph,
    ''' the session and the input/target NDArrays
    ''' </summary>
    Private Sub InitializeTensorFlow()

        ' Check TensorFlow.Redist dll tensorflow.dll
        'Dim exePath = Application.StartupPath() ' WinForm
        ' Console app.:
        Dim asmPath = System.Reflection.Assembly.GetExecutingAssembly().Location
        Dim exePath = System.IO.Path.GetDirectoryName(asmPath)
        Dim dllPath = exePath & "\tensorflow.dll"
        Dim dllPathDotNetCore = exePath & "\runtimes\win-x64\native\tensorflow.dll"
        If Not System.IO.File.Exists(dllPath) AndAlso
            Not System.IO.File.Exists(dllPathDotNetCore) Then
            ' Look for the dll in the NuGet packages folder, trying several
            ' possible solution layouts (bin, bin\Debug, SolutionDotNet4...)
            Const TFDllPath = "\packages\Microsoft.ML.TensorFlow.Redist." &
                versionRedist & "\runtimes\win-x64\native\tensorflow.dll"
            Dim srcDllPath = System.IO.Path.GetDirectoryName(exePath) & TFDllPath
            ' If \bin\Debug directory:
            Dim srcDllPath2 = System.IO.Path.GetDirectoryName(
                System.IO.Path.GetDirectoryName(exePath)) & TFDllPath
            Dim srcDllPath3 = System.IO.Path.GetDirectoryName(
                System.IO.Path.GetDirectoryName(
                System.IO.Path.GetDirectoryName(exePath))) & TFDllPath
            Dim srcDllPath4 = System.IO.Path.GetDirectoryName(
                System.IO.Path.GetDirectoryName(
                System.IO.Path.GetDirectoryName(exePath))) & "\SolutionDotNet4" & TFDllPath
            If Not System.IO.File.Exists(srcDllPath) AndAlso
                Not System.IO.File.Exists(srcDllPath2) AndAlso
                Not System.IO.File.Exists(srcDllPath3) AndAlso
                Not System.IO.File.Exists(srcDllPath4) Then _
                Throw New System.IO.FileNotFoundException(
                    "Please build the solution in Debug mode!")
            If System.IO.File.Exists(srcDllPath) Then
                System.IO.File.Copy(srcDllPath, dllPath)
            ElseIf System.IO.File.Exists(srcDllPath2) Then
                System.IO.File.Copy(srcDllPath2, dllPath)
            ElseIf System.IO.File.Exists(srcDllPath3) Then
                System.IO.File.Copy(srcDllPath3, dllPath)
            ElseIf System.IO.File.Exists(srcDllPath4) Then
                System.IO.File.Copy(srcDllPath4, dllPath)
            End If
        End If

        Dim graph = tf.Graph.as_default
        Me.exampleCount = Me.inputArray.GetLength(0)
        ' TensorFlow.NET version="0.20.1" : tf.placeholder() is not compatible with eager execution
        Me.features = tf.placeholder(tf.float32,
            New TensorShape(Me.exampleCount, Me.nbInputNeurons))
        Me.labels = tf.placeholder(tf.int32,
            New TensorShape(Me.exampleCount, Me.nbOutputNeurons))
        ' Single output: the labels placeholder is a flat vector
        If Me.nbOutputNeurons = 1 Then _
            Me.labels = tf.placeholder(tf.int32, New TensorShape(Me.exampleCount))
        Dim num_hidden = Me.nbHiddenNeurons
        Me.graphTuple = makeGraph(Me.features, Me.labels,
            num_hidden:=num_hidden, num_input:=Me.nbInputNeurons,
            num_output:=Me.nbOutputNeurons)
        Dim init = tf.global_variables_initializer
        Me.sess = tf.Session(graph)
        Me.sess.run(init)
        Me.data = Me.inputArray
        If Me.nbOutputNeurons = 1 Then
            Me.targetArray1D = clsMLPHelper.GetColumn(Me.targetArray, 0)
            Me.target = np.array(Me.targetArray1D, dtype:=np.float32)
        Else
            Me.target = np.array(Me.targetArray, dtype:=np.float32)
        End If

    End Sub

    ''' <summary>
    ''' Build the computation graph: one ReLU hidden layer, tanh output,
    ''' mean-squared loss and a gradient-descent optimizer
    ''' </summary>
    Private Function makeGraph(
        features As Tensor, labels As Tensor,
        num_hidden%, num_input%, num_output%) _
        As (op As Operation, t1 As Tensor, t2 As Tensor, t3 As Tensor,
            t4 As Tensor, t5 As Tensor)

        Dim stddev = 1 / Math.Sqrt(2)
        Dim hidden_weights = tf.Variable(tf.truncated_normal(
            New Integer() {num_input, num_hidden}, seed:=1,
            stddev:=CType(stddev, Single)), name:="hidden_weights")
        Dim hidden_activations = tf.nn.relu(tf.matmul(features, hidden_weights))
        Dim output_weights = tf.Variable(tf.truncated_normal(
            {num_hidden, num_output}, seed:=17,
            stddev:=CSng(1 / Math.Sqrt(num_hidden))), name:="output_weights")
        ' Shape [4, 1] for one XOR
        Dim logits = tf.matmul(hidden_activations, output_weights)
        ' Shape [4] for one XOR
        Dim prediction = tf.tanh(tf.squeeze(logits), name:="prediction")
        Dim tfCast = tf.cast(labels, tf.float32)
        Dim diff = prediction - tfCast
        Dim loss = tf.reduce_mean(tf.square(diff), name:="loss")
        Dim globalStep = tf.Variable(0, trainable:=False, name:="global_step")
        Dim trainOperation = tf.train.GradientDescentOptimizer(
            Me.learningRate).minimize(loss, global_step:=globalStep)
        Dim returnTuple As (
            trainOperation As Operation, loss As Tensor, globalStep As Tensor,
            prediction As Tensor, hw As Tensor, ow As Tensor) =
            (trainOperation, loss, globalStep, prediction,
            hidden_weights, output_weights)
        Return returnTuple

    End Function

    ''' <summary>
    ''' Run the graph and read back the hidden and output weight matrices
    ''' (note: each sess.run also performs a training step)
    ''' </summary>
    Private Sub ReadWeights()
        Dim resultTuple As ( _x As NDArray, step_ As NDArray,
            loss_value As NDArray, predictionValue As NDArray,
            hw As NDArray) = Me.sess.run(
                (Me.graphTuple.trainOperation, Me.graphTuple.globalStep,
                Me.graphTuple.loss, Me.graphTuple.prediction, Me.graphTuple.hw),
                (Me.features, Me.data), (Me.labels, Me.target))
        Me.hiddenWeights = ConvertNDArrayToArrayOfDouble(resultTuple.hw,
            Me.nbInputNeurons, Me.nbHiddenNeurons)
        Dim resultTuple2 As ( _x As NDArray, step_ As NDArray,
            loss_value As NDArray, predictionValue As NDArray,
            ow As NDArray) = Me.sess.run(
                (Me.graphTuple.trainOperation, Me.graphTuple.globalStep,
                Me.graphTuple.loss, Me.graphTuple.prediction, Me.graphTuple.ow),
                (Me.features, Me.data), (Me.labels, Me.target))
        Me.outputWeights = ConvertNDArrayToArrayOfDouble(resultTuple2.ow,
            Me.nbHiddenNeurons, Me.nbOutputNeurons)
    End Sub

#Else
    ' Stubs used when the TensorFlow implementation is compiled out
    Private Sub InitializeTensorFlow()
    End Sub
    Private Sub ReadWeights()
    End Sub
#End If

    ''' <summary>
    ''' Only the hyperbolic tangent is supported by this wrapper;
    ''' gain and center are forced (2 and 0 respectively)
    ''' </summary>
    Public Overrides Sub SetActivationFunction(
        actFnc As enumActivationFunction, Optional gain! = 1, Optional center! = 0)
        ' gain can only be 2 for TensorFlow MLP
        gain = 2
        center = 0
        Me.weightAdjustment = 0 ' Not used
        MyBase.SetActivationFunction(actFnc, gain, center)
        If actFnc <> enumActivationFunction.HyperbolicTangent Then
            Throw New NotImplementedException(
                "This activation function is not available!")
        End If
    End Sub

    Public Overrides Sub InitializeWeights(numLayer%, weights#(,))
        ' Just display a message, let's play all the tests
        ShowMessage("NotImplementedException: InitializeWeights is not implemented for clsMLPTensorFlow")
    End Sub

    ''' <summary>
    ''' TensorFlow initializes its own weights; min/max are ignored:
    ''' the weights are simply read back from the graph
    ''' </summary>
    Public Overrides Sub Randomize(Optional minValue! = -0.5!, Optional maxValue! = 0.5!)
        ReadWeights()
    End Sub

    ''' <summary>
    ''' Vectorized training loop over nbIterations graph runs
    ''' </summary>
    Public Overrides Sub TrainVector()
        If IsNothing(Me.sess) Then Exit Sub
        Me.learningMode = enumLearningMode.Vectorial
        Me.vectorizedLearningMode = True
        For iteration = 0 To Me.nbIterations - 1
            Me.numIteration = iteration
            TrainVectorOneIteration()
            If Me.printOutput_ Then PrintOutput(iteration)
        Next
        If computeScoreOneByOne Then
            'Dim nbTargets = Me.targetArray.GetLength(1)
            TestAllSamples(Me.inputArray) ', nbOutputs:=nbTargets)
        Else
            SetOuput1D()
        End If
        ComputeAverageError()
        ' Do not close before prediction
        'CloseSession()
    End Sub

    Public Overrides Sub CloseTrainingSession()
        If Not IsNothing(Me.sess) Then Me.sess.close()
    End Sub

    ''' <summary>
    ''' One vectorized training step: run the graph once,
    ''' store the loss and the flattened predictions
    ''' </summary>
    Public Overrides Sub TrainVectorOneIteration()
#If Implementation Or TENSORFLOW Then
        Dim resultTuple As ( _x As NDArray, step_ As NDArray,
            loss_value As NDArray, predictionValue As NDArray) = Me.sess.run(
                (Me.graphTuple.trainOperation, Me.graphTuple.globalStep,
                Me.graphTuple.loss, Me.graphTuple.prediction),
                (Me.features, Me.data), (Me.labels, Me.target))
        Me.averageError = resultTuple.loss_value
        Me.outputs1D = ConvertNDArrayToArrayOfDouble(resultTuple.predictionValue,
            Me.exampleCount, Me.nbOutputNeurons)
#End If
    End Sub

    ''' <summary>
    ''' Flatten an r x c NDArray into a Double array, going value by value
    ''' through a string round-trip (see BC30649 note below)
    ''' </summary>
    Private Function ConvertNDArrayToArrayOfDouble(nda As NDArray, r%, c%) As Double()
        ' input.ToArray<float> works in C#, but
        ' input.ToArray(Of Single) does not work in VB.Net : BC30649 "Unsupported Type"
        ' https://github.com/SciSharp/NumSharp.Lite
        ' https://github.com/shimat/opencvsharp_samples/issues/23 Probably similar issue
        Dim length = r * c
        Dim output#(length - 1)
        Dim k = 0
        For i = 0 To r - 1 ' row
            For j = 0 To c - 1 ' column
                Dim vt As ValueType = nda.GetAtIndex(i * c + j)
                ' Then no other solution to parse the string containing the value,
                ' because ValueType is a generic type
                Dim strVal = vt.ToString()
                Dim sngVal = Single.Parse(strVal)
                output(k) = sngVal
                k += 1
            Next
        Next
        Return output
    End Function

    ''' <summary>
    ''' Expand outputs1D into the 2D output matrix and the last-output buffer
    ''' </summary>
    Public Overrides Sub SetOuput1D()
        If IsNothing(Me.outputs1D) Then Exit Sub
        Dim nbInputs = Me.inputArray.GetLength(0)
        Dim nbTargets = Me.targetArray.GetLength(0)
        Dim outputs2D#(nbTargets - 1, Me.nbOutputNeurons - 1)
        For i = 0 To nbInputs - 1
            'For j = 0 To Me.nbOutputNeurons - 1
            '    outputs2D(i, j) = Me.outputs1D(i)
            'Next
            clsMLPHelper.Fill2DArrayOfDoubleByArray(outputs2D, Me.outputs1D, i)
        Next
        Me.output = outputs2D
        Me.lastOutputArray1DSingle =
            clsMLPHelper.Convert1DArrayOfDoubleToSingle(Me.outputs1D)
    End Sub

    ''' <summary>
    ''' Vectorial is the unique learning mode for this MLP:
    ''' always delegates to TrainVector, whatever the requested mode
    ''' </summary>
    Public Overrides Sub TrainSystematic(inputs!(,), targets!(,),
        Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
        'If learningMode = enumLearningMode.Vectorial Then
        ' This is the unique learning mode for this MLP
        TrainVector()
        '    Exit Sub
        'End If
        'Me.vectorizedLearningMode = False
        'TrainAllSamples(inputs, targets)
    End Sub

    Public Overrides Sub TrainStochastic(inputs!(,), targets!(,))
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TrainSemiStochastic(inputs!(,), targets!(,))
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

    Public Overrides Sub TrainOneSample(input!(), target!())
        Throw New NotImplementedException("There is no TrainOneSample() function!")
    End Sub

#If Implementation Or TENSORFLOW Then
    ''' <summary>
    ''' Predict one sample: write it into the first row of the data NDArray,
    ''' run the graph and collect the prediction
    ''' </summary>
    Public Overrides Sub TestOneSample(input!())
        ' We use only the first placeholder
        ' 1XOR
        'Me.data({0, 0}) = input(0)
        'Me.data({0, 1}) = input(1)
        ' 2XOR
        'Me.data({0, 0}) = input(0)
        'Me.data({0, 1}) = input(1)
        'Me.data({0, 2}) = input(2)
        'Me.data({0, 3}) = input(3)
        ' Generic form of the commented code above: build the index {0, m}
        For m = 0 To Me.nbInputNeurons - 1
            Dim lst As New List(Of Integer)
            Dim k = 0
            For l = 0 To 1
                lst.Add(k)
                k = m
            Next
            Dim prmArr = lst.ToArray
            Dim value = input(m)
            Me.data(prmArr) = value
        Next
        Dim resultTuple As ( _x As NDArray, step_ As NDArray,
            loss_value As NDArray, predictionValue As NDArray) = Me.sess.run(
                (Me.graphTuple.trainOperation, Me.graphTuple.globalStep,
                Me.graphTuple.loss, Me.graphTuple.prediction),
                (Me.features, Me.data), (Me.labels, Me.target))
        Me.outputs1D = ConvertNDArrayToArrayOfDouble(resultTuple.predictionValue,
            r:=1, c:=Me.nbOutputNeurons)
        Me.lastOutputArray1DSingle =
            clsMLPHelper.Convert1DArrayOfDoubleToSingle(Me.outputs1D)
        'Dim s = clsMLPHelper.ArrayToString(input)
        'Debug.WriteLine(s & " -> " & clsMLPHelper.ArrayToString(Me.lastOutputArray1DSingle))
        ' 27/12/2020
        Me.lastOutputArray1D = clsMLPHelper.Convert1DArrayOfSingleToDouble(
            Me.lastOutputArray1DSingle)
        Dim outputs2D#(0, Me.nbOutputNeurons - 1)
        clsMLPHelper.Fill2DArrayOfDouble(outputs2D, Me.lastOutputArray1D, 0)
        Me.output = outputs2D
    End Sub
#Else
    Public Overrides Sub TestOneSample(input!())
        ReDim Me.lastOutputArray1DSingle(Me.nbOutputNeurons - 1)
    End Sub
#End If

    ''' <summary>
    ''' Format the hidden and output weight matrices as a readable string
    ''' </summary>
    Public Overrides Function ShowWeights$(Optional format$ = format2Dec)
        Dim sb As New StringBuilder
        sb.Append(Me.ShowParameters())
        For i = 0 To Me.layerCount - 1
            sb.AppendLine("Neuron count(" & i & ")=" & Me.neuronCount(i))
        Next
        sb.AppendLine("")
        For i = 1 To Me.layerCount - 1
            sb.AppendLine("W(" & i & ")={")
            Dim nbNeuronsLayer = Me.nbHiddenNeurons
            Dim nbNeuronsPreviousLayer = Me.nbHiddenNeurons
            Dim lMax%
            If i = 1 Then
                nbNeuronsPreviousLayer = Me.nbInputNeurons
                lMax = Me.hiddenWeights.GetUpperBound(0)
            ElseIf i = Me.layerCount - 1 Then
                nbNeuronsLayer = Me.nbOutputNeurons
                lMax = Me.outputWeights.GetUpperBound(0)
            End If
            Dim l = 0
            For j = 0 To nbNeuronsLayer - 1
                sb.Append(" {")
                Dim nbWeights = nbNeuronsPreviousLayer
                For k = 0 To nbWeights - 1
                    Dim weight# = 0
                    If i = 1 Then
                        If l <= lMax Then weight = Me.hiddenWeights(l)
                    ElseIf i = Me.layerCount - 1 Then
                        If l <= lMax Then weight = Me.outputWeights(l)
                    End If
                    l += 1
                    Dim sVal$ = weight.ToString(format).ReplaceCommaByDot()
                    sb.Append(sVal)
                    If Me.useBias OrElse k < nbWeights - 1 Then sb.Append(", ")
                Next k
                If Me.useBias Then
                    Dim weightT = 0 ' useBias is not implemented here
                    Dim sValT$ = weightT.ToString(format).ReplaceCommaByDot()
                    sb.Append(sValT)
                End If
                sb.Append("}")
                If j < nbNeuronsLayer - 1 Then sb.Append("," & vbLf)
            Next j
            sb.Append("}" & vbLf)
            If i < Me.layerCount - 1 Then sb.AppendLine()
        Next i
        Return sb.ToString()
    End Function

    ''' <summary>
    ''' Print the current results every so many iterations (or when forced)
    ''' </summary>
    Public Overrides Sub PrintOutput(iteration%, Optional force As Boolean = False)
        If force OrElse ShowThisIteration(iteration) Then
            If computeScoreOneByOne Then
                TestAllSamples(Me.inputArray)
                If IsNothing(Me.outputs1D) Then
                    ShowMessage(vbLf & "Iteration n°" & iteration + 1 & "/" &
                        Me.nbIterations & vbLf & "Output: nothing!")
                    Exit Sub
                End If
            Else
                If Not Me.vectorizedLearningMode Then TestAllSamples(Me.inputArray)
                If IsNothing(Me.outputs1D) Then
                    ShowMessage(vbLf & "Iteration n°" & iteration + 1 & "/" &
                        Me.nbIterations & vbLf & "Output: nothing!")
                    Exit Sub
                End If
                If Me.vectorizedLearningMode Then
                    SetOuput1D()
                    ComputeAverageError()
                End If
            End If
            PrintSuccess(iteration)
        End If
    End Sub

End Class

modMLPTensorFlowTest.vb

Imports Perceptron.Utility ' Matrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode

''' <summary>
''' Console test driver for the TensorFlow MLP
''' </summary>
Module modMLPTensorFlowTest

    ''' <summary>
    ''' Entry point: run the XOR test with the TensorFlow MLP, then wait for a key press
    ''' </summary>
    Sub MainTensorFlowMLP()
        Console.WriteLine("TensorFlow MLP with the classical XOR test.")
        TensorFlowMLPXORTest()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

    ''' <summary>
    ''' Classical XOR test (1, 2 or 3 XOR) using the TensorFlow MLP
    ''' </summary>
    Public Sub TensorFlowMLPXORTest(Optional nbXor% = 1)
        Dim mlp As New clsMLPTensorFlow
        mlp.ShowMessage("TensorFlow MLP Xor test")
        mlp.ShowMessage("-----------------------")
        mlp.nbIterations = 5000 ' Hyperbolic tangent: works
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        If nbXor = 1 Then
            mlp.nbIterations = 500
            mlp.Initialize(learningRate:=0.2!)
            mlp.printOutputMatrix = True
            mlp.inputArray = m_inputArrayXOR
            mlp.targetArray = m_targetArrayXOR
            mlp.InitializeStruct(m_neuronCountXOR261, addBiasColumn:=False)
        ElseIf nbXor = 2 Then
            ' 75% success
            mlp.nbIterations = 5000
            mlp.Initialize(learningRate:=0.1!)
            mlp.inputArray = m_inputArray2XOR
            mlp.targetArray = m_targetArray2XOR
            mlp.InitializeStruct(m_neuronCount2XOR462, addBiasColumn:=False)
        ElseIf nbXor = 3 Then
            ' 190/192: 99% success
            mlp.nbIterations = 10000
            mlp.Initialize(learningRate:=0.05!)
' --- Tail of TensorFlowMLPXORTest (3-XOR branch) ---
mlp.inputArray = m_inputArray3XOR
            mlp.targetArray = m_targetArray3XOR
            mlp.InitializeStruct(m_neuronCount3XOR673, addBiasColumn:=False)
        End If
        mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain:=2)
        mlp.Randomize()
        mlp.PrintWeights()
        WaitForKeyToStart()
        mlp.TrainVector() ' Works fine
        mlp.ShowMessage("TensorFlow MLP Xor test: Done.")
        If nbXor > 1 Then Exit Sub
        WaitForKeyToContinue("Press a key to print MLP weights")
        mlp.PrintWeights()
    End Sub

    ''' <summary>
    ''' Iris flower analog test using the TensorFlow MLP (4-20-1 network)
    ''' </summary>
    Public Sub TensorFlowMLPIrisFlowerAnalogTest()
        Dim mlp As New clsMLPTensorFlow
        mlp.ShowMessage("TensorFlow.NET MLP Iris flower analog test")
        mlp.ShowMessage("------------------------------------------")
        mlp.nbIterations = 10000
        mlp.Initialize(learningRate:=0.01!, weightAdjustment:=0.01!)
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        mlp.inputArray = m_inputArrayIrisFlower
        mlp.targetArray = m_targetArrayIrisFlowerAnalog
        mlp.InitializeStruct(m_neuronCountIrisFlowerAnalog4_20_1, addBiasColumn:=False)
        mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain:=2)
        mlp.Randomize()
        mlp.PrintParameters()
        WaitForKeyToStart()
        mlp.minimalSuccessTreshold = 0.2
        mlp.Train()
        mlp.ShowMessage("TensorFlow.NET MLP Iris flower analog test: Done.")
    End Sub

End Module

modActivation.vb

' Inspired from
' https://www.codeproject.com/Articles/1205732/Build-Simple-AI-NET-Library-Part-Perceptron
' https://en.wikipedia.org/wiki/Activation_function
' https://fr.wikipedia.org/wiki/Fonction_d'activation
' General formulas for derivate:
' (fg)' = f'g+fg'
' (f/g)' = f'g-fg'/g^2
' (f°g)' = (f'°g)g' = f(g(x))' = f'(g(x))g'(x)

Imports Perceptron.MLP.ActivationFunction

''' <summary>
''' Activation function enumerations and debug switches
''' </summary>
Public Module modFctAct

    ''' <summary>
    ''' For non-derivatable activation functions, use an alternate derivative function
    ''' </summary>
    Public Const useAlternateDerivativeFunction As Boolean = False

    ' When True, each activation/derivative is double-checked against
    ' an equivalent formula (Stop on mismatch)
    Public Const debugActivationFunction As Boolean = False

    Public Enum enumActivationFunction
        Undefined = 0
        Identity = 1
        Sigmoid = 2
        HyperbolicTangent = 3
        Gaussian = 4
        ''' <summary>
        ''' Arc tangent (Atan or tan^-1: inverse of tangent function)
        ''' </summary>
        ArcTangent = 5
        Sinus = 6
        ''' <summary>
        ''' Exponential Linear Units (ELU)
        ''' </summary>
        ELU = 7
        ''' <summary>
        ''' Rectified Linear Units (ReLU)
        ''' f(x) = Max(0, x) : return x if x > 0 or return 0
        ''' </summary>
        ReLu = 8
        ''' <summary>
        ''' Rectified Linear Units (ReLU) with sigmoid for derivate
        ''' </summary>
        ReLuSigmoid = 9
        DoubleThreshold = 10
        ''' <summary>
        ''' The Mish function, proposed by Diganta Misra (https://arxiv.org/abs/1908.08681)
        ''' Definition: x tanh(ln(1 + e^2))
        ''' Implementation: x * Tanh(Softplus(x))
        ''' See https://github.com/Sergio0694/NeuralNetwork.NET/issues/93
        ''' </summary>
        Mish = 11
        SigneDoux = 12 ' Converges rather slowly towards the target (new function 09/03/2019)
        PasUnitaire = 13 ' No result obtained (3-XOR test)
        LogistiqueVerhulst = 14 ' No result obtained (3-XOR test)
        Signe = 15 ' ToDo: to be tested
    End Enum

    ''' <summary>
    ''' An activation function expressed from its direct function, e.g. f'(x)=g(f(x)),
    ''' can be optimized
    ''' </summary>
    Public Enum enumActivationFunctionOptimized
        Sigmoid = 1
        HyperbolicTangent = 2
        ''' <summary>
        ''' Exponential Linear Units (ELU)
        ''' </summary>
        ELU = 3
    End Enum

    ''' <summary>
    ''' Activation function type
    ''' </summary>
    Public Enum enumActivationFunctionType
        ''' <summary>
        ''' Normal (can use general activation function)
        ''' (ex.: MLPClassic, MLPMatrix, MLPMatrixVec)
        ''' </summary>
        Normal = 1
        ''' <summary>
        ''' Specific code (use only specific code for general activation function) (ex.: ?)
        ''' </summary>
        SpecificCode = 2
        ''' <summary>
        ''' Specific code and optimized (use only specific code with only optimized activation function)
        ''' (ex.: MLPTensor, MLPRProp)
        ''' </summary>
        SpecificCodeOptimized = 3
        ''' <summary>
        ''' Both normal and specific code (can use general or specific code for activation function, if defined)
        ''' (ex.: MLPOOP)
        ''' </summary>
        BothNormalAndSpecificCode = 4
        ''' <summary>
        ''' Optimized: can use only activation function expressed from its direct function,
        ''' e.g. f'(x)=g(f(x)) (ex.: ?)
        ''' </summary>
        Optimized = 5
        ''' <summary>
        ''' Library (can use general activation function proposed in the library)
        ''' (ex.: MLPEncog, MLPNeuralNet)
        ''' </summary>
        Library = 6
        ''' <summary>
        ''' Library (can use only optimized activation function proposed in the library)
        ''' (ex.: MLPAccord, MLPKeras, MLPTensorFlow, MLPBrightWire)
        ''' </summary>
        LibraryOptimized = 7
    End Enum

End Module

Namespace MLP.ActivationFunction

    ''' <summary>
    ''' Common contract for every activation function of the project
    ''' </summary>
    Public Interface IActivationFunction
        ''' <summary>
        ''' Is non linear function?
        ''' </summary>
        Function IsNonLinear() As Boolean
        ''' <summary>
        ''' Activation function
        ''' </summary>
        Function Activation#(x#, Optional gain# = 1, Optional center# = 0)
        ''' <summary>
        ''' Derivative function
        ''' </summary>
        Function Derivative#(x#, Optional gain# = 1, Optional center# = 0)
        ''' <summary>
        ''' Does the derivative f'(x) depend on the original function f(x)?
        ''' i.e. f'(x)=g(f(x))
        ''' </summary>
        Function DoesDerivativeDependOnOriginalFunction() As Boolean
        ''' <summary>
        ''' Derivative computed from the direct function, when possible: f'(x)=g(f(x))
        ''' </summary>
        Function DerivativeFromOriginalFunction#(x#, gain#)
    End Interface

    ''' <summary>
    ''' Identity Function : Always returns the same value that was used as its argument
    ''' f(x) = alpha.x
    ''' </summary>
    Public Class IdentityFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return True
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            Return False
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Dim xc# = x - center
            Return xc * gain
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Return gain
        End Function

        Public Function DerivativeFromOriginalFunction#(x#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            Return gain
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = Sigmoid
    ''' </summary>
    Public Class SigmoidFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return False
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            Return True
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Return CommonActivation(x, gain, center)
        End Function

        ''' <summary>
        ''' Sigmoid 1/(1+exp(-gain(x-center))), clamped to avoid arithmetic overflow
        ''' </summary>
        Private Shared Function CommonActivation#(x#, gain#, center#)
            'Const expMax As Boolean = False
            Dim xc# = x - center
            Dim xg# = -gain * xc
            Dim y#
            ' To avoid arithmetic overflow
            If xg > clsMLPGeneric.expMax Then
                y = 0 '1
                'If expMax Then y = clsMLPGeneric.expMax
            ElseIf xg < -clsMLPGeneric.expMax Then
                y = 1 '0
                'If expMax Then y = -clsMLPGeneric.expMax
            Else
                y = 1 / (1 + Math.Exp(xg))
            End If
            If debugActivationFunction Then
                Dim sig# = 1 / (1 + Math.Exp(xg))
                If Not clsMLPHelper.Compare(y, sig, dec:=5) Then Stop
            End If
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Return CommonDerivative(x, gain, center)
        End Function

        ''' <summary>
        ''' Derivative: gain.f(x).(1-f(x))
        ''' </summary>
        Public Shared Function CommonDerivative#(x#, gain#, center#)
            Dim xc# = x - center
            Dim fx# = CommonActivation(x, gain, center)
            Dim y# = gain * fx * (1 - fx) ' 31/07/2020
            'If gain = 1 Then
            '    Dim fx# = CommonActivation(x, gain, center)
            '    y = fx * (1 - fx)
            'Else
            '    Dim c# = -gain
            '    Dim exp# = Math.Exp(c * xc)
            '    Dim expP1# = 1 + exp
            '    y = -c * exp / (expP1 * expP1)
            'End If
            ' https://www.wolframalpha.com/input/?i=sigmoid+(alpha+*+x)+derivate
            If debugActivationFunction Then
                Dim cosH# = Math.Cosh(gain * xc)
                Dim y2# = gain / ((2 * cosH) + 2)
                If Not clsMLPHelper.Compare(y, y2, dec:=5) Then Stop
                Dim c# = -gain
                Dim exp# = Math.Exp(c * xc)
                Dim expP1# = 1 + exp
                Dim y3 = -c * exp / (expP1 * expP1)
                If Not clsMLPHelper.Compare(y, y3, dec:=5) Then Stop
            End If
            Return y
        End Function

        Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            'If gain <> 1 Then Return 0
            'Dim y# = CommonDerivativeFromOriginalFunction(fx)
            Dim y# = gain * CommonDerivativeFromOriginalFunction(fx) ' 31/07/2020
            Return y
        End Function

        ''' <summary>
        ''' f'(x) expressed from f(x): f(x).(1-f(x))
        ''' </summary>
        Public Shared Function CommonDerivativeFromOriginalFunction#(fx#)
            Dim y# = fx * (1 - fx)
            Return y
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = Hyperbolic Tangent (Bipolar Sigmoid)
    ''' </summary>
    Public Class HyperbolicTangentFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return False
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            Return True
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            'Const expMax As Boolean = True
            Dim xc# = x - center
            'Dim xg# = -2 * gain * xc
            Dim xg# = -gain * xc ' 31/07/2020
            Dim y#
            ' Clamp to avoid arithmetic overflow of Math.Exp
            If xg > clsMLPGeneric.expMax20 Then
                y = -1 ' 0
                'If expMax Then y = -1 'clsMLPGeneric.expMax
            ElseIf xg < -clsMLPGeneric.expMax20 Then
                y = 1
                'If expMax Then y = 1 '-clsMLPGeneric.expMax
            Else
                y = 2 / (1 + Math.Exp(xg)) - 1 ' = Math.Tanh(-xg / 2)
                'y = Math.Tanh(-xg / 2)
            End If
            ' https://www.wolframalpha.com/input/?i=HyperbolicTangent
            If debugActivationFunction Then
                Dim th# = Math.Tanh(-xg / 2)
                If Not clsMLPHelper.Compare(y, th, dec:=5) Then Stop
                Dim e2x# = Math.Exp(-xg)
                Dim th2# = (e2x - 1) / (e2x + 1)
                If Not clsMLPHelper.Compare(y, th2, dec:=5) Then Stop
            End If
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Dim xc# = x - center
            Dim fx# = Activation(x, gain, center)
            Dim y# = gain * (1 - fx * fx) / 2 ' 31/07/2020
            'Dim y#
            'If gain = 1 Then
            '    Dim fx# = Activation(x, gain, center)
            '    y = 1 - fx * fx
            'Else
            '    Dim xg# = -2 * gain
            '    Dim exp# = Math.Exp(xg * xc)
            '    Dim expP1# = 1 + exp
            '    y = -2 * xg * exp / (expP1 * expP1)
            'End If
            ' https://www.wolframalpha.com/input/?i=2+%2F+%281+%2B+Exp%28alpha+x%29%29+-+1+derivative
            If debugActivationFunction Then
                'Dim xg# = -2 * gain
                'Dim xg# = -gain ' 31/07/2020
                Dim xg# = gain ' 02/10/2020
                Dim exp# = Math.Exp(xg * xc)
                Dim expP1# = 1 + exp
                'Dim y2 = -2 * xg * exp / (expP1 * expP1)
                Dim y2 = 2 * xg * exp / (expP1 * expP1) ' 19/09/2020
                If Not clsMLPHelper.Compare(y, y2, dec:=5) Then Stop
            End If
            Return y
        End Function

        Public Function DerivativeFromOriginalFunction#(x#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            'If gain <> 1 Then Return 0
            'Dim y# = 1 - x * x
            Dim y# = gain * (1 - x * x) / 2 ' 02/10/2020
            'If debugActivationFunction Then
            '    Dim y2# = (1 - x) * (1 + x)
            '    If Not clsMLPHelper.Compare(y, y2, dec:=5) Then Stop
            'End If
            Return y
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = Gaussian
    ''' </summary>
    Public Class GaussianFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return False
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            Return False
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Const expMax As Boolean = False
            Dim xc# = x - center
            Dim xg# = -gain * xc * xc
            Dim y#
            ' Clamp to avoid arithmetic overflow of Math.Exp
            If xg > clsMLPGeneric.expMax Then
                y = 1
                If expMax Then y = clsMLPGeneric.expMax
            ElseIf xg < -clsMLPGeneric.expMax Then
                y = 0
                If expMax Then y = -clsMLPGeneric.expMax
            Else
                ' https://www.wolframalpha.com/input/?i=exp+%28-alpha+x2%29
                y = Math.Exp(xg)
            End If
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Dim xc# = x - center
            'Dim c# = -gain * gain
            'Dim exp# = Math.Exp(c * xc * xc)
            'Dim y# = 2 * c * xc * exp
            Dim y# = -2 * gain * xc * Math.Exp(-gain * xc * xc) ' 16/10/2020
            Return y
        End Function

        Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            ' The derivative cannot be expressed from f(x) alone
            Return 0
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = Arc tangent (Atan or tan^-1: inverse of tangent function)
    ''' </summary>
    Public Class ArcTangentFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return False
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            Return False
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Dim xc# = x - center
            Dim xg# = gain * xc
            Dim y# = gain * Math.Atan(xg)
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Dim xc# = x - center
            ' https://www.wolframalpha.com/input/?i=arctan(alpha+*+x)+derivative
            Dim y# = gain / (1 + gain * gain * xc * xc)
            Return y
        End Function

        Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            ' The derivative cannot be expressed from f(x) alone
            Return 0
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = sin(alpha.x)
    ''' </summary>
    Public Class SinusFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return False
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            Return False
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Dim xc# = x - center
            ' https://www.wolframalpha.com/input/?i=sin+%28alpha+x%29
            Dim y# = gain * Math.Sin(xc)
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Dim xc# = x - center
            'Dim y# = gain * Math.Cos(xc)
            Dim y# = gain * Math.Cos(gain * xc) ' 17/10/2020
            Return y
        End Function

        Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            ' The derivative cannot be expressed from f(x) alone
            Return 0
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = ELU(x) : Exponential Linear Units
    ''' </summary>
    Public Class ELUFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return False
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            Return True
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Dim xc# = x - center
            Dim y#
            If xc >= 0 Then
                y = xc
            Else
                y = gain * (Math.Exp(xc) - 1)
            End If
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            ' If gain < 0 the derivate is undefined
            If gain < 0 Then Return 0
            Dim xc# = x - center
            Dim y#
            If xc >= 0 Then
                y = 1
            Else
                ' Function: alpha(exp(x)-1)
                ' https://www.wolframalpha.com/input/?i=alpha(exp(x)-1)
                ' Derivate: alpha . exp(x) = f(x) + alpha = alpha(exp(x)-1) + alpha
                ' https://www.wolframalpha.com/input/?i=alpha(exp(x)-1)+derivate
                Dim fx# = Activation(x, gain, center)
                y = fx + gain
            End If
            Return y
        End Function

        Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            ' If gain < 0 the derivate is undefined
            If gain < 0 Then Return 0
            Dim y#
            If fx >= 0 Then
                y = 1
            Else
                y = fx + gain
            End If
            Return y
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = ReLU(x) : Rectified Linear Unit
    ''' </summary>
    Public Class ReLuFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return True
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            'If useAlternateDerivativeFunction Then Return True
            Return False
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Dim xc# = x - center
            Dim y# = Math.Max(xc * gain, 0)
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Dim xc# = x - center
            If xc >= 0 Then Return gain
            Return 0
        End Function

        Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            ' ReLUFunction
            'If useAlternateDerivativeFunction Then _
            '    Return SigmoidFunction.CommonDerivativeFromOriginalFunction(fx)
            Return 0
        End Function

    End Class

    ''' <summary>
    ''' Implements f(x) = ReLU(x) : Rectified Linear Unit (ReLU) with sigmoid for derivate
    ''' </summary>
    Public Class ReLuSigmoidFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return True
            'Return False ' Linear using sigmoid?
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            'If useAlternateDerivativeFunction Then Return True
            Return False
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Dim xc# = x - center
            Dim y# = Math.Max(xc * gain, 0)
            Return y
        End Function

        Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative
            Return SigmoidFunction.CommonDerivative(x, gain, center)
        End Function

        Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
            Return SigmoidFunction.CommonDerivativeFromOriginalFunction(fx)
        End Function

    End Class

    ''' <summary>
    ''' f(x) = Double-threshold(x)
    ''' </summary>
    Public Class DoubleThresholdFunction : Implements IActivationFunction

        Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear
            Return True
        End Function

        Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction
            'If useAlternateDerivativeFunction Then Return True
            Return False
        End Function

        Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation
            Dim xc# = x - center
            Dim reducedGain# = gain / 8
            Dim x2#
            If reducedGain = 0 Then
                x2 = 0.5
            Else
                x2 = (xc +
0.5 / reducedGain) * reducedGain End If Dim y# If x2 < 0.33 Then y = 0 ElseIf x2 > 0.66 Then y = 1 Else y = x2 / 0.33 - 1 End If Return y End Function Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative If useAlternateDerivativeFunction Then _ Return SigmoidFunction.CommonDerivative(x, gain, center) ' 24/04/2021 Dim xc# = ((x - center) * gain + 4) / 8 Dim y# If xc < 0.33 Then y = 0 ElseIf xc > 0.66 Then y = 0 Else y = 0.5 * gain End If Return y End Function Public Function DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction If useAlternateDerivativeFunction Then _ Return SigmoidFunction.CommonDerivativeFromOriginalFunction(fx) Return 0 End Function End Class ''' <summary> ''' Implements f(x) = Mish(x) = x * tanh(log(1+exp(x))) = x ((1+ exp(x))^2 -1)/((1+ exp(x))^2 +1) ''' https://github.com/Sergio0694/NeuralNetwork.NET/issues/93 ''' </summary> Public Class MishFunction : Implements IActivationFunction Public Function IsNonLinear() As Boolean Implements IActivationFunction.IsNonLinear Return False End Function Function DoesDerivativeDependOnOriginalFunction() As Boolean Implements IActivationFunction.DoesDerivativeDependOnOriginalFunction Return False End Function Public Function Activation#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Activation Dim xc# = x - center 'Dim y# = xc * Tanh(Softplus(xc)) Dim y# = xc * Math.Tanh(Softplus(xc)) Return y End Function Public Function Derivative#(x#, Optional gain# = 1, Optional center# = 0) Implements IActivationFunction.Derivative ' https://www.wolframalpha.com/input/?i=x+*+tanh%28log%281%2Bexp%28x%29%29%29+derivate Dim xc# = x - center Dim s# = 2 * Math.Exp(xc) + Math.Exp(2 * xc) + 2 Dim w# = 4 * (xc + 1) + (4 * (Math.Exp(2 * xc))) + Math.Exp(3 * xc) + Math.Exp(xc) * (4 * xc + 6) Dim y# = Math.Exp(xc) * w / (s * s) Return y End Function Public Function 
DerivativeFromOriginalFunction#(fx#, gain#) Implements IActivationFunction.DerivativeFromOriginalFunction
        If useAlternateDerivativeFunction Then _
            Return SigmoidFunction.CommonDerivativeFromOriginalFunction(fx)
        Return 0
    End Function

    ''' <summary>Softplus(x) = ln(1 + exp(x)), the smooth ReLU used inside Mish.</summary>
    Private Function Softplus#(x#)
        Dim exp# = Math.Exp(x)
        Dim sum# = 1 + exp
        Dim ln# = Math.Log(sum)
        Return ln
    End Function

    'Private Function Tanh#(x#)
    '    Dim e2x# = Math.Exp(2 * x)
    '    Return (e2x - 1) / (e2x + 1)
    'End Function

End Class

End Namespace

modMLPHelper.vb

Imports System.Runtime.CompilerServices ' Extension

Public Module modMLPHelper

    ' Shared numeric display formats (2, 4 and 6 decimals).
    Public Const format2Dec$ = "0.00"
    Public Const format4Dec$ = "0.0000"
    Public Const format6Dec$ = "0.000000"

    ''' <summary>
    ''' Builds a 3-section .NET format string (positive;negative;zero) so that a
    ''' negative zero prints without its minus sign. Formats that already contain
    ''' sections (a ";") are returned unchanged.
    ''' </summary>
    Public Function removeNegativeSignFromZero$(format$)
        If format.Contains(";") Then
            Return format
        Else
            ' Replace "-0.0" by "0.0" (the sign - is not relevant here)
            'If dec = "0" Then dec = "0;-0;0"
            'If dec = "0.0" Then dec = "0.0;-0.0;0.0"
            'If dec = "0.00" Then dec = "0.00;-0.00;0.00"
            Dim formatWithoutNegSign$ = format & ";-" & format & ";" & format
            Return formatWithoutNegSign
        End If
    End Function

    ''' <summary>String extension: replaces every comma with a dot (decimal separator normalization).</summary>
    <Extension()>
    Public Function ReplaceCommaByDot$(text$)
        Return text.Replace(",", ".")
    End Function

    ''' <summary>
    ''' True when the project was compiled as a console application; decided at
    ''' compile time through the TARGET conditional-compilation constant.
    ''' </summary>
    Public Function isConsoleApp() As Boolean
        ' https://www.codeproject.com/Questions/865642/Csharp-equivalent-for-sharpIf-TARGET-equals-winexe
#If TARGET = "winexe" Then
        ' Insert code to be compiled for a Windows application.
        Return False
#ElseIf TARGET = "exe" Then
        ' Insert code to be compiled for a console application.
        return True
#End If
        ' NOTE(review): if TARGET matches neither value, no Return executes and VB
        ' yields the function's default value (False) -- confirm this is intended.
        ' Ok, but show "'System.IO.IOException' in mscorlib.dll" message exception
        ' in WinForm app in the Debug windows of Visual Studio
        'Try
        '    Return Console.WindowHeight > 0
        'Catch 'ex As Exception
        '    Return False
        'End Try
        ' Does not work:
        'Dim isReallyAConsoleWindow = Console.Read() <> -1
        'Return isReallyAConsoleWindow
        ' .NET Core:
        'System.Reflection.PortableExecutable.IsConsoleApplication
        'Dim is_console_app = Not Console.OpenStandardInput(1) = System.IO.Stream.Null
        'Dim is_console_app = Not Console.In = System.IO.Stream.Null
    End Function

End Module

modMLPTest.vb

Imports Perceptron.Utility ' Matrix
Imports Perceptron.clsMLPGeneric ' enumLearningMode

Public Module modMLPTest

#Region "XOR data set"

    ' Candidate layer layouts {inputs, hidden..., outputs} for the XOR problems.
    Public ReadOnly m_neuronCountXOR%() = {2, 2, 1}
    Public ReadOnly m_neuronCountXOR231%() = {2, 3, 1} ' With bias
    Public ReadOnly m_neuronCountXOR241%() = {2, 4, 1}
    Public ReadOnly m_neuronCountXOR261%() = {2, 6, 1} ' TensorFlow minimal size
    Public ReadOnly m_neuronCountXOR271%() = {2, 7, 1} ' Keras minimal size
    Public ReadOnly m_neuronCountXOR291%() = {2, 9, 1} ' Keras minimal size for tanh
    Public ReadOnly m_neuronCountXOR2_10_1%() = {2, 10, 1}
    Public ReadOnly m_neuronCountXOR2_16_1%() = {2, 16, 1}
    Public ReadOnly m_neuronCount2XOR%() = {4, 4, 2}
    Public ReadOnly m_neuronCount2XOR4Layers%() = {4, 4, 4, 2}
    Public ReadOnly m_neuronCount2XOR5Layers%() = {4, 4, 4, 4, 2}
    Public ReadOnly m_neuronCount2XOR452%() = {4, 5, 2}
    Public ReadOnly m_neuronCount2XOR462%() = {4, 6, 2} ' TensorFlow minimal size
    Public ReadOnly m_neuronCount2XOR472%() = {4, 7, 2}
    Public ReadOnly m_neuronCount2XOR482%() = {4, 8, 2}
    Public ReadOnly m_neuronCount2XOR4_10_2%() = {4, 10, 2}
    Public ReadOnly m_neuronCount2XOR4_32_2%() = {4, 32, 2} ' Keras minimal size: stable!
Public ReadOnly m_neuronCount3XOR%() = {6, 6, 3}
    Public ReadOnly m_neuronCount3XOR4Layers%() = {6, 6, 6, 3}
    Public ReadOnly m_neuronCount3XOR5Layers%() = {6, 6, 6, 6, 3}
    Public ReadOnly m_neuronCount3XOR673%() = {6, 7, 3}
    Public ReadOnly m_neuronCount3XOR683%() = {6, 8, 3}
    Public ReadOnly m_neuronCount3XOR6_10_3%() = {6, 10, 3} ' Keras minimal size: stable!
    Public ReadOnly m_neuronCount3XOR6_32_3%() = {6, 32, 3} ' Keras stable size for tanh
    Public ReadOnly m_neuronCountXOR4Layers%() = {2, 2, 2, 1}
    Public ReadOnly m_neuronCountXOR4Layers2331%() = {2, 3, 3, 1}
    Public ReadOnly m_neuronCountXOR4Layers2661%() = {2, 6, 6, 1} ' Keras minimal size
    Public ReadOnly m_neuronCountXOR5Layers%() = {2, 2, 2, 2, 1}
    Public ReadOnly m_neuronCountXOR5Layers23331%() = {2, 3, 3, 3, 1}
    Public ReadOnly m_neuronCountXOR5Layers27771%() = {2, 7, 7, 7, 1} ' Keras minimal size

    ''' <summary>Binds the 1-XOR training set (input + target) to the given MLP.</summary>
    Public Sub InitXOR(mlp As clsMLPGeneric)
        mlp.Initialize(learningRate:=0.01!)
        mlp.inputArray = m_inputArrayXOR
        mlp.targetArray = m_targetArrayXOR
    End Sub

    ''' <summary>Binds the 2-XOR training set (input + target) to the given MLP.</summary>
    Public Sub Init2XOR(mlp As clsMLPGeneric)
        mlp.Initialize(learningRate:=0.01!)
        mlp.inputArray = m_inputArray2XOR
        mlp.targetArray = m_targetArray2XOR
    End Sub

    ''' <summary>Binds the 3-XOR training set (input + target) to the given MLP.</summary>
    Public Sub Init3XOR(mlp As clsMLPGeneric)
        mlp.Initialize(learningRate:=0.01!)
        mlp.inputArray = m_inputArray3XOR
        mlp.targetArray = m_targetArray3XOR
    End Sub

#End Region

#Region "Iris flower data set"

    ' Candidate layer layouts for the Iris flower data set
    ' (analog: 1 output neuron, logical: 3 one-hot output neurons).
    Public ReadOnly m_neuronCountIrisFlowerAnalog%() = {4, 16, 16, 1}
    Public ReadOnly m_neuronCountIrisFlowerAnalog451%() = {4, 5, 1}
    Public ReadOnly m_neuronCountIrisFlowerAnalog4_20_1%() = {4, 20, 1}
    Public ReadOnly m_neuronCountIrisFlowerAnalog4991%() = {4, 9, 9, 1}
    Public ReadOnly m_neuronCountIrisFlowerLogical%() = {4, 16, 16, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical443%() = {4, 4, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical453%() = {4, 5, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical463%() = {4, 6, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical4663%() = {4, 6, 6, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical4773%() = {4, 7, 7, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical4883%() = {4, 8, 8, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical4_16_83%() = {4, 16, 8, 3}
    Public ReadOnly m_neuronCountIrisFlowerLogical4_20_3%() = {4, 20, 3}

    ''' <summary>Binds the Iris analog training set and a 4-layer {4,9,9,1} structure.</summary>
    Public Sub InitIrisFlowerAnalog4Layers(mlp As clsMLPGeneric)
        mlp.inputArray = m_inputArrayIrisFlowerTrain
        mlp.targetArray = m_targetArrayIrisFlowerAnalogTrain
        mlp.InitializeStruct(m_neuronCountIrisFlowerAnalog4991, addBiasColumn:=True)
    End Sub

    ''' <summary>Binds the Iris logical (one-hot) training set and a {4,4,3} structure.</summary>
    Public Sub InitIrisFlowerLogical(mlp As clsMLPGeneric)
        'mlp.inputArray = m_inputArrayIrisFlower
        'mlp.targetArray = m_targetArrayIrisFlowerLogical
        mlp.inputArray = m_inputArrayIrisFlowerTrain
        mlp.targetArray = m_targetArrayIrisFlowerLogicalTrain
        mlp.InitializeStruct(m_neuronCountIrisFlowerLogical443, addBiasColumn:=True)
    End Sub

    ''' <summary>Binds the Iris logical training set and a 4-layer {4,16,8,3} structure.</summary>
    Public Sub InitIrisFlowerLogical4Layers(mlp As clsMLPGeneric)
        mlp.inputArray = m_inputArrayIrisFlowerTrain
        mlp.targetArray = m_targetArrayIrisFlowerLogicalTrain
        mlp.InitializeStruct(m_neuronCountIrisFlowerLogical4_16_83, addBiasColumn:=True)
    End Sub

#End Region

#Region "Sunspot data set"

    ''' <summary>Sunspot time-series setup: window of 7, learn 48 lines.</summary>
    Public Sub InitSunspot1(mlp As clsMLPGeneric)
        mlp.seriesArray = m_sunspotArray
        mlp.windowsSize = 7
        mlp.nbLinesToLearn = 48
mlp.nbLinesToPredict = 10
        mlp.InitializeStruct({7, 20, 1}, addBiasColumn:=True)
    End Sub

    ''' <summary>Sunspot time-series setup: window of 3, learn 95 lines, predict 100.</summary>
    Public Sub InitSunspot2(mlp As clsMLPGeneric)
        mlp.seriesArray = m_sunspotArray
        mlp.windowsSize = 3
        mlp.nbLinesToLearn = 95
        mlp.nbLinesToPredict = 100
        mlp.InitializeStruct({3, 20, 1}, addBiasColumn:=True)
    End Sub

#End Region

#Region "Vectorized tests"

#End Region

#Region "Iris flower standard tests"

    ''' <summary>
    ''' Generic Iris flower classification test (one-hot, 3 outputs): configures the
    ''' given MLP implementation, trains it, then scores it on the test samples.
    ''' </summary>
    Public Sub MLPGenericIrisFlowerTest(mlp As clsMLPGeneric, testName$,
            Optional nbIterations% = 2000, Optional threeLayers As Boolean = False,
            Optional addBiasColumn As Boolean = True,
            Optional nbHiddenLayersFromInput As Boolean = False,
            Optional sigmoid As Boolean = False,
            Optional minValue! = -0.5, Optional maxValue! = 0.5, Optional gain! = 2,
            Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
        mlp.ShowMessage(testName)
        mlp.nbIterations = nbIterations
        mlp.Initialize(learningRate:=0.1!, weightAdjustment:=0.1!)
        mlp.minimalSuccessTreshold = 0.3
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        'mlp.classificationObjective = True
        If threeLayers Then
            mlp.inputArray = m_inputArrayIrisFlowerTrain
            mlp.targetArray = m_targetArrayIrisFlowerLogicalTrain
            mlp.InitializeStruct(m_neuronCountIrisFlowerLogical4_20_3, addBiasColumn)
        ElseIf nbHiddenLayersFromInput Then
            mlp.inputArray = m_inputArrayIrisFlowerTrain
            mlp.targetArray = m_targetArrayIrisFlowerLogicalTrain
            ' clsMLPTensor: Set activation function before InitializeStruct
            mlp.SetActivationFunctionOptimized(enumActivationFunctionOptimized.Sigmoid, gain)
            mlp.InitializeStruct({4, 4, 4, 3}, addBiasColumn)
        Else
            InitIrisFlowerLogical4Layers(mlp)
        End If
        If sigmoid Then
            mlp.SetActivationFunction(enumActivationFunction.Sigmoid, gain)
        Else
            mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain)
        End If
        'mlp.Randomize()
        mlp.Randomize(minValue, maxValue)
        mlp.PrintParameters()
        WaitForKeyToStart()
        mlp.Train(learningMode)
        mlp.TestAllSamples(m_inputArrayIrisFlowerTest, m_targetArrayIrisFlowerLogicalTest, nbOutputs:=3)
        mlp.PrintSuccessPrediction()
        mlp.ShowMessage(testName & ": Done.")
    End Sub

    ''' <summary>
    ''' Generic Iris flower regression test (analog, 1 output): configures the given
    ''' MLP implementation, trains it, then scores it on the test samples.
    ''' </summary>
    Public Sub MLPGenericIrisFlowerTestAnalog(mlp As clsMLPGeneric, testName$,
            Optional nbIterations% = 1000, Optional threeLayers As Boolean = False,
            Optional addBiasColumn As Boolean = True,
            Optional nbHiddenLayersFromInput As Boolean = False,
            Optional sigmoid As Boolean = False,
            Optional minValue! = -0.5, Optional maxValue! = 0.5, Optional gain! = 2,
            Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
        mlp.ShowMessage(testName)
        mlp.nbIterations = nbIterations
        mlp.Initialize(learningRate:=0.1!, weightAdjustment:=0.1!)
        mlp.minimalSuccessTreshold = 0.2
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        If threeLayers Then
            mlp.inputArray = m_inputArrayIrisFlowerTrain
            mlp.targetArray = m_targetArrayIrisFlowerAnalogTrain
            mlp.InitializeStruct(m_neuronCountIrisFlowerAnalog4_20_1, addBiasColumn)
        ElseIf nbHiddenLayersFromInput Then
            mlp.inputArray = m_inputArrayIrisFlowerTrain
            mlp.targetArray = m_targetArrayIrisFlowerAnalogTrain
            ' clsMLPTensor: Set activation function before InitializeStruct
            mlp.SetActivationFunctionOptimized(enumActivationFunctionOptimized.Sigmoid, gain)
            mlp.InitializeStruct({4, 4, 4, 1}, addBiasColumn)
        Else
            InitIrisFlowerAnalog4Layers(mlp)
        End If
        If sigmoid Then
            mlp.SetActivationFunction(enumActivationFunction.Sigmoid, gain)
        Else
            mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain)
        End If
        'mlp.Randomize()
        mlp.Randomize(minValue, maxValue)
        mlp.PrintParameters()
        WaitForKeyToStart()
        mlp.Train(learningMode)
        mlp.TestAllSamples(m_inputArrayIrisFlowerTest, m_targetArrayIrisFlowerAnalogTest, nbOutputs:=1)
        mlp.PrintSuccessPrediction()
        mlp.ShowMessage(testName & ": Done.")
    End Sub

#End Region

#Region "Sunspot tests"

    ''' <summary>
    ''' Generic sunspot time-series test: trains the given MLP on the sunspot series
    ''' (window of 10), then scores its predictions.
    ''' </summary>
    Public Sub MLPGenericSunspotTest(mlp As clsMLPGeneric, testName$,
            Optional nbIterations% = 500, Optional addBiasColumn As Boolean = True,
            Optional nbHiddenLayersFromInput As Boolean = False,
            Optional sigmoid As Boolean = False, Optional
minValue! = -0.5, Optional maxValue! = 0.5, Optional gain! = 1,
            Optional learningMode As enumLearningMode = enumLearningMode.Defaut)
        mlp.ShowMessage(testName)
        mlp.nbIterations = nbIterations
        mlp.Initialize(learningRate:=0.1!, weightAdjustment:=0.1!)
        mlp.minimalSuccessTreshold = 0.1
        mlp.printOutput_ = True
        mlp.printOutputMatrix = False
        mlp.seriesArray = m_sunspotArray
        mlp.windowsSize = 10
        mlp.nbLinesToLearn = 48
        mlp.nbLinesToPredict = 10
        If nbHiddenLayersFromInput Then
            ' clsMLPTensor: Set activation function before InitializeStruct
            mlp.SetActivationFunctionOptimized(enumActivationFunctionOptimized.Sigmoid, gain)
            mlp.InitializeStruct({10, 10, 1}, addBiasColumn)
        Else
            mlp.InitializeStruct({10, 20, 1}, addBiasColumn)
        End If
        If sigmoid Then
            mlp.SetActivationFunction(enumActivationFunction.Sigmoid, gain)
        Else
            mlp.SetActivationFunction(enumActivationFunction.HyperbolicTangent, gain)
        End If
        mlp.Randomize(minValue, maxValue)
        mlp.PrintParameters()
        WaitForKeyToStart()
        mlp.Train(learningMode)
        mlp.TestAllSamples(mlp.inputArrayTest, mlp.targetArrayTest, nbOutputs:=1)
        mlp.PrintSuccessPrediction()
        mlp.ShowMessage(testName & ": Done.")
    End Sub

#End Region

End Module

modTests.vb

Module modTests

    ''' <summary>Console entry point: shows the interactive MLP test menu.</summary>
    Sub Main(args As String())
        MLPMenu()
    End Sub

    ''' <summary>Top-level menu: pick an MLP implementation, then loop until "0".</summary>
    Public Sub MLPMenu()
Retry:
        Console.WriteLine("")
        Console.WriteLine("")
        Console.WriteLine("MLP Test, choose an option from the following list:")
        Console.WriteLine("0: Exit")
        Console.WriteLine("a: Accord MLP")
        Console.WriteLine("b: BrightWire MLP")
        Console.WriteLine("c: Classical MLP")
        Console.WriteLine("e: Encog MLP")
        Console.WriteLine("f: TensorFlow.NET MLP")
        Console.WriteLine("k: Keras.NET MLP")
        Console.WriteLine("m: Matrix MLP")
        Console.WriteLine("n: NeuralNet.NET MLP")
        Console.WriteLine("o: Object-oriented programming MLP")
        Console.WriteLine("r: Resilient Propagation MLP")
        Console.WriteLine("t: Tensor MLP")
        Console.WriteLine("v: Vectorized Matrix MLP")
        Dim k = Console.ReadKey
        Select Case k.KeyChar
            Case "0"c : Exit Sub
            ' Every implementation key opens the same data-set sub-menu; the chosen
            ' key is forwarded so the sub-menu can dispatch to the right MLP.
            Case "a"c :
                ApplicationMenu(k.KeyChar)
            Case "b"c : ApplicationMenu(k.KeyChar)
            Case "c"c : ApplicationMenu(k.KeyChar)
            Case "e"c : ApplicationMenu(k.KeyChar)
            Case "f"c : ApplicationMenu(k.KeyChar)
            Case "k"c : ApplicationMenu(k.KeyChar)
            Case "m"c : ApplicationMenu(k.KeyChar)
            Case "n"c : ApplicationMenu(k.KeyChar)
            Case "o"c : ApplicationMenu(k.KeyChar)
            Case "r"c : ApplicationMenu(k.KeyChar)
            Case "t"c : ApplicationMenu(k.KeyChar)
            Case "v"c : ApplicationMenu(k.KeyChar)
        End Select
        GoTo Retry
    End Sub

    ''' <summary>Second-level menu: pick a data set for the chosen MLP implementation.</summary>
    Private Sub ApplicationMenu(mlpChoice As Char)
Retry:
        Console.WriteLine("")
        Console.WriteLine("")
        Console.WriteLine("MLP Test, choose an option from the following list:")
        Console.WriteLine("0: Exit")
        Console.WriteLine("1: 1 XOR")
        Console.WriteLine("2: 2 XOR")
        Console.WriteLine("3: 3 XOR")
        Console.WriteLine("4: IRIS (Logical)")
        Console.WriteLine("5: IRIS (Analog)")
        Console.WriteLine("6: Sunspot")
        Dim k = Console.ReadKey
        Select Case k.KeyChar
            Case "0"c : Exit Sub
            Case "1"c : XORTest(mlpChoice, nbXor:=1)
            Case "2"c : XORTest(mlpChoice, nbXor:=2)
            Case "3"c : XORTest(mlpChoice, nbXor:=3)
            Case "4"c : IrisFlowerTestLogical(mlpChoice)
            Case "5"c : IrisFlowerTestAnalog(mlpChoice)
            Case "6"c : SunspotTest(mlpChoice)
        End Select
        GoTo Retry
    End Sub

    ''' <summary>Dispatches the n-XOR test to the MLP implementation selected by key.</summary>
    Private Sub XORTest(mlpChoice As Char, nbXor%)
        Console.WriteLine("")
        Select Case mlpChoice
            Case "a"c : AccordMLPXorTest(nbXor)
            Case "b"c : BrightWireMLPXorTest(nbXor)
            Case "c"c : ClassicMLPXorTest(nbXor)
            Case "e"c : EncogMLPXorTest(nbXor)
            Case "f"c : TensorFlowMLPXORTest(nbXor) ' Works only with 1XOR?
Case "k"c : KerasMLPXorTest(nbXor)
            Case "m"c : MatrixMLPXorTest(nbXor)
            Case "n"c : NeuralNetMLPXorTest(nbXor)
            Case "o"c : OOPMLPXorTest(nbXor)
            Case "r"c : RPropMLPXorTest(nbXor)
            Case "t"c : TensorMLPXorTest(nbXor)
            Case "v"c : VectorizedMatrixMLPXorTest(nbXor)
        End Select
        NextTest()
    End Sub

    ''' <summary>Runs the Iris flower one-hot classification test on the selected implementation.</summary>
    Private Sub IrisFlowerTestLogical(mlpChoice As Char)
        Console.WriteLine("")
        Select Case mlpChoice
            Case "a"c
                MLPGenericIrisFlowerTest(New clsMLPAccord, "Accord MLP Iris flower logical test")
            Case "b"c
                MLPGenericIrisFlowerTest(New clsMLPBrightWire, "BrightWire MLP Iris flower logical test",
                    sigmoid:=True, learningMode:=clsMLPGeneric.enumLearningMode.VectorialBatch)
            Case "c"c
                ' Works only using sigmoid activation
                MLPGenericIrisFlowerTest(New clsMLPClassic, "Classic MLP Iris flower logical test", sigmoid:=True)
            Case "e"c
                MLPGenericIrisFlowerTest(New clsMLPEncog, "Encog MLP Iris flower logical test")
            Case "f"c
                ' No bias, only 3 layers, poor results! (only 50% learning, 90% prediction)
                MLPGenericIrisFlowerTest(New clsMLPTensorFlow, "TensorFlow.NET MLP Iris flower logical test",
                    nbIterations:=4000, threeLayers:=True, addBiasColumn:=False)
            Case "k"c
                ' Works only using sigmoid activation
                MLPGenericIrisFlowerTest(New clsMLPKeras, "Keras.NET MLP Iris flower logical test",
                    nbIterations:=100, sigmoid:=True)
            Case "m"c
                ' Three layers only, poor results!
                MLPGenericIrisFlowerTest(New clsMPLMatrix, "Matrix MLP Iris flower logical test",
                    nbIterations:=4000, threeLayers:=True)
            Case "n"c
                MLPGenericIrisFlowerTest(New clsMLPNeuralNet, "NeuralNet.NET MLP Iris flower logical test",
                    nbIterations:=3000, threeLayers:=True, sigmoid:=False)
            Case "o"c
                MLPGenericIrisFlowerTest(New clsMLPOOP, "Object-oriented programming MLP Iris flower logical test")
            Case "r"c
                Dim mlpRProp As New clsMLPRProp
                mlpRProp.classificationObjective = True
                ' Sometimes 100% prediction
                MLPGenericIrisFlowerTest(mlpRProp, "RProp MLP Iris flower logical test",
                    nbIterations:=200, threeLayers:=True, minValue:=-10.0!, maxValue:=10.0!)
                'MLPGenericIrisFlowerTest(mlpRProp, "RProp MLP Iris flower logical test",
                '    nbIterations:=200, sigmoid:=True)
                'NextTest()
                'MLPGenericIrisFlowerTest(New clsMLPRProp, "RProp MLP Iris flower logical test",
                '    nbIterations:=200, sigmoid:=True)
                'NextTest()
            Case "t"c
                ' 97.8% prediction, 98.9% learning with 300 iterations
                ' Nb hidden neurons = nb input neurons, works only using sigmoid activation
                MLPGenericIrisFlowerTest(New clsMLPTensor, "Tensor MLP Iris flower logical test",
                    nbIterations:=300, nbHiddenLayersFromInput:=True, sigmoid:=True)
            Case "v"c
                ' Works only using sigmoid activation
                MLPGenericIrisFlowerTest(New clsVectorizedMatrixMLP, "Vectorized Matrix MLP Iris flower logical test",
                    nbIterations:=1000, sigmoid:=True)
        End Select
        NextTest()
    End Sub

    ''' <summary>Runs the Iris flower analog (regression) test on the selected implementation.</summary>
    Private Sub IrisFlowerTestAnalog(mlpChoice As Char)
        Console.WriteLine("")
        Select Case mlpChoice
            Case "a"c
                MLPGenericIrisFlowerTestAnalog(New clsMLPAccord, "Accord MLP Iris flower analog test")
            Case "b"c
                MLPGenericIrisFlowerTestAnalog(New clsMLPBrightWire, "BrightWire MLP Iris flower analog test",
                    sigmoid:=True, learningMode:=clsMLPGeneric.enumLearningMode.VectorialBatch)
            Case "c"c
                MLPGenericIrisFlowerTestAnalog(New clsMLPClassic, "Classic MLP Iris flower analog test")
            Case "e"c
                MLPGenericIrisFlowerTestAnalog(New clsMLPEncog, "Encog MLP Iris flower analog test")
            Case "f"c
                ' No bias, only 3 layers, no result!
                MLPGenericIrisFlowerTestAnalog(New clsMLPTensorFlow, "TensorFlow.NET MLP Iris flower analog test",
                    nbIterations:=4000, threeLayers:=True, addBiasColumn:=False)
            Case "k"c
                ' Works only using sigmoid activation
                MLPGenericIrisFlowerTestAnalog(New clsMLPKeras, "Keras.NET MLP Iris flower analog test",
                    nbIterations:=100, sigmoid:=True)
            Case "m"c
                ' Three layers only, same results!
MLPGenericIrisFlowerTestAnalog(New clsMPLMatrix, "Matrix MLP Iris flower analog test",
                    nbIterations:=4000, threeLayers:=True)
            Case "n"c
                MLPGenericIrisFlowerTestAnalog(New clsMLPNeuralNet, "NeuralNet.NET MLP Iris flower analog test",
                    nbIterations:=3000, sigmoid:=False)
            Case "o"c
                MLPGenericIrisFlowerTestAnalog(New clsMLPOOP, "Object-oriented programming MLP Iris flower analog test")
            Case "r"c
                MLPGenericIrisFlowerTestAnalog(New clsMLPRProp, "RProp MLP Iris flower analog test",
                    sigmoid:=True, nbIterations:=300) ' minValue:=-10, maxValue:=10, gain:=0.5!)
            Case "t"c
                ' 93.3% prediction, 95% learning with 300 iterations
                ' Nb hidden neurons = nb input neurons, works only using sigmoid activation
                MLPGenericIrisFlowerTestAnalog(New clsMLPTensor, "Tensor MLP Iris flower analog test",
                    nbIterations:=300, nbHiddenLayersFromInput:=True, sigmoid:=True)
            Case "v"c
                ' Works only using sigmoid activation, poor results!
                MLPGenericIrisFlowerTestAnalog(New clsVectorizedMatrixMLP, "Vectorized Matrix MLP Iris flower analog test",
                    sigmoid:=True)
        End Select
        NextTest()
    End Sub

    ''' <summary>Runs the sunspot time-series test on the selected implementation.</summary>
    Public Sub SunspotTest(mlpChoice As Char)
        Console.WriteLine("")
        Select Case mlpChoice
            Case "a"c
                MLPGenericSunspotTest(New clsMLPAccord, "Accord MLP Sunspot test")
            Case "b"c
                MLPGenericSunspotTest(New clsMLPBrightWire, "BrightWire MLP Sunspot test",
                    sigmoid:=True, learningMode:=clsMLPGeneric.enumLearningMode.VectorialBatch)
            Case "c"c
                MLPGenericSunspotTest(New clsMLPClassic, "Classic MLP Sunspot test")
            Case "e"c
                MLPGenericSunspotTest(New clsMLPEncog, "Encog MLP Sunspot test")
            Case "f"c
                ' No bias, poor result!
                MLPGenericSunspotTest(New clsMLPTensorFlow, "TensorFlow.NET MLP Sunspot test",
                    nbIterations:=4000, addBiasColumn:=False)
            Case "k"c
                ' Works only using sigmoid activation
                MLPGenericSunspotTest(New clsMLPKeras, "Keras.NET MLP Sunspot test",
                    nbIterations:=100, sigmoid:=True)
            Case "m"c
                MLPGenericSunspotTest(New clsMPLMatrix, "Matrix MLP Sunspot test")
            Case "n"c
                MLPGenericSunspotTest(New clsMLPNeuralNet, "NeuralNet.NET MLP Sunspot test",
                    sigmoid:=True, nbIterations:=3000)
            Case "o"c
                MLPGenericSunspotTest(New clsMLPOOP, "Object-oriented programming MLP Sunspot test")
            Case "r"c
                MLPGenericSunspotTest(New clsMLPRProp, "RProp MLP Sunspot test",
                    sigmoid:=True, nbIterations:=300)
            Case "t"c
                ' Nb hidden neurons = nb input neurons, works only using sigmoid activation
                MLPGenericSunspotTest(New clsMLPTensor, "Tensor MLP Sunspot test",
                    nbHiddenLayersFromInput:=True, sigmoid:=True)
            Case "v"c
                ' Works only using sigmoid activation
                MLPGenericSunspotTest(New clsVectorizedMatrixMLP, "Vectorized Matrix MLP Sunspot test",
                    sigmoid:=True)
        End Select
        NextTest()
    End Sub

    ''' <summary>Pause between two tests (always waits, even outside a console).</summary>
    Public Sub NextTest()
        Console.WriteLine("Press a key to continue.")
        Console.ReadKey()
        Console.WriteLine()
    End Sub

    ''' <summary>Shows msg and waits for a key; no-op when not running as a console app.</summary>
    Public Sub WaitForKeyToContinue(msg$)
        If Not isConsoleApp() Then Exit Sub
        Console.WriteLine(msg)
        Console.ReadKey()
    End Sub

    ''' <summary>Waits for a key before starting a test; no-op when not running as a console app.</summary>
    Public Sub WaitForKeyToStart()
        If Not isConsoleApp() Then Exit Sub
        Console.WriteLine("Press a key to start.")
        Console.ReadKey()
    End Sub

    ''' <summary>Final pause before the process exits.</summary>
    Public Sub WaitForKeyToQuit()
        Console.WriteLine("Press a key to quit.")
        Console.ReadKey()
    End Sub

End Module