From 5a378ec7d7358da8ed2773b05f19381c6a47fbf0 Mon Sep 17 00:00:00 2001
From: Kamal Saleh
Date: Tue, 16 Jul 2024 16:20:26 +0200
Subject: [PATCH 1/3] install Eval method for parametrised morphisms

---
 gap/CategoryOfParametrisedMorphisms.gi | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/gap/CategoryOfParametrisedMorphisms.gi b/gap/CategoryOfParametrisedMorphisms.gi
index 9b8ec27..10cbe10 100644
--- a/gap/CategoryOfParametrisedMorphisms.gi
+++ b/gap/CategoryOfParametrisedMorphisms.gi
@@ -304,6 +304,16 @@ InstallMethod( ReparametriseMorphism,
 
 end );
 
+##
+InstallOtherMethod( Eval,
+        [ IsMorphismInCategoryOfParametrisedMorphisms, IsDenseList ],
+
+  function( f, pair )
+
+    return Eval( UnderlyingMorphism( f ), Concatenation( pair ) );
+
+end );
+
 ##
 InstallMethod( NaturalEmbeddingIntoCategoryOfParametrisedMorphisms,
         [ IsCapCategory, IsCategoryOfParametrisedMorphisms ],
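For illustration, a minimal session sketch of how the new method might be used (not part of the patch): since Eval simply concatenates the pair before evaluating the underlying morphism, the pair is assumed here to be a list of parameter values followed by a list of input values, and the numerical values below are made up.

gap> LoadPackage( "MachineLearningForCAP" );;
gap> Para := CategoryOfParametrisedMorphisms( SkeletalSmoothMaps );;
gap> f := LossMorphismOfNeuralNetwork( Para, 2, [], 1, "IdFunc" );;
gap> # parameter values [ w1, w2, b1 ] followed by input values [ x1, x2, y ]
gap> Eval( f, [ [ 0.1, 0.2, 0.3 ], [ 1.0, 2.0, 0.5 ] ] );;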
From 2507cf52d6a5913926acc3469643add6d518419b Mon Sep 17 00:00:00 2001
From: Kamal Saleh
Date: Thu, 18 Jul 2024 10:18:01 +0200
Subject: [PATCH 2/3] no progress messages are printed while constructing a
 neural network

---
 PackageInfo.g            | 2 +-
 gap/NeuralNetworks.gi    | 6 ------
 tst/functor.tst          | 9 +++------
 tst/neural-network-1.tst | 3 ---
 tst/neural-network-2.tst | 5 -----
 5 files changed, 4 insertions(+), 21 deletions(-)

diff --git a/PackageInfo.g b/PackageInfo.g
index c7ef4b1..e060bfc 100644
--- a/PackageInfo.g
+++ b/PackageInfo.g
@@ -10,7 +10,7 @@ SetPackageInfo( rec(
 
 PackageName := "MachineLearningForCAP",
 Subtitle := "Exploring categorical machine learning in CAP",
-Version := "2024.07-15",
+Version := "2024.07-16",
 
 Date := (function ( ) if IsBound( GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE ) then return GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE; else return Concatenation( ~.Version{[ 1 .. 4 ]}, "-", ~.Version{[ 6, 7 ]}, "-01" ); fi; end)( ),
 License := "GPL-2.0-or-later",
diff --git a/gap/NeuralNetworks.gi b/gap/NeuralNetworks.gi
index 5fb6b2f..3f1b4a4 100644
--- a/gap/NeuralNetworks.gi
+++ b/gap/NeuralNetworks.gi
@@ -15,8 +15,6 @@ InstallMethod( LogitsMorphismOfNeuralNetwork,
 
     N := Length( dims );
 
-    Print( "The total number of layers is ", String( N ), "\n\n" );
-
     L := [ ];
 
     for i in [ 1 .. N - 1 ] do
@@ -25,8 +23,6 @@ InstallMethod( LogitsMorphismOfNeuralNetwork,
 
         P_i := UnderlyingObject( l_i );
 
-        Print( "Creating a morphism from layer ", String( i ), " to ", String( i + 1 ), " with ", String( RankOfObject( P_i ) ), " parameters\n" );
-
         Add( L, l_i );
 
         if i <> N - 1 then
@@ -37,8 +33,6 @@ InstallMethod( LogitsMorphismOfNeuralNetwork,
 
     od;
 
-    Print( "\n" );
-
     return PreComposeList( Para, L );
 
 end );
diff --git a/tst/functor.tst b/tst/functor.tst
index 496347e..cac1106 100644
--- a/tst/functor.tst
+++ b/tst/functor.tst
@@ -4,13 +4,10 @@ gap> Smooth := SkeletalSmoothMaps;;
 gap> Lenses := CategoryOfLenses( Smooth );;
 gap> Para := CategoryOfParametrisedMorphisms( Smooth );;
 gap> Para_Lenses := CategoryOfParametrisedMorphisms( Lenses );;
-gap> f := LossMorphismOfNeuralNetwork( Para, 2, [], 1, "IdFunc" );;
-The total number of layers is 2
-
-Creating a morphism from layer 1 to 2 with 3 parameters
+gap> ell := LossMorphismOfNeuralNetwork( Para, 2, [], 1, "IdFunc" );;
 gap> dummy_input := ConvertToExpressions( [ "w1", "w2", "b1", "x1", "x2", "y" ] );
 [ w1, w2, b1, x1, x2, y ]
-gap> Display( f : dummy_input := dummy_input );
+gap> Display( ell : dummy_input := dummy_input );
 ℝ^3 -> ℝ^1 defined by:
 
 Parameter Object:
@@ -24,7 +21,7 @@ Parametrised Morphism:
 ‣ (w1 * x1 + w2 * x2 + b1 - y) ^ 2 / 1
 gap> R := EmbeddingIntoCategoryOfParametrisedMorphisms( Para, Para_Lenses );
 Embedding into category of parametrised morphisms
-gap> Rf := ApplyFunctor( R, f );
+gap> Rf := ApplyFunctor( R, ell );
 (ℝ^3, ℝ^3) -> (ℝ^1, ℝ^1) defined by:
 
 Parameter Object:
diff --git a/tst/neural-network-1.tst b/tst/neural-network-1.tst
index 584702b..78dfa69 100644
--- a/tst/neural-network-1.tst
+++ b/tst/neural-network-1.tst
@@ -2,9 +2,6 @@ gap> Smooth := SkeletalSmoothMaps;;
 gap> Lenses := CategoryOfLenses( Smooth );;
 gap> Para := CategoryOfParametrisedMorphisms( Smooth );;
 gap> f := LossMorphismOfNeuralNetwork( Para, 2, [], 1, "IdFunc" );;
-The total number of layers is 2
-
-Creating a morphism from layer 1 to 2 with 3 parameters
 gap> optimizer := Lenses.AdamOptimizer( );;
 gap> training_examples_path := SelectBasedOnCondition( IsExistingFile( "data-1.txt" ), "data-1.txt", "tst/data-1.txt" );;
 gap> batch_size := 5;;
diff --git a/tst/neural-network-2.tst b/tst/neural-network-2.tst
index 4af8750..0d64663 100644
--- a/tst/neural-network-2.tst
+++ b/tst/neural-network-2.tst
@@ -2,11 +2,6 @@ gap> Smooth := SkeletalSmoothMaps;;
 gap> Lenses := CategoryOfLenses( Smooth );;
 gap> Para := CategoryOfParametrisedMorphisms( Smooth );;
 gap> f := LossMorphismOfNeuralNetwork( Para, 2, [ 5, 5 ], 4, "Softmax" );;
-The total number of layers is 4
-
-Creating a morphism from layer 1 to 2 with 15 parameters
-Creating a morphism from layer 2 to 3 with 30 parameters
-Creating a morphism from layer 3 to 4 with 24 parameters
 gap> optimizer := Lenses.GradientDescentOptimizer( : learning_rate := 0.01 );;
 gap> training_examples_path := SelectBasedOnCondition( IsExistingFile( "data-2.txt" ), "data-2.txt", "tst/data-2.txt" );;
 gap> batch_size := 1;;
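With the Print calls removed, constructing a network is now silent; for example, the following session sketch, mirroring the call in tst/neural-network-2.tst, produces no output:

gap> Para := CategoryOfParametrisedMorphisms( SkeletalSmoothMaps );;
gap> # previously this printed the number of layers and one line per layer morphism
gap> f := LossMorphismOfNeuralNetwork( Para, 2, [ 5, 5 ], 4, "Softmax" );;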
From 40a581d41504feaf684de99aa37142e7ad001e61 Mon Sep 17 00:00:00 2001
From: Kamal Saleh
Date: Thu, 18 Jul 2024 10:26:50 +0200
Subject: [PATCH 3/3] if the number of epochs is n, display the loss value
 n + 1 times: the loss of the initial weights followed by the loss after
 each of the n updates

---
 PackageInfo.g            | 2 +-
 gap/FitParameters.gi     | 8 +++++---
 tst/neural-network-1.tst | 2 +-
 3 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/PackageInfo.g b/PackageInfo.g
index e060bfc..cca34fa 100644
--- a/PackageInfo.g
+++ b/PackageInfo.g
@@ -10,7 +10,7 @@ SetPackageInfo( rec(
 
 PackageName := "MachineLearningForCAP",
 Subtitle := "Exploring categorical machine learning in CAP",
-Version := "2024.07-16",
+Version := "2024.07-17",
 
 Date := (function ( ) if IsBound( GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE ) then return GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE; else return Concatenation( ~.Version{[ 1 .. 4 ]}, "-", ~.Version{[ 6, 7 ]}, "-01" ); fi; end)( ),
 License := "GPL-2.0-or-later",
diff --git a/gap/FitParameters.gi b/gap/FitParameters.gi
index 159cfc0..1a6a4c4 100644
--- a/gap/FitParameters.gi
+++ b/gap/FitParameters.gi
@@ -139,7 +139,9 @@ InstallMethod( Fit,
 
     l_n := Length( String( n ) );
 
-    for i in [ 0 .. n ] do
+    Print( "Epoch ", JoinStringsWithSeparator( ListWithIdenticalEntries( l_n - 1, " " ), "" ), "0/", String( n ), " - loss = ", String( get( w )[1] ), "\n" );
+
+    for i in [ 1 .. n ] do
 
         str_i := String( i );
 
@@ -147,12 +149,12 @@ InstallMethod( Fit,
 
         spaces := JoinStringsWithSeparator( ListWithIdenticalEntries( l_n - l_i, " " ), "" );
 
+        w := put( w );
+
         loss := get( w );
 
         Print( "Epoch ", spaces, String( i ), "/", String( n ), " - loss = ", String( loss[1] ), "\n" );
 
-        w := put( w );
-
         #Display( w );
 
     od;
diff --git a/tst/neural-network-1.tst b/tst/neural-network-1.tst
index 78dfa69..5390ad9 100644
--- a/tst/neural-network-1.tst
+++ b/tst/neural-network-1.tst
@@ -60,4 +60,4 @@ Epoch 47/50 - loss = 0.00092586526390837627
 Epoch 48/50 - loss = 0.00091733056032563621
 Epoch 49/50 - loss = 0.00091003319324738866
 Epoch 50/50 - loss = 0.00090379645850528724
-[ 1021, -0.00236067, -0.00633157, 0.000258869, 0.0135747, 0.0500079, 0.0310695, 2.00197, -2.99162, 0.997524 ]
+[ 1001, -0.0024867, -0.00684601, 0.000293851, 0.0138455, 0.051012, 0.0316896, 2.00229, -2.99088, 0.997374 ]
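To make the n + 1 reports concrete, here is a small self-contained GAP sketch of the new reporting pattern; the quadratic loss and the halving update rule are made up and merely stand in for the optimizer's get and put maps:

gap> n := 3;;
gap> w := 0;;
gap> loss := w -> ( w - 1 )^2;;
gap> update := w -> w + ( 1 - w ) / 2;;
gap> # the loss of the initial weights is reported once before the loop
gap> Print( "Epoch 0/", n, " - loss = ", loss( w ), "\n" );
Epoch 0/3 - loss = 1
gap> # each of the n iterations first updates w and then reports the new loss
gap> for i in [ 1 .. n ] do w := update( w ); Print( "Epoch ", i, "/", n, " - loss = ", loss( w ), "\n" ); od;
Epoch 1/3 - loss = 1/4
Epoch 2/3 - loss = 1/16
Epoch 3/3 - loss = 1/64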