
Commit 88f5250

Merge pull request #2 from kamalsaleh/master
Fix bug in `(Sigmoid)BinaryCrossEntropyLoss`
2 parents a342ff5 + 084259d commit 88f5250

4 files changed: +30 -4 lines

PackageInfo.g (+1 -1)

@@ -10,7 +10,7 @@ SetPackageInfo( rec(
 PackageName := "MachineLearningForCAP",
 Subtitle := "Exploring categorical machine learning in CAP",
-Version := "2024.07-04",
+Version := "2024.07-09",
 Date := (function ( ) if IsBound( GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE ) then return GAPInfo.SystemEnvironment.GAP_PKG_RELEASE_DATE; else return Concatenation( ~.Version{[ 1 .. 4 ]}, "-", ~.Version{[ 6, 7 ]}, "-01" ); fi; end)( ),
 License := "GPL-2.0-or-later",

gap/CategoryOfParametrisedMorphisms.gi (+3 -2)

@@ -446,7 +446,8 @@ InstallOtherMethod( \.,
     end;

 elif f in [ "Constant", "Zero", "IdFunc", "Sum", "Mean", "Mul", "Power", "PowerBase", "Relu", "Sigmoid_", "Sigmoid", "Softmax_", "Softmax",
-            "QuadraticLoss_", "QuadraticLoss", "CrossEntropyLoss_", "CrossEntropyLoss", "SoftmaxCrossEntropyLoss_", "SoftmaxCrossEntropyLoss" ] then
+            "QuadraticLoss_", "QuadraticLoss", "CrossEntropyLoss_", "CrossEntropyLoss", "SoftmaxCrossEntropyLoss_", "SoftmaxCrossEntropyLoss",
+            "SigmoidBinaryCrossEntropyLoss_", "SigmoidBinaryCrossEntropyLoss" ] then

     return
       function ( arg... )
@@ -455,7 +456,7 @@ InstallOtherMethod( \.,

     end;

-elif f in [ "Sqrt", "Exp", "Log", "Sin", "Cos", "SigmoidBinaryCrossEntropyLoss_", "SigmoidBinaryCrossEntropyLoss" ] then
+elif f in [ "Sqrt", "Exp", "Log", "Sin", "Cos" ] then

     return C.( f ) / Para;
gap/CategoryOfSkeletalSmoothMaps.gi (+1 -1)

@@ -1408,7 +1408,7 @@ InstallOtherMethod( \.,

     return PreCompose( Smooth,
               DirectProductFunctorial( Smooth, [ Smooth.Sigmoid_( 1 ), Smooth.IdFunc( 1 ) ] ),
-             Smooth.BinaryCrossEntropyLoss_ );
+             Smooth.BinaryCrossEntropyLoss_( n ) );

     end;
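The fix here is the application to the arity: previously the last argument of PreCompose was the accessor `Smooth.BinaryCrossEntropyLoss_` itself (presumably a plain GAP function still expecting its arity, not a morphism of `Smooth`) rather than the smooth map `Smooth.BinaryCrossEntropyLoss_( n )`. For orientation, and assuming the usual convention rather than anything stated in this hunk, the composite first applies the sigmoid to the logit component and then the binary cross-entropy -( y*log(p) + (1-y)*log(1-p) ) with p = sigmoid(x); whether the package sums or averages this over the n components is not visible here.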

New file (+25 -0)

@@ -0,0 +1,25 @@
+gap> Smooth := SkeletalSmoothMaps;;
+gap> Lenses := CategoryOfLenses( Smooth );;
+gap> Para := CategoryOfParametrisedMorphisms( Smooth );;
+gap> Assert( 0, Para.Softmax_( 3 ) = Para.Softmax( 3 ) );
+gap> Assert( 0, Para.Sigmoid_( 3 ) = Para.Sigmoid( 3 ) );
+gap> Assert( 0, Para.QuadraticLoss_( 3 ) = Para.QuadraticLoss( 3 ) );
+gap> Assert( 0, Para.CrossEntropyLoss_( 3 ) = Para.CrossEntropyLoss( 3 ) );
+gap> Assert( 0, Para.SoftmaxCrossEntropyLoss_( 3 ) = Para.SoftmaxCrossEntropyLoss( 3 ) );
+gap> Assert( 0, Para.QuadraticLoss_( 3 ) = Para.QuadraticLoss( 3 ) );
+gap> Assert( 0, Para.QuadraticLoss_( 3 ) = Para.QuadraticLoss( 3 ) );
+gap> Assert( 0, Para.SigmoidBinaryCrossEntropyLoss_( 1 ) = Para.SigmoidBinaryCrossEntropyLoss( 1 ) );
+gap> Assert( 0, Para.AffineTransformation_( 3, 4 ) = Para.AffineTransformation( 3, 4 ) );
+gap> Eval( Smooth.PolynomialTransformation( 2, 3, 4 ), [ 1 .. 47 ] );
+[ 122341573, 479204128, 836066683 ]
+gap> EvalJacobianMatrix( Smooth.PolynomialTransformation( 2, 3, 4 ), [ 1 .. 47 ] );
+[ [ 4477456, 4574792, 97336, 4674244, 99452, 2116, 4775858, 101614, 2162, 46, 4879681, 103823, 2209, 47, 1, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2585050, 7818740 ],
+  [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4477456, 4574792, 97336, 4674244, 99452, 2116, 4775858, 101614, 2162, 46,
+      4879681, 103823, 2209, 47, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17701630, 23262470 ],
+  [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4477456, 4574792, 97336,
+      4674244, 99452, 2116, 4775858, 101614, 2162, 46, 4879681, 103823, 2209, 47, 1, 32818210, 38706200 ] ]
+gap> Assert( 0, Lenses.GradientDescentOptimizer_( )( 3 ) = Lenses.GradientDescentOptimizer( )( 3 ) );
+gap> Assert( 0, Lenses.GradientDescentWithMomentumOptimizer_( )( 3 ) = Lenses.GradientDescentWithMomentumOptimizer( )( 3 ) );
+gap> Assert( 0, Lenses.AdagradOptimizer_( )( 3 ) = Lenses.AdagradOptimizer( )( 3 ) );
+gap> Assert( 0, Lenses.AdamOptimizer_( )( 3 ) = Lenses.AdamOptimizer( )( 3 ) );
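As a further sanity check one could evaluate the repaired loss numerically. This is a sketch only, under the assumptions (not confirmed by this diff) that `Smooth.SigmoidBinaryCrossEntropyLoss( 1 )` exists analogously to the `Para` version tested above, that its two inputs are a logit followed by a target (the order suggested by the `DirectProductFunctorial` construction in gap/CategoryOfSkeletalSmoothMaps.gi), and that `Eval` accepts floating-point input:

gap> Smooth := SkeletalSmoothMaps;;
gap> loss := Smooth.SigmoidBinaryCrossEntropyLoss( 1 );;
gap> Eval( loss, [ 0.5, 1.0 ] );  # value omitted here; it depends on the package's exact normalisation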
