Skip to content

Commit 0cc64a1

Browse files
authored
Merge pull request cbovar#133 from cbovar/Fix_Dropout
Fix Dropout exception
2 parents 83f8ca0 + a40b365 commit 0cc64a1

File tree

3 files changed

+49
-1
lines changed

3 files changed

+49
-1
lines changed

src/ConvNetSharp.Core.Tests/ConvNetSharp.Core.Tests.csproj

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,7 @@
4646
</ItemGroup>
4747
<ItemGroup>
4848
<Compile Include="ConvLayerTests.cs" />
49+
<Compile Include="DropoutLayerTests.cs" />
4950
<Compile Include="FullyConnLayerTests.cs" />
5051
<Compile Include="GradientCheckTools.cs" />
5152
<Compile Include="PoolLayerTests.cs" />
Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
using System.Linq;
2+
using ConvNetSharp.Core.Layers;
3+
using ConvNetSharp.Volume;
4+
using ConvNetSharp.Volume.Double;
5+
using Microsoft.VisualStudio.TestTools.UnitTesting;
6+
7+
namespace ConvNetSharp.Core.Tests
{
    /// <summary>
    ///     Verifies the forward pass of <c>DropoutLayer&lt;double&gt;</c> in both
    ///     training mode (inverted dropout: survivors scaled by 1 / (1 - p)) and
    ///     inference mode (identity pass-through).
    /// </summary>
    [TestClass]
    public class DropoutLayerTests
    {
        [TestMethod]
        public void Learning()
        {
            // Large n so the empirically measured drop rate converges near the configured probability.
            var n = 1000000;
            var dropProbability = 0.2;
            var layer = new DropoutLayer<double>(dropProbability);
            layer.Init(1, 1, n);

            var input = BuilderInstance.Volume.From(new double[n].Populate(1.0), new Shape(1, 1, n, 1));
            var result = layer.DoForward(input, true); // isTraining = true -> dropout is active

            // Inverted dropout: every surviving activation of 1.0 must come out as 1 / (1 - p).
            // Compare doubles with a tolerance rather than exact bit equality — the layer's
            // internal computation of the scaling factor need not be bit-identical to ours.
            var val = result.ToArray().First(o => o != 0.0);
            var scalingFactor = 1.0 / (1.0 - dropProbability);
            Assert.AreEqual(scalingFactor, val, 1e-9); // Make sure output is scaled during learning

            // Because survivors are rescaled, the expected output mean is 1.0, so
            // average * p recovers p. Statistical check -> wide delta.
            var average = result.ToArray().Average();
            var measuredProba = average * dropProbability;
            Assert.AreEqual(dropProbability, measuredProba, 0.001); // Make sure dropout really happened
        }

        [TestMethod]
        public void NotLearning()
        {
            var n = 1000000;
            var dropProbability = 0.2;
            var layer = new DropoutLayer<double>(dropProbability);
            layer.Init(1, 1, n);

            var input = BuilderInstance.Volume.From(new double[n].Populate(1.0), new Shape(1, 1, n, 1));
            var result = layer.DoForward(input, false); // isTraining = false -> dropout disabled

            // At inference time the layer must be the identity: all 1.0 inputs pass through,
            // so the average is 1.0 (tolerance guards against summation rounding over 10^6 terms).
            var average = result.ToArray().Average();
            Assert.AreEqual(1.0, average, 1e-9); // Let everything go through
        }
    }
}

src/ConvNetSharp.Core/Layers/DropoutLayer.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ public override void Backward(Volume<T> outputGradient)
2424

2525
this.InputActivationGradients.Clear();
2626

27-
this.OutputActivation.DropoutGradient(this.InputActivation, this.OutputActivationGradients, this.InputActivationGradients, this.DropProbability);
27+
this.OutputActivation.DropoutGradient(this.InputActivation, this.OutputActivationGradients, this.DropProbability, this.InputActivationGradients);
2828
}
2929

3030
protected override Volume<T> Forward(Volume<T> input, bool isTraining = false)

0 commit comments

Comments
 (0)