Skip to content

Commit 74716a6

Browse files
authored
Merge pull request cbovar#137 from cbovar/Remove_Unused_Regularization
Remove unused regularization parameters.
2 parents 7f58b36 + 8a1252d commit 74716a6

File tree

7 files changed

+2
-20
lines changed

7 files changed

+2
-20
lines changed

Examples/Classify2DDemo/Program.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ private static void Classify2DDemo()
5252
labels.Add(1);
5353
var n = labels.Count;
5454

55-
var trainer = new SgdTrainer<double>(net) { LearningRate = 0.01, L2Decay = 0.001, BatchSize = n };
55+
var trainer = new SgdTrainer<double>(net) { LearningRate = 0.01, BatchSize = n };
5656

5757
// Training
5858
do

Examples/FluentMnistDemo/Program.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@ private void MnistDemo()
4545
{
4646
LearningRate = 0.01,
4747
BatchSize = 20,
48-
L2Decay = 0.001,
4948
Momentum = 0.9
5049
};
5150

Examples/MinimalExample/Program.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ private static void Main()
3939
// prob is a Volume. Volumes have a property Weights that stores the raw data, and WeightGradients that stores gradients
4040
Console.WriteLine("probability that x is class 0: " + prob.Get(0)); // prints e.g. 0.50101
4141

42-
var trainer = new SgdTrainer(net) { LearningRate = 0.01, L2Decay = 0.001 };
42+
var trainer = new SgdTrainer(net) { LearningRate = 0.01 };
4343
trainer.Train(x, BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(1, 1, 10, 1))); // train the network, specifying that x is class zero
4444

4545
var prob2 = net.Forward(x);

Examples/MnistDemo/Program.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@ private void MnistDemo()
4545
{
4646
LearningRate = 0.01,
4747
BatchSize = 20,
48-
L2Decay = 0.001,
4948
Momentum = 0.9
5049
};
5150

src/ConvNetSharp.Core/Layers/ConvLayer.cs

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -13,9 +13,6 @@ public class ConvLayer<T> : LayerBase<T>, IDotProductLayer<T> where T : struct,
1313

1414
public ConvLayer(Dictionary<string, object> data) : base(data)
1515
{
16-
this.L1DecayMul = Ops<T>.Zero;
17-
this.L2DecayMul = Ops<T>.One;
18-
1916
this.FilterCount = Convert.ToInt32(data["FilterCount"]);
2017
this.Width = Convert.ToInt32(data["Width"]);
2118
this.Height = Convert.ToInt32(data["Height"]);
@@ -32,9 +29,6 @@ public ConvLayer(Dictionary<string, object> data) : base(data)
3229

3330
public ConvLayer(int width, int height, int filterCount)
3431
{
35-
this.L1DecayMul = Ops<T>.Zero;
36-
this.L2DecayMul = Ops<T>.One;
37-
3832
this.FilterCount = filterCount;
3933
this.Width = width;
4034
this.Height = height;
@@ -54,10 +48,6 @@ public ConvLayer(int width, int height, int filterCount)
5448

5549
public int FilterCount { get; }
5650

57-
public T L1DecayMul { get; set; }
58-
59-
public T L2DecayMul { get; set; }
60-
6151
public int Stride
6252
{
6353
get { return this._stride; }

src/ConvNetSharp.Core/Training/SgdTrainer.cs

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,6 @@ public SgdTrainer(INet<T> net) : base(net)
1818
{
1919
}
2020

21-
public T L1Decay { get; set; }
22-
23-
public T L2Decay { get; set; }
24-
2521
public T Momentum { get; set; }
2622

2723
public T LearningRate { get; set; }

src/ConvNetSharp.Performance.Tests/Program.cs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -117,8 +117,6 @@ private static void ExecuteNeuralNet(
117117
var trainer = new SgdTrainer(net);
118118
trainer.LearningRate = 0.01;
119119
trainer.Momentum = 0.5;
120-
trainer.L1Decay = 0.01;
121-
trainer.L2Decay = 0.01;
122120
trainer.BatchSize = batchSize;
123121

124122
for (var i = 0; i < iterations; i++)

0 commit comments

Comments (0)