
Commit 3e76c19

Add LeakyReLU layer.
1 parent 1623bb8 commit 3e76c19

4 files changed: +78 -0 lines changed
Lines changed: 14 additions & 0 deletions (new file)

using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.ArgsDefinition
{
    public class LeakyReLUArgs : LayerArgs
    {
        /// <summary>
        /// Negative slope coefficient.
        /// </summary>
        public float Alpha { get; set; } = 0.3f;
    }
}
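
This args object carries the layer's single hyperparameter, presumably following the same pattern as the repository's other LayerArgs subclasses. A minimal sketch of constructing it directly (normally the keras.layers.LeakyReLU factory added below does this for you):

    // Override the 0.3f default slope; pass the args to the layer's constructor.
    var args = new LeakyReLUArgs { Alpha = 0.1f };
    var layer = new Tensorflow.Keras.Layers.LeakyReLU(args);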

src/TensorFlowNET.Keras/Layers/LayersApi.cs

Lines changed: 11 additions & 0 deletions

@@ -315,6 +315,17 @@ public Tensor max_pooling2d(Tensor inputs,
             return layer.Apply(inputs);
         }
 
+        /// <summary>
+        /// Leaky version of a Rectified Linear Unit.
+        /// </summary>
+        /// <param name="alpha">Negative slope coefficient.</param>
+        /// <returns></returns>
+        public Layer LeakyReLU(float alpha = 0.3f)
+            => new LeakyReLU(new LeakyReLUArgs
+            {
+                Alpha = alpha
+            });
+
         public Layer LSTM(int units,
             Activation activation = null,
             Activation recurrent_activation = null,
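
A minimal usage sketch of the new factory method, assuming only what this diff and the unit test below already show (the static Tensorflow.KerasApi accessor, NumSharp's np.array, and Tensor.ToArray<float>()), and applying the layer eagerly as the test does:

using System;
using NumSharp;
using Tensorflow;
using static Tensorflow.KerasApi;

class LeakyReLUDemo
{
    static void Main()
    {
        // alpha: 0.1f overrides the 0.3f default from LeakyReLUArgs.
        var layer = keras.layers.LeakyReLU(alpha: 0.1f);
        Tensor output = layer.Apply(np.array(-10.0f, 0.0f, 2.0f));
        // Negative inputs are scaled by alpha: -10.0f * 0.1f == -1.0f;
        // non-negative inputs pass through unchanged.
        Console.WriteLine(string.Join(", ", output.ToArray<float>()));
    }
}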
Lines changed: 27 additions & 0 deletions (new file)

using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers
{
    /// <summary>
    /// Leaky version of a Rectified Linear Unit.
    /// </summary>
    public class LeakyReLU : Layer
    {
        LeakyReLUArgs args;
        float alpha => args.Alpha;
        public LeakyReLU(LeakyReLUArgs args) : base(args)
        {
            this.args = args;
        }

        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
        {
            return tf.nn.leaky_relu(inputs, alpha: alpha);
        }
    }
}
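
The layer itself is stateless: Call just forwards to tf.nn.leaky_relu. For reference, the elementwise function it computes is (a plain C# sketch of the math, not the TensorFlow kernel):

// f(x) = x         for x >= 0
// f(x) = alpha * x for x < 0   (equivalently max(x, alpha * x) when 0 <= alpha <= 1)
static float LeakyRelu(float x, float alpha = 0.3f)
    => x >= 0f ? x : alpha * x;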
Lines changed: 26 additions & 0 deletions (new file)

using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Text;
using NumSharp;
using static Tensorflow.KerasApi;
using Tensorflow;

namespace TensorFlowNET.UnitTest.Keras
{
    [TestClass]
    public class ActivationTest : EagerModeTestBase
    {
        [TestMethod]
        public void LeakyReLU()
        {
            var layer = keras.layers.LeakyReLU();
            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
            var outputArray = output.ToArray<float>();
            assertFloat32Equal(-0.9f, outputArray[0], "Not equal");
            assertFloat32Equal(-0.3f, outputArray[1], "Not equal");
            assertFloat32Equal(0.0f, outputArray[2], "Not equal");
            assertFloat32Equal(2.0f, outputArray[3], "Not equal");
        }
    }
}
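
With the default alpha of 0.3f the expected values follow directly: the negative inputs are scaled (-3.0 * 0.3 = -0.9, -1.0 * 0.3 = -0.3) while 0.0 and 2.0 pass through unchanged, which is exactly what the four assertions check. The assertFloat32Equal helper presumably comes from EagerModeTestBase and compares within a float tolerance.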
