ReLU

Rectified linear unit nonlinearity (using cuDNN).

TODO: implement scale with cudnnScaleTensor

struct ReLU(T, size_t dim) {
    bool inplace;
    bool useCuDNN;
    Variable!(T, dim, HostStorage) hx;   // input cached on the host for the CPU backward
    Variable!(T, dim, DeviceStorage) dx; // device input cached for the cuDNN backward
    Variable!(T, dim, DeviceStorage) dy; // device output cached for the cuDNN backward
}
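
For orientation, here is a minimal sketch of the element-wise semantics the CPU path of this function appears to implement; reluForward and reluBackward are hypothetical helper names for illustration, not part of grain. Note that the backward zeroes the gradient only where the input was strictly negative, so the gradient at x == 0 passes through, matching the CPU assertions in the example below.

// Hypothetical sketch, not grain's implementation: element-wise ReLU on raw arrays.
float[] reluForward(const float[] x) {
    auto y = new float[x.length];
    foreach (i, v; x)
        y[i] = v > 0 ? v : 0; // y = max(0, x)
    return y;
}

float[] reluBackward(const float[] x, const float[] gy) {
    auto gx = gy.dup;
    foreach (i, v; x)
        if (v < 0)
            gx[i] = 0; // zero only where x < 0; x == 0 passes gy through
    return gx;
}

The cuDNN path behaves differently at exactly zero: the CUDA assertion in the example below expects a zero gradient where x == 0.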

Examples

test relu

import grain.testing : gradCheck;

foreach (inplace; [true, false]) {
    foreach (useCuDNN; [true, false]) {
        auto func = new ReLU!(float, 1);
        func.inplace = inplace;
        func.useCuDNN = useCuDNN;

        // test CPU
        {
            auto x = [-1.0f, 1.0f, 0.0f].variable;
            // gradCheck fails here, presumably because ReLU is non-smooth at zero:
            // gradCheck(func, x, [0.1f, 0.1f, 0.1f].variable);

            auto y = func.forward(x);
            assert(x.data == (inplace ? y.data : [-1.0f, 1.0f, 0.0f]));
            assert(y.data == [0.0f, 1.0f, 0.0f]);

            auto gy = [1.0f, 2.0f, 3.0f].variable;
            auto gx = func.backward(gy);
            // the CPU backward passes the gradient through at x == 0
            assert(gx.data == [0.0f, 2.0f, 3.0f]);
        }

        // test CUDA
        version (grain_cuda) {
            auto x = [-1.0f, 1.0f, 0.0f].variable;
            auto xd = x.to!DeviceStorage;
            auto yd = func.forward(xd);
            x = xd.to!HostStorage;
            auto y = yd.to!HostStorage;
            assert(x.data == (inplace ? y.data : [-1.0f, 1.0f, 0.0f]));
            assert(y.data == [0.0f, 1.0f, 0.0f]);

            x = [-1.0f, 1.0f, 0.0f].variable;
            auto gy = [1.0f, 2.0f, 3.0f].variable;
            auto gxd = func.backward(gy.to!DeviceStorage);
            auto gx = gxd.to!HostStorage;
            // unlike the CPU path, the cuDNN backward zeroes the gradient at x == 0
            assert(gx.data == [0.0f, 2.0f, 0.0f]);
        }
    }
}
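
For a standalone flavor of the API, here is a minimal CPU-side usage sketch mirroring the test above; it assumes the same symbols the test uses (.variable, forward, backward) are in scope.

auto relu = new ReLU!(float, 1);
relu.inplace = false;

// forward clamps negatives to zero
auto y = relu.forward([-2.0f, 0.5f].variable);
assert(y.data == [0.0f, 0.5f]);

// backward masks the gradient where the input was negative
auto gx = relu.backward([1.0f, 1.0f].variable);
assert(gx.data == [0.0f, 1.0f]);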
