% Standard NN (as seen in the scatter_net_core forward pass) ported to MATLAB.
function [f,dfdx] = NN(weights,biases,input)
%NN  Evaluate a fully connected ReLU network and its Jacobian w.r.t. the input.
%   weights and biases must be cell arrays of matrices/vectors, one entry per
%   layer; input must be a column vector.
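%
%   A minimal usage sketch (the layer sizes and random data below are made-up
%   illustrations, not values taken from scatter_net_core):
%
%       rng(0);
%       weights = {randn(10,3), randn(10,10), randn(2,10)};   %3-layer network
%       biases  = {randn(10,1), randn(10,1), randn(2,1)};
%       x = randn(3,1);
%       [f, dfdx] = NN(weights, biases, x);   %f is 2x1, dfdx is 2x3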
depth = numel(weights);    %number of layers
chain = cell(1,depth-1);   %ReLU derivative factors for the chain rule
%Feedforward: first layer
layer = weights{1}*input;
dim = size(layer,1);
dydx = zeros(dim,dim);     %diagonal d(ReLU)/dx for this layer
z = 0;                     %flag: 1 if any unit was clipped by the ReLU
layer = layer + biases{1}; %standard affine layer: W_1*x + b_1
for i = 1:dim              %apply the ReLU and record its derivative
    if layer(i) > 0
        dydx(i,i) = 1;
    else
        layer(i) = 0;
        z = 1;
    end
end
%Append dL_1/dx evaluated at W_1*x + b_1 to chain
%If dL_1/dx = I, append the scalar 1 instead to make the later multiplies faster
if z == 0
    chain{1} = 1;
else
    chain{1} = dydx;
end
%Feedforward: remaining layers (the final layer has no activation)
for j = 2:depth
    layer = weights{j}*layer;
    dim = size(layer,1);
    layer = layer + biases{j};
    if j ~= depth
        dydx = zeros(dim,dim);
        z = 0;
        for i = 1:dim          %apply the ReLU and record its derivative
            if layer(i) > 0
                dydx(i,i) = 1;
            else
                layer(i) = 0;
                z = 1;
            end
        end
        %Append dL_j/dx evaluated at W_j*L_{j-1} + b_j to chain
        %If dL_j/dx = I, append the scalar 1 instead to make the later multiplies faster
        if z == 0
            chain{j} = 1;
        else
            chain{j} = dydx;
        end
    end
end
f = layer;

%Gradient: chain rule, dfdx = W_depth * D_{depth-1} * W_{depth-1} * ... * D_1 * W_1,
%where D_j is the ReLU derivative stored in chain{j} (or the scalar 1)
df = chain{1}*weights{1};
for i = 2:depth
    df = weights{i}*df;
    if i ~= depth
        df = chain{i}*df;
    end
end
dfdx = df;
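
%A hedged sanity-check sketch (not part of the original file): compare dfdx
%against central finite differences from a separate script. The network below
%and the step size h are arbitrary illustrations.
%
%   rng(0);
%   weights = {randn(10,3), randn(10,10), randn(2,10)};
%   biases  = {randn(10,1), randn(10,1), randn(2,1)};
%   x = randn(3,1);
%   [~, dfdx] = NN(weights, biases, x);
%   h = 1e-6;
%   dfdx_fd = zeros(size(dfdx));
%   for k = 1:numel(x)
%       e = zeros(size(x)); e(k) = h;
%       dfdx_fd(:,k) = (NN(weights,biases,x+e) - NN(weights,biases,x-e)) / (2*h);
%   end
%   max(abs(dfdx(:) - dfdx_fd(:)))   %should be tiny away from ReLU kinks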