backprop.m~
function [W,b,a] = backprop(p, t, L, s, learning_rate)
% BACKPROP  Train a multilayer log-sigmoid network with backpropagation.
%   p = input patterns (one pattern per row), t = targets (one column per
%   pattern), L = number of hidden layers, s = neurons per hidden layer,
%   learning_rate = gradient-descent step size.
%   Returns the trained weights W, biases b, and the layer activations a
%   from the last pattern processed.
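%
%   Minimal usage sketch (the data below is made up for illustration only;
%   logsig ships with the Deep Learning Toolbox, or 1./(1+exp(-n)) can stand in):
%     p = rand(10, 64);                         % 10 patterns, 64 features each
%     t = double(rand(3, 10) > 0.5);            % 3 binary targets per pattern
%     [W, b, a] = backprop(p, t, 2, 20, 0.1);   % 2 hidden layers of 20 neurons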
% (optional) count the input patterns and preallocate a cell array of outputs
% input_counter = size(p,1);
% outputs = cell(input_counter,1);
% store all per-layer matrices in cell arrays
total_layers = (L+1); %hidden layers + output
W = cell(total_layers,1); %weight
b = cell(total_layers,1); %bias
n = cell(total_layers,1); %output before transfer function
a = cell(total_layers,1); %output for neuron
S = cell(total_layers,1); %sensitivity
alpha = learning_rate; %learning_rate
%generate random weights and biases for every layer
for m = 1:total_layers
    if (m == 1)
        %first hidden layer: map the input features onto s neurons
        W{m} = rand(s, size(p,2))*.1;
        b{m} = rand(s, 1);
    elseif (m == total_layers)
        %output layer: one neuron per row of the target matrix t
        W{m} = rand(size(t,1), s)*.1;
        b{m} = rand(size(t,1), 1);
    else
        %weights and biases between hidden layers
        W{m} = rand(s)*.1;
        b{m} = rand(s, 1);
    end
end
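% After initialization the cell arrays hold one entry per layer:
%   W{1} is s-by-(number of input features), W{2}..W{L} are s-by-s,
%   and W{L+1} is (rows of t)-by-s; each b{m} is a matching column vector.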
%training loop: feedforward, propagate sensitivities backwards, update weights
x = 1;
iterations = 650;
mse = zeros(1, iterations);  %mean squared error for each pass through the data
while x <= iterations
    sq_err = 0;  %running squared-error total for this pass
    %feedforward
    for i = 1:length(t)
        for m = 1:total_layers
            if (m == 1)
                n{m} = W{m}*p(i,:)' + b{m};
            else
                n{m} = W{m}*a{m-1} + b{m};
            end
            a{m} = logsig(n{m}); %calculate the output for each layer
            %show the last output for all digits in the input
            % if x == 999 && m == total_layers
            %     outputs{i} = a{m};
            %     celldisp(outputs);
            % end
        end
        % outputs(:,i) = a{total_layers};
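        % The loop below is the standard sensitivity recursion for log-sigmoid
        % layers (written out here only as a reference for the code):
        %   logsig'(n) = a.*(1-a), applied elementwise and stored as diag(...),
        %   S{last} = -2*diag(a.*(1-a))*(t - a)        at the output layer,
        %   S{m}    = diag(a.*(1-a))*W{m+1}'*S{m+1}    for the earlier layers.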
        %propagate sensitivities backwards
        for m = total_layers:-1:1
            diff_sig = diag((1-a{m}).*a{m}); %logsig derivative as a diagonal matrix
            if (m == total_layers)
                S{m} = -2*diff_sig*(t(:,i) - a{m});
            else
                S{m} = diff_sig*W{m+1}'*S{m+1};
            end
        end
        %accumulate the squared error for this pattern
        sq_err = sq_err + sum((t(:,i) - a{total_layers}).^2);
        %update weights and biases
        for m = 1:total_layers
            if (m == 1)
                W{m} = W{m} - alpha*S{m}*p(i,:);
            else
                W{m} = W{m} - alpha*S{m}*a{m-1}';
            end
            b{m} = b{m} - alpha*S{m};
        end
    end
    mse(x) = sq_err/length(t);  %mean squared error over all patterns this pass
    x = x + 1;
end
plot(1:iterations, mse);  %training error curve
end
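% Note: weights and biases are updated after every individual pattern
% (incremental gradient descent), so the trained network depends on the
% pattern order and on the random initialization above.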