Skip to content

Commit

Permalink
HW1
Browse files Browse the repository at this point in the history
HW1 description and implementation.
  • Loading branch information
glfpesYG committed Mar 19, 2015
1 parent 94f62cc commit 5f4025e
Show file tree
Hide file tree
Showing 9 changed files with 462 additions and 0 deletions.
131 changes: 131 additions & 0 deletions HW1/BP_batch.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@

%%四个参数依次是a1,a2,eta1,eta2
function BP_batch = BP_batch(p1,p2,p3,p4)
%% Batch back-propagation training of a quadratic-neuron network on the
%% two-spiral data set.
%% Parameters: p1,p2 = momentum factors a1,a2; p3,p4 = learning rates eta1,eta2.
%% Returns the mean signed error (target - prediction) over the test set.
data_train = load('two_spiral_train.txt');
data_test = load('two_spiral_test.txt');

%%------------------------STEP1 INITIALIZE-----------------------------
%% Number of hidden-layer neurons.
num_of_hidden_neurons = 10;
%% Each row: [u weight of input 1, u weight of input 2, bias weight];
%% one row per hidden neuron. Weights start uniform in [-1,1].
u1 = rand(num_of_hidden_neurons,3).*2-1;
delta_u1_temp = zeros(num_of_hidden_neurons,3); %% accumulated delta for u1
%% Each row: [v weight of input 1, v weight of input 2]; one row per hidden neuron.
v1 = rand(num_of_hidden_neurons,2).*2-1;
delta_v1_temp = zeros(num_of_hidden_neurons,2);

%% Hidden-to-output u weights plus one trailing bias weight.
u2 = rand(num_of_hidden_neurons+1,1).*2-1;
delta_u2_temp = zeros(num_of_hidden_neurons+1,1);
%% Hidden-to-output v weights.
v2 = rand(num_of_hidden_neurons,1).*2-1;
delta_v2_temp = zeros(num_of_hidden_neurons,1);

%% Momentum factors and learning rates.
a1 = p1;
a2 = p2;
eta1 = p3;
eta2 = p4;

%%------------------------STEP2 TRAINING-------------------------------
%% Batch training: one pass over the training set accumulating weight
%% deltas; the weights themselves are updated once after the pass.
t1 = clock;
for i=1:size(data_train,1)
    raw_input1 = data_train(i,1);
    raw_input2 = data_train(i,2);
    raw_output = data_train(i,3);

    %% Feed forward: net input of each hidden neuron (argument of the sigmoid).
    hidden_input = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_input(j,1) = input_of_hidden(raw_input1,raw_input2,u1(j,1),u1(j,2),u1(j,3),v1(j,1),v1(j,2));
    end
    %% Hidden-layer activations.
    hidden_output = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_output(j,1) = logsig(hidden_input(j,1));
    end

    %% Output neuron: net input and activation. Feed forward is complete here.
    output_input = input_of_output(u2,v2,hidden_output);
    output_output = logsig(output_input);

    %% Back propagation 1: output layer.
    err = raw_output - output_output;
    delta_outputlayer = err * exp(-1 * output_input) * output_output^2;
    for j=1:num_of_hidden_neurons
        delta_u2_temp(j,1) = a1*delta_u2_temp(j,1) + eta1 * delta_outputlayer * hidden_output(j,1)^2;
        delta_v2_temp(j,1) = a2*delta_v2_temp(j,1) + eta2 * delta_outputlayer * hidden_output(j,1);
    end
    %% Bias weight of the output neuron.
    delta_u2_temp(num_of_hidden_neurons+1,1) = a1*delta_u2_temp(num_of_hidden_neurons+1,1) + eta1 * delta_outputlayer * 1;

    %% Back propagation 2: hidden layer.
    for j=1:num_of_hidden_neurons
        input_v = hidden_input(j,1);
        output_x = logsig(input_v);
        delta_hiddenlayer = exp(-1*input_v)*output_x^2 * delta_outputlayer * (2*u2(j,1)*output_x+v2(j,1));
        for k=1:2
            delta_u1_temp(j,k) = a1*delta_u1_temp(j,k) + eta1 * delta_hiddenlayer * data_train(i,k)^2;
            delta_v1_temp(j,k) = a2*delta_v1_temp(j,k) + eta2 * delta_hiddenlayer * data_train(i,k);
        end
        delta_u1_temp(j,3) = a1*delta_u1_temp(j,3) + eta1 * delta_hiddenlayer * 1;
    end
end

%% Apply the accumulated deltas to the weights exactly once.
%% BUGFIX: the original wrapped this in "for i=1:size(data_train,1)", applying
%% the same deltas once per training sample, and updated u1/v1 with a stale
%% loop index j (only row 10 was ever touched).
for j=1:num_of_hidden_neurons
    u2(j,1) = u2(j,1) + delta_u2_temp(j,1);
    v2(j,1) = v2(j,1) + delta_v2_temp(j,1);
    for k=1:2
        u1(j,k) = u1(j,k) + delta_u1_temp(j,k);
        v1(j,k) = v1(j,k) + delta_v1_temp(j,k);
    end
    u1(j,3) = u1(j,3) + delta_u1_temp(j,3);
end
u2(num_of_hidden_neurons+1,1) = u2(num_of_hidden_neurons+1,1) + delta_u2_temp(num_of_hidden_neurons+1,1);
disp(['batch training程序总运行时间:',num2str(etime(clock,t1))]);

%%--------------------STEP3 TESTING-----------------------------------
%% Feed forward only; no weight updates during testing.
error = 0;
data_test_online=data_test;
for i=1:size(data_test,1)
    raw_input1 = data_test(i,1);
    raw_input2 = data_test(i,2);
    raw_output = data_test(i,3);
    %% Net input of each hidden neuron (zeros, not rand: fully overwritten below).
    hidden_input = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_input(j,1) = input_of_hidden(raw_input1,raw_input2,u1(j,1),u1(j,2),u1(j,3),v1(j,1),v1(j,2));
    end
    %% Hidden-layer activations.
    hidden_output = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_output(j,1) = logsig(hidden_input(j,1));
    end

    %% Output neuron prediction for this test sample.
    output_input = input_of_output(u2,v2,hidden_output);
    output_output = logsig(output_input);
    data_test_online(i,3) = output_output;
    %% Accumulate the signed error over the test set.
    err = raw_output - output_output;
    error = error+err;
end
error = error/size(data_test,1);
BP_batch = error;
end
127 changes: 127 additions & 0 deletions HW1/BP_online.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,127 @@

%%四个参数依次是a1,a2,eta1,eta2
function BP_online = BP_online(p1,p2,p3,p4)
%% Online (per-sample) back-propagation training of a quadratic-neuron
%% network on the two-spiral data set.
%% Parameters: p1,p2 = momentum factors a1,a2; p3,p4 = learning rates eta1,eta2.
%% Returns the mean signed error (target - prediction) over the test set.
data_train = load('two_spiral_train.txt');
data_test = load('two_spiral_test.txt');

%%------------------------STEP1 INITIALIZE-----------------------------
%% Number of hidden-layer neurons.
num_of_hidden_neurons = 10;
%% Each row: [u weight of input 1, u weight of input 2, bias weight];
%% one row per hidden neuron. Weights start uniform in [-1,1].
u1 = rand(num_of_hidden_neurons,3).*2-1;
delta_u1_temp = zeros(num_of_hidden_neurons,3); %% momentum-carrying delta for u1
%% Each row: [v weight of input 1, v weight of input 2]; one row per hidden neuron.
v1 = rand(num_of_hidden_neurons,2).*2-1;
delta_v1_temp = zeros(num_of_hidden_neurons,2);

%% Hidden-to-output u weights plus one trailing bias weight.
u2 = rand(num_of_hidden_neurons+1,1).*2-1;
delta_u2_temp = zeros(num_of_hidden_neurons+1,1);
%% Hidden-to-output v weights.
v2 = rand(num_of_hidden_neurons,1).*2-1;
delta_v2_temp = zeros(num_of_hidden_neurons,1);

%% Momentum factors and learning rates.
a1 = p1;
a2 = p2;
eta1 = p3;
eta2 = p4;

%%------------------------STEP2 TRAINING-------------------------------
%% Online training: weights are updated immediately after each sample.
t1 = clock;
for i=1:size(data_train,1)
    raw_input1 = data_train(i,1);
    raw_input2 = data_train(i,2);
    raw_output = data_train(i,3);

    %% Feed forward: net input of each hidden neuron (argument of the sigmoid).
    hidden_input = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_input(j,1) = input_of_hidden(raw_input1,raw_input2,u1(j,1),u1(j,2),u1(j,3),v1(j,1),v1(j,2));
    end
    %% Hidden-layer activations.
    hidden_output = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_output(j,1) = logsig(hidden_input(j,1));
    end

    %% Output neuron: net input and activation. Feed forward is complete here.
    output_input = input_of_output(u2,v2,hidden_output);
    output_output = logsig(output_input);

    %% Back propagation 1: output layer.
    err = raw_output - output_output;

    delta_outputlayer = err * exp(-1 * output_input) * output_output^2;
    for j=1:num_of_hidden_neurons
        delta_u2_temp(j,1) = a1*delta_u2_temp(j,1) + eta1 * delta_outputlayer * hidden_output(j,1)^2;
        delta_v2_temp(j,1) = a2*delta_v2_temp(j,1) + eta2 * delta_outputlayer * hidden_output(j,1);
    end
    %% Bias weight of the output neuron.
    delta_u2_temp(num_of_hidden_neurons+1,1) = a1*delta_u2_temp(num_of_hidden_neurons+1,1) + eta1 * delta_outputlayer * 1;
    %% Apply output-layer deltas immediately (online update).
    %% NOTE(review): the hidden-layer gradient below therefore uses the
    %% already-updated u2/v2 — preserved from the original; confirm intended.
    for j=1:num_of_hidden_neurons
        u2(j,1) = u2(j,1) + delta_u2_temp(j,1);
        v2(j,1) = v2(j,1) + delta_v2_temp(j,1);
    end
    u2(num_of_hidden_neurons+1,1) = u2(num_of_hidden_neurons+1,1) + delta_u2_temp(num_of_hidden_neurons+1,1);

    %% Back propagation 2: hidden layer (delta computed and applied per neuron).
    for j=1:num_of_hidden_neurons
        input_v = hidden_input(j,1);
        output_x = logsig(input_v);
        delta_hiddenlayer = exp(-1*input_v)*output_x^2 * delta_outputlayer * (2*u2(j,1)*output_x+v2(j,1));
        for k=1:2
            delta_u1_temp(j,k) = a1*delta_u1_temp(j,k) + eta1 * delta_hiddenlayer * data_train(i,k)^2;
            delta_v1_temp(j,k) = a2*delta_v1_temp(j,k) + eta2 * delta_hiddenlayer * data_train(i,k);
        end
        delta_u1_temp(j,3) = a1*delta_u1_temp(j,3) + eta1 * delta_hiddenlayer * 1;
        for k=1:2
            u1(j,k) = u1(j,k) + delta_u1_temp(j,k);
            v1(j,k) = v1(j,k) + delta_v1_temp(j,k);
        end
        u1(j,3) = u1(j,3) + delta_u1_temp(j,3);
    end
end
disp(['online training程序总运行时间:',num2str(etime(clock,t1))]);

%%--------------------STEP3 TESTING-----------------------------------
%% Feed forward only; no weight updates during testing.
error = 0;
data_test_online=data_test;
for i=1:size(data_test,1)
    raw_input1 = data_test(i,1);
    raw_input2 = data_test(i,2);
    raw_output = data_test(i,3);
    %% Net input of each hidden neuron (zeros, not rand: fully overwritten below).
    hidden_input = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_input(j,1) = input_of_hidden(raw_input1,raw_input2,u1(j,1),u1(j,2),u1(j,3),v1(j,1),v1(j,2));
    end
    %% Hidden-layer activations.
    hidden_output = zeros(num_of_hidden_neurons,1);
    for j=1:num_of_hidden_neurons
        hidden_output(j,1) = logsig(hidden_input(j,1));
    end

    %% Output neuron prediction for this test sample.
    output_input = input_of_output(u2,v2,hidden_output);
    output_output = logsig(output_input);
    data_test_online(i,3) = output_output;
    %% Accumulate the signed error over the test set.
    err = raw_output - output_output;
    error = error+err;
end
error = error/size(data_test,1);
BP_online = error;
end
Binary file added HW1/HW1 Description.pdf
Binary file not shown.
Binary file added HW1/HW1.pdf
Binary file not shown.
Empty file added HW1/Readme.md
Empty file.
4 changes: 4 additions & 0 deletions HW1/input_of_hidden.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
function input_hidden = input_of_hidden(i1, i2, u1, u2, u3, v1, v2)
%% Net input of one hidden quadratic neuron for the pair of raw inputs
%% (i1, i2): quadratic terms weighted by u1/u2, linear terms weighted by
%% v1/v2, plus the bias weight u3 on a constant input of 1.
quadratic_part = u1*i1^2 + u2*i2^2;
linear_part = v1*i1 + v2*i2;
input_hidden = quadratic_part + linear_part + u3;
end

8 changes: 8 additions & 0 deletions HW1/input_of_output.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
function out_input = input_of_output(u,v,input)
%% Net input of the output neuron: for each hidden activation input(i),
%% add u(i)*input(i)^2 + v(i)*input(i), then add the bias weight stored
%% in the final element of u (on a constant input of 1).
%% GENERALIZED: the hidden-layer size is taken from numel(v) instead of
%% being hard-coded to 10/11; u must have numel(v)+1 elements.
n = numel(v);
temp = 0;
for i=1:n
    temp = temp + u(i)*input(i)^2 + v(i)*input(i);
end
temp = temp + 1*u(n+1);
out_input = temp;
end
96 changes: 96 additions & 0 deletions HW1/two_spiral_test.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
-0.0978 0.5476 1
0.0978 -0.5476 0
-0.2255 0.5762 1
0.2255 -0.5762 0
-0.3673 0.5738 1
0.3673 -0.5738 0
-0.5155 0.5361 1
0.5155 -0.5361 0
-0.6614 0.461 1
0.6614 -0.461 0
-0.7959 0.3482 1
0.7959 -0.3482 0
-0.9096 0.1996 1
0.9096 -0.1996 0
-0.9936 0.0195 1
0.9936 -0.0195 0
-1.0398 -0.1857 1
1.0398 0.1857 0
-1.0418 -0.4078 1
1.0418 0.4078 0
-0.9949 -0.6369 1
0.9949 0.6369 0
-0.8966 -0.862 1
0.8966 0.862 0
-0.7469 -1.0717 1
0.7469 1.0717 0
-0.5485 -1.254 1
0.5485 1.254 0
-0.3067 -1.398 1
0.3067 1.398 0
-0.0293 -1.4935 1
0.0293 1.4935 0
0.2736 -1.532 1
-0.2736 1.532 0
0.59 -1.5074 1
-0.59 1.5074 0
0.9064 -1.416 1
-0.9064 1.416 0
1.2086 -1.257 1
-1.2086 1.257 0
1.4818 -1.0328 1
-1.4818 1.0328 0
1.7121 -0.7489 1
-1.7121 0.7489 0
1.8864 -0.4139 1
-1.8864 0.4139 0
1.9934 -0.0391 1
-1.9934 0.0391 0
2.0242 0.3615 1
-2.0242 -0.3615 0
1.973 0.7722 1
-1.973 -0.7722 0
1.8371 1.176 1
-1.8371 -1.176 0
1.6174 1.5551 1
-1.6174 -1.5551 0
1.3187 1.8921 1
-1.3187 -1.8921 0
0.9493 2.1702 1
-0.9493 -2.1702 0
0.521 2.3748 1
-0.521 -2.3748 0
0.049 2.4933 1
-0.049 -2.4933 0
-0.4494 2.5164 1
0.4494 -2.5164 0
-0.9545 2.4386 1
0.9545 -2.4386 0
-1.4456 2.2582 1
1.4456 -2.2582 0
-1.9017 1.9778 1
1.9017 -1.9778 0
-2.3022 1.6046 1
2.3022 -1.6046 0
-2.6283 1.1497 1
2.6283 -1.1497 0
-2.8632 0.6282 1
2.8632 -0.6282 0
-2.9932 0.0588 1
2.9932 -0.0588 0
-3.0086 -0.5373 1
3.0086 0.5373 0
-2.9042 -1.1367 1
2.9042 1.1367 0
-2.6793 -1.7151 1
2.6793 1.7151 0
-2.3383 -2.2482 1
2.3383 2.2482 0
-1.8905 -2.7125 1
1.8905 2.7125 0
-1.35 -3.0864 1
1.35 3.0864 0
-0.7353 -3.3515 1
0.7353 3.3515 0
-0.0686 -3.4931 1
0.0686 3.4931 0
Loading

0 comments on commit 5f4025e

Please sign in to comment.