% Training a linear layer (general_linear.m).
%
% Demo script: trains a single-layer linear network with the
% Widrow-Hoff (LMS) rule to map input patterns P onto targets T,
% then plots the per-vector errors and shows rounded predictions.
%
% Requires in the current directory / on the path:
%   P.dat, T.dat  - input and target data (loaded below)
%   initlin, trainwh, simulin, barerr, setfsize
%                 - old Neural Network Toolbox demo helpers

clf;
figure(gcf)
setfsize(400,300);   % demo helper: set the current figure to 400x300 pixels
echo on              % echo each script line so the demo narrates itself on screen
clc

%    INITLIN  - Initializes a linear layer.
%    TRAINWH  - Trains a linear layer with Widrow-Hoff rule.
%    SIMULIN  - Simulates a linear layer.

%    TRAINING A LINEAR LAYER:

%    Using the above functions a linear layer is trained
%    to respond to specific inputs with target outputs.

pause % Strike any key to continue...
clc

%    DEFINING A PROBLEM
%    ==================

%    P defines input patterns (column vectors):

load P.dat
P

%    T defines associated targets (column vectors):

load T.dat
T

pause % Strike any key to design the network...
clc
%    DEFINE THE NETWORK
%    ==================

%    INITLIN generates initial weights and biases for our neuron:

[W,b] = initlin(P,T)

W
b

clc
%    TRAINING THE NETWORK
%    ====================

%    TRAINWH uses the Widrow-Hoff rule to train PURELIN networks.

me = 200;                        % Maximum number of epochs.
eg = 0.001;                      % Sum-squared error goal.

%    Training begins...please wait...

% Training-parameter vector appears to be [disp_freq max_epochs err_goal lr];
% NaN entries presumably request the toolbox defaults -- TODO confirm
% against the TRAINWH documentation for this toolbox version.
[W,b,epochs,errors] = trainwh(W,b,P,T,[NaN me eg NaN]);

%    ...and finishes.

W
b

pause % Strike any key to see a plot of errors...
clc
%    PLOTTING INDIVIDUAL ERRORS FOR EACH VECTOR
%    ==========================================

barerr(T-simulin(P,W,b))
pause % Strike any key to see predictions of model

A = round(simulin(P,W,b))   % network outputs rounded to nearest integer
T                           % targets, for side-by-side comparison
E = T - A                   % residual error after rounding

echo off
disp('End of general_linear.m')


% (Removed stray web-page footer text left over from HTML extraction;
%  as bare text it was a syntax error in this .m file.)