Step 5: Train a 2-input, 4-hidden-node, 1-output network to learn XOR via backpropagation.

% Reset the workspace, command window, and any open figures.
clear;
clc;
close all;

% Training inputs: all four possible XOR input pairs, one per row.
X = [0 0
     0 1
     1 0
     1 1];

% Desired outputs: XOR truth table matching the rows of X.
D = [0
     1
     1
     0];

% Random initial weights for both layers:
% W1 connects the 2 inputs to the 4 hidden nodes,
% W2 connects the 4 hidden nodes to the single output node.
W1 = rand(4,2);
W2 = rand(1,4);

% Train for a fixed number of epochs, echoing progress so the
% training can be watched live in the command window.
for epoch = 1:10000
    clc;                                  % keep the command window tidy
    disp(['Epoch # ' num2str(epoch)]);    % show current epoch number
    [W1 W2] = BackpropXOR(W1,W2,X,D);     % one weight update (defined elsewhere)
    W1   % echo updated input->hidden weights
    W2   % echo updated hidden->output weights
end