
ARTIFICIAL NEURAL NETWORKS
(COMP 442)
LAB MANUAL
Department of Computer Science
College of Computer Science & Information Systems
Ministry of Higher Education | Jazan University
Lab Course

Lab#  Practical

      Revision of C++ Programming concepts:
1     Program to show how to compute random numbers.
2     Program to compute the sigmoid function.
3     Example showing how to compute the summation part of the neuron.
4     Threshold Function: simulating the McCulloch & Pitts neurons for 2-input logical OR and 2-input logical AND.
5     Error-Correction Learning Rule.
6     Perceptron: example on representing a line, and classifying points as above or below the line.
7     Classify the perceptron.
8     Implement the learning algorithm.
9     Initialize input signals and synaptic weights randomly, compute the summation part of the neurons, and apply the MLP logistic function.
10    Initialize input signals and synaptic weights randomly, compute the summation part of the neurons, and apply the MLP hyperbolic tangent function.
//Lab #1: Write a program to compute random numbers.
#include<iostream>
#include<cstdlib>
#include<cmath>
#include<conio.h>
using namespace std;
int main()
{
int i, x[10], w[10];
cout<<"Display random numbers"<<endl<<endl;
for(i=0;i<10;i++)
{
x[i]= rand() % 5 + 1; //Gives a number between 1 and 5.
w[i]= rand() % 3 + 1; //Gives a number between 1 and 3.
cout<< "x[i] = " << x[i] <<"
w[i] = " << w[i] <<endl;
}
getch();
}
OUTPUT:
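Note: rand() is not seeded in Lab 1, so the program prints the same sequence of "random" numbers on every run. The sketch below (not part of the original lab) seeds the generator with the current time, the same way Lab 5 does with srand(time(NULL)).

//Sketch: seeding the random number generator (illustrative).
#include<iostream>
#include<cstdlib>
#include<ctime>
using namespace std;
int main()
{
srand((unsigned)time(NULL)); // seed once, at program start
for(int i=0;i<10;i++)
cout<< rand() % 5 + 1 <<endl; // numbers between 1 and 5, different on each run
return 0;
}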
//Lab # 2: Write a program to compute the sigmoid function.
#include<iostream>
#include<cmath>
#include<conio.h>
using namespace std;
int main()
{
float j;
for(j=-1.0; j<=1.0+1e-9; j=j+0.1) // j runs from -1.0 to 1.0 in steps of 0.1
{
cout<< "j = " <<j<<"
";
cout<<"Sigmoid Value = " <<1.0/(1+exp(-j)) <<endl;
}
getch();
}
OUTPUT:
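Note: the logistic (sigmoid) function used above, 1/(1+exp(-j)), maps any real input into the range (0, 1) and equals 0.5 at j = 0. A common generalization adds a slope parameter a, giving 1/(1+exp(-a*v)); the sketch below only illustrates that variant (the value a = 2.0 is an example, not part of the lab).

//Sketch: logistic function with a slope parameter a (illustrative values).
#include<iostream>
#include<cmath>
using namespace std;
double logistic(double v, double a)
{
return 1.0/(1.0 + exp(-a*v)); // a = 1 gives the plain sigmoid of Lab 2
}
int main()
{
double a = 2.0; // larger a gives a steeper curve
for(double v=-1.0; v<=1.0+1e-9; v+=0.5)
cout<< "v = " <<v<< "\tLogistic Value = " <<logistic(v,a)<<endl;
return 0;
}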
//Lab # 3: Write a program to compute the summation of the neuron.
#include<iostream>
#include<cstdlib>
#include<conio.h>
#include<cmath>
using namespace std;
int main()
{
int i, x[10], w[10], sum=0;
for(i=0;i<10;i++)
{
x[i]= rand() % 5 + 1; // Initialize the input signal randomly.
w[i]= rand() % 3 + 1;// Initialize the weights randomly.
sum = sum + x[i] * w[i];
}
cout<< "sum = " <<sum <<endl;
getch();
}
OUTPUT:
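Note: Lab 3 computes only the weighted sum of the inputs. Later labs (5, 7, 9 and 10) also add a bias bk, so the neuron computes the induced local field v = sum(x[i]*w[i]) + bk. The sketch below shows that extension; the bias value is illustrative.

//Sketch: weighted sum plus a bias term (illustrative values).
#include<iostream>
#include<cstdlib>
using namespace std;
int main()
{
int x[10], w[10];
double bk = 1; // example bias, as in Lab 5
double v = 0; // induced local field
for(int i=0;i<10;i++)
{
x[i]= rand() % 5 + 1; // Initialize the input signal randomly.
w[i]= rand() % 3 + 1; // Initialize the weights randomly.
v = v + x[i]*w[i]; // summation part, as in Lab 3
}
v = v + bk; // add the bias
cout<< "v = " <<v <<endl;
return 0;
}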
//Lab # 4: Write a program to simulating the McCulloch & Pitts neurons
//for 2-input logical OR, and 2-input logical AND.
//Apply Threshold Function
#include<iostream>
#include<conio.h>
#include<cmath>
using namespace std;
// Define the OR function ("or" is a reserved word in standard C++, so the function is named orGate).
int orGate(int i,int j)
{
int net;
int out;
const int w1 = 1;
const int w2 = 1;
const int theta = 0;
net = i * w1 +j * w2 - theta;
// The activation function is threshold (step) function.
if (net >= 0)
out=1;
else
out=0;
return out;
}
// Define the AND function ("and" is also reserved, so the function is named andGate).
int andGate(int i,int j)
{
int net;
int out;
const int w1 = 1;
const int w2 = 1;
const int theta = 1;
net = i * w1 +j * w2 - theta;
// The activation function is threshold (step) function.
if(net >= 0)
out=1;
else
out=0;
return out;
}
int main()
{
int x1,x2,i;
cout<< "x2
x1
OR(x1,x2) AND(x1,x2) " <<endl;
cout<<"------------------------------------" <<endl;
for(x2 = 0; x2 < 2; x2++)
for(x1 = 0; x1 < 2; x1++)
{
cout<< x2 <<"\t" << x1 <<"\t";
cout<< orGate(x1,x2) <<"\t\t"<< andGate(x1,x2) <<endl;
}
getch();
}
OUTPUT:
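Note: the same threshold unit can represent other logic functions by changing the weights and the threshold. The sketch below shows a 1-input logical NOT; the values w1 = -1 and theta = 0 are an illustration, not part of the original lab.

//Sketch: McCulloch & Pitts neuron for 1-input logical NOT (illustrative weights).
#include<iostream>
using namespace std;
int notGate(int i)
{
const int w1 = -1;
const int theta = 0;
int net = i * w1 - theta; // net = -i
// The activation function is the threshold (step) function, as in Lab 4.
if (net >= 0)
return 1;
else
return 0;
}
int main()
{
for(int i=0;i<2;i++)
cout<< "NOT(" << i << ") = " << notGate(i) <<endl;
return 0;
}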
//Lab # 5: Write a program to find error signals
//Initialize randomly input signals with synaptic weights
//Initialize desired value.
//ek(n) = dk(n) – yk(n) [Apply “Error – Correction Learning Rule”]
#include<iostream>
#include<cstdlib>
#include<conio.h>
#include<time.h>
using namespace std;
int main()
{
system("color 5b");
int size;
cout<< "Enter number of Input signal : ";
cin>> size;
cout<< "--------------------\n";
double x[100];
double w[100];
double bk = 1;
double dk = 20; //desired value
double yk = 0;  //actual output
srand(time(NULL));
// Random x and w
for(int i=0;i<size;i++)
{
x[i] = rand() % 5 - 5; // random value in the range -5 .. -1
w[i] = rand() % 5 - 5; // random value in the range -5 .. -1
}
// print x and w
for(int i=0;i<size;i++)
{
cout<< "x"<<i + 1 << "=" << x[i] << "\t";
cout<< "w" << i+1 << "=" << w[i] <<endl;
}
cout<< "--------------------\n";
//Compute actual output
for(int i=0;i<size;i++)
yk += x[i]*w[i];
yk += bk;
double ek = 0; //Declare error signal (initialized so it is defined even when yk == dk)
if(yk != dk)
ek = dk - yk; //Apply "Error Correction Learning Rule"
cout<< "The Actual result is (yk): " <<yk <<endl;
cout<< "ek(n) = dk(n) - yk(n)\nek(n) = " <<dk << " - " <<yk;
cout<< "\nek(n) = "<<ek <<endl;
getch();
}
OUTPUT:
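Note: the error signal ek(n) computed above is normally used to adjust the synaptic weights. Under the error-correction (delta) rule the update is wk(n+1) = wk(n) + eta * ek(n) * xk(n), where eta is a learning-rate parameter. The sketch below shows only that update step; the learning rate eta = 0.1 and the sample values are illustrative, not part of the original lab.

//Sketch: error-correction weight update (illustrative values).
#include<iostream>
using namespace std;
int main()
{
const int size = 3;
double x[size] = {1.0, -2.0, 0.5}; // example input signals
double w[size] = {0.2, 0.4, -0.1}; // example synaptic weights
double eta = 0.1; // example learning rate
double ek = 2.5; // example error signal ek(n) = dk(n) - yk(n)
for(int i=0;i<size;i++)
{
w[i] = w[i] + eta * ek * x[i]; // delta rule: w <- w + eta * e * x
cout<< "w" << i+1 << " = " << w[i] <<endl;
}
return 0;
}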
//Lab # 6: Write a program to represent a line and
//classify points as above or below the line.
#include<iostream>
#include<conio.h>
using namespace std;
// Checks if the point (x,y) in 2-D is above or
//below the line: 3 X + 4 Y - 12 = 0.
int aboveline (int x,int y)
{
int net,out;
const int k = 1;
const int w0 = -12;
const int w1 = 3;
const int w2 = 4;
net = x * w1 + y * w2 + k * w0;
if (net >= 0)
out=1;
else
out=-1;
return out;
}
int main()
{
int x1,x2,i;
cout<< "Checks if the point (x1,x2) in 2-D is above " <<endl;
cout<< " or below the line: " <<endl;
cout<< " 3 X + 4 Y - 12 = 0. " <<endl;
cout<< "The program outputs: " <<endl;
cout<< " 1 if the point above the line" <<endl;
cout<< " -1 if the point below the line" <<endl;
cout<<"Enter the coordinates of the point: ";
cin>> x1;
cin>> x2;
cout<<endl;
cout<<"Display the results = "<<aboveline(x1,x2);
getch();
}
OUTPUT:
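Note: as a quick check of the classifier, for the point (0, 0) the net value is 3*0 + 4*0 - 12 = -12 < 0, so the program prints -1 (below the line), and for (4, 3) it is 3*4 + 4*3 - 12 = 12 >= 0, so it prints 1 (above the line). The standalone sketch below repeats the same check; the sample points are illustrative.

//Sketch: evaluating 3x + 4y - 12 for two sample points (illustrative).
#include<iostream>
using namespace std;
int main()
{
int pts[2][2] = {{0, 0}, {4, 3}}; // example points
for(int p=0;p<2;p++)
{
int net = 3*pts[p][0] + 4*pts[p][1] - 12;
cout<< "(" << pts[p][0] << "," << pts[p][1] << ") net = " << net
<< " -> " << (net >= 0 ? 1 : -1) <<endl;
}
return 0;
}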
//Lab # 7: Write a program to Classify the perceptron either in class
//C1 or C2 using decision boundary x1w1 + x2w2 + bk = 0
#include<iostream>
#include<cstdlib>
#include<conio.h>
using namespace std;
int main()
{
double x[2],w[2];
double bk;//bias
cout<<"Enter the value of bias= ";
cin>>bk;
for(int i=0;i<2;i++)
{
x[i]=rand()%11;
w[i]=rand()%11;
}
for(int i=0;i<2;i++)
{
cout<<"x"<<i+1<<"= "<<x[i]<<endl;
}
for(int i=0;i<2;i++)
{
cout<<"w"<<i+1<<"= "<<w[i]<<endl;
}
double v=0;
for(int i=0;i<2;i++)
{
v=v+w[i]*x[i];
}
v+=bk;
if(v>=0)
{
cout<<"result = "<<v<<endl;
cout<<"the point belongs to class C1";
}
else
{
cout<<"result = "<<v<<endl;
cout<<"the point belongs to class C2";
}
cout<<endl;
getch();
}
OUTPUT:
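Note: the decision boundary x1*w1 + x2*w2 + bk = 0 can be rewritten as x2 = -(w1*x1 + bk)/w2 (when w2 != 0), which is convenient for plotting the line that separates class C1 from class C2. The sketch below prints a few points on that line; the weights and bias are illustrative values, not ones produced by the lab program.

//Sketch: points on the decision boundary x1*w1 + x2*w2 + bk = 0 (illustrative values).
#include<iostream>
using namespace std;
int main()
{
double w1 = 2.0, w2 = 3.0, bk = -6.0; // example parameters
for(double x1=0.0; x1<=3.0; x1+=1.0)
{
double x2 = -(w1*x1 + bk)/w2; // solve the boundary equation for x2
cout<< "x1 = " <<x1<< "\tx2 = " <<x2<<endl;
}
return 0;
}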
//Lab # 8.The perceptron learning algorithm.
//Training a perceptron to represent a line, so that we can classify
//points if they are above or below the line. Provide the function
//misclassified.Implement the learning algorithm.
#include<iostream>
#include<cstdlib>
#include<conio.h>
using namespace std;
int misclassified(int inputclass[],int outclass[])
{
int i;
int point=-1;
for(i=0; i< 5; i++)
if (inputclass[i] != outclass[i])
point=i;
return point;
}
void evaluate(int x[5][2],int w[],int outclass[])
{
int i;
int net;
for(i=0;i<5;i++)
{
net = x[i][0]*w[1]+x[i][1]*w[2]+w[0];
if(net>0)
outclass[i]=1;
else
outclass[i]=-1;
}
}
int main()
{
int p;
int i;
int w[3] = {2, 1, -2}; // theta, w1,w2
// We have five data points in 2-D.
int x[5][2] = {{-1, 2},{0, 2},{3, -2},{0, -3},{-2, -1}};
// Specify the class for each point.
int inputclass[5] = {1, 1, 1, -1, -1};
// To be determined by the ANN
int outclass[5];
int pclass;
cout<<"The input classes"<<endl;
for(i=0;i<5;i++)
cout<<"point "<<i<<" "<<inputclass[i]<<endl;
evaluate (x, w, outclass);
while ((p = misclassified(inputclass, outclass)) != -1)
{
cout<<" inside the while ...";
pclass= inputclass[p];
w[0]=w[0]+ pclass *1;
w[1]=w[1]+ pclass *x[p][0];
w[2]=w[2] + pclass *x[p][1];
evaluate (x,w,outclass);
}
for(i=0;i<5;i++)
cout<<"point"<<i<<" "<<outclass[i]<<endl;
cout<<misclassified (inputclass,outclass);
system("pause");
getch();
}
OUTPUT:
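Note: inside the while loop the weights are corrected by w <- w + class * x, with the bias weight w[0] updated using a constant input of 1; this is the perceptron rule with a learning rate of 1. A common generalization multiplies the correction by a learning-rate parameter eta. The sketch below shows a single update of that form; eta = 0.5 and the sample point are illustrative, not part of the original lab.

//Sketch: one perceptron weight update with an explicit learning rate (illustrative values).
#include<iostream>
using namespace std;
int main()
{
double w[3] = {2, 1, -2}; // theta (bias weight), w1, w2, as in Lab 8
double xp[2] = {-1, 2}; // example misclassified point
int pclass = 1; // its desired class (+1 or -1)
double eta = 0.5; // example learning rate
w[0] = w[0] + eta * pclass * 1; // bias input is fixed at 1
w[1] = w[1] + eta * pclass * xp[0];
w[2] = w[2] + eta * pclass * xp[1];
for(int i=0;i<3;i++)
cout<< "w[" << i << "] = " << w[i] <<endl;
return 0;
}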
//Lab # 9. Implement Multi-Layer Perceptron
//Initialize randomly input signals with synaptic weights
//Compute the summation of the neurons and apply MLPs Logistic function.
#include<iostream>
#include<cstdlib>
#include<conio.h>
#include<cmath>
using namespace std;
int main()
{
double x[2];
double w[6];
double b1 = 5, b2 = 10;
double y, h1, h2, L;
// generate input signal x
for(int i=0;i<2;i++)
x[i] = rand() % 9 + 1 ;
// generate synaptic weight w
for(int i=0;i<6;i++)
w[i] = rand() % 9 + 1 ;
// Calculate h1 and h2 (Hidden layer)
h1 = (x[0]*w[0]) + (x[1]*w[2]) + b1;
h2 = (x[0]*w[1]) + (x[1]*w[3]) + b1; // use w[3] so all four input-to-hidden weights are used
// Find y ==> output neuron
y = (h1*w[4]) + (h2*w[5]) + b2;
// Logistic Function
L = 1/(1+exp(-y));
// Display
for(int i=0;i<2;i++)
cout << "Input Sinals "<<i+1 << "=" << x[i] << endl;
for(int i=0;i<6;i++)
cout << "w" << i+1 << "=" << w[i] << endl;
cout<<"Actual Output (y) = "<<y<<endl;
cout << "Logistic value (L ) = "<<L<<endl;
getch();
}
OUTPUT:
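Note: in the listing above the logistic function is applied only to the output neuron, while h1 and h2 are passed forward as raw sums. In a standard multi-layer perceptron each hidden neuron also passes its induced local field through the activation function before feeding the next layer. The sketch below shows that variant; the input and weight values are fixed illustrative numbers rather than random ones.

//Sketch: applying the logistic activation at the hidden layer as well (illustrative values).
#include<iostream>
#include<cmath>
using namespace std;
double logistic(double v) { return 1.0/(1.0 + exp(-v)); }
int main()
{
double x[2] = {2, 7}; // example input signals
double w[6] = {1, 4, 2, 6, 3, 5}; // example synaptic weights
double b1 = 5, b2 = 10;
double v1 = x[0]*w[0] + x[1]*w[2] + b1; // induced local field of hidden neuron 1
double v2 = x[0]*w[1] + x[1]*w[3] + b1; // induced local field of hidden neuron 2
double h1 = logistic(v1); // hidden outputs pass through the activation
double h2 = logistic(v2);
double y = logistic(h1*w[4] + h2*w[5] + b2); // output neuron
cout<< "h1 = " <<h1<< "\th2 = " <<h2<< "\ty = " <<y<<endl;
return 0;
}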
//Lab # 10. Implement Multi-Layer Perceptron – Forward Signals
//Initialize randomly input signals with synaptic weights
//Compute the summation of the neurons
//Apply MLPs Hyperbolic Tangent Function.
#include<iostream>
#include<cstdlib>
#include<conio.h>
#include<cmath>
using namespace std;
int main()
{
double x[2];
double w[6];
double b1=5,b2=10;
double vj,h1,h2,H;
double a = 5 , b = 3;
// generate input signal x
for(int i=0;i<2;i++)
x[i]=rand()%9+1;
// generate synaptic weight w
for(int i=0;i<6;i++)
w[i]=rand()%9+1;
// Calculate h1 and h2
h1=(x[0]*w[0]) + (x[1]*w[2])+b1;
h2=(x[0]*w[1]) + (x[1]*w[3])+b1; // use w[3] so all four input-to-hidden weights are used
// Find vj ==> output neuron
vj=(h1*w[4])+(h2*w[5])+b2;
// Hyperbolic Tangent Function
if(a>0 && b>0)
H=a*tanh(b*vj);
else
H=a*tanh(vj);
// Display
for(int i=0;i<2;i++)
cout<<"x"<<i+1<<"="<<x[i]<<endl;
for(int i=0;i<6;i++)
cout<<"w"<<i+1<<"="<<w[i]<<endl;
cout<<"-------------------------------------------\n";
cout<<"Sum of Neurons (vj) = "<<vj<<endl;
cout<<"H=a * tanh(b*vj(n))"<<endl;
cout<<"H="<<a<<"*tanh("<<b<<"*"<<vj<<")\n";
cout<<"Hyperbolic Tangent Function = "<<H<<endl;
getch();
}
OUTPUT:
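Note: the logistic function of Lab 9 squashes the induced local field into the range (0, 1), while a*tanh(b*v) used here produces values in (-a, a). The sketch below evaluates both activations at the same field values for comparison; the values of a, b and v are illustrative.

//Sketch: logistic vs. hyperbolic tangent activation on the same inputs (illustrative values).
#include<iostream>
#include<cmath>
using namespace std;
int main()
{
double a = 5, b = 3; // tanh parameters, as in Lab 10
double v[3] = {-1.0, 0.0, 1.0}; // example induced local fields
for(int i=0;i<3;i++)
{
double L = 1.0/(1.0 + exp(-v[i])); // logistic, range (0, 1)
double H = a*tanh(b*v[i]); // hyperbolic tangent, range (-a, a)
cout<< "v = " <<v[i]<< "\tL = " <<L<< "\tH = " <<H<<endl;
}
return 0;
}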