/* 300 */
printf("Enter file name -->");
scanf("%s",file_name);
ind=access(file_name,0);
}
}
fptr=fopen(file_name,"w");
/* Assigning memory to *net, *z, *delta. */
net=(float *)malloc(Nt*sizeof(float));
y=(float *)malloc(Nt*sizeof(float));
delta=(float *)malloc(Nt*sizeof(float));
printf("\nEnter file - name containing training data -
->");
scanf("%s",file_name2);
fptr2=fopen(file_name2,"r");
if(fptr2==NULL)
{
printf("file %s does not exist. ", file_name);
exit(1);
}
/* Determining the size of the data.*/
M=0; ind=1;
while(1)
{
for(i=0;i<NL[0];i++)
{
if((fscanf(fptr2,"%f ",&xt))==EOF) /*input
data. */
{ ind=0;
break;
}
}
if(ind==0)
break;
for(i=0;i<NL[L-1];i++) /* desired output. */
fscanf(fptr2,"%d ",&xd);
M++;
}
printf("\n# of data points=%d",M);
rewind(fptr2);
/* Assigning memory to *xp, *d */
/* 301 */
xp=(float *)malloc((M*NL[0])*sizeof(float));
d=(int *)malloc((M*NL[L-1])*sizeof(int));
/* Reading in the data. */
for(i=0; i<M; i++)
{
for(j=0;j<NL[0];j++)
fscanf(fptr2,"%f ",&xp[j*M+i]);
for(j=0;j<NL[L-1];j++)
fscanf(fptr2,"%d ",&d[j*M+i]);
}
fclose(fptr2);
/*Call the Fletcher-Reeves conj. grad. algorithm.*/
clrscr();
gotoxy(1, 1);
printf("Press ESC to exit and save latest update for
weights.");
conj_grad(fun, dfun, w, N, 1.e-3,1.e-3, 10000);
fprintf(fptr, "%d", L);
for( i=0; i<L; i++)
fprintf(fptr , "%d ", NL[i]);
for(i=0; i<N; i++)
fprintf(fptr,"%f ",w[i]);
fprintf(fptr, "%f ", theta);
fclose(fptr);
q=fun(w);
printf("\nError=%f ", q);
printf ( "\n File name used to store weights i s %s" ,
file_name);
printf ( "\n File name for the trai ning data is %s" ,
file_name2);
}
extern float *net, *w, *delta, *y ;
extern int *d;
extern int *NS,*NL;
/* Generating the function. */
float fun(float *w)
{
int i,j,k,m,n,Nt1,Nt2;
float q, error, E;
302
q=0.0;
for(k=0; k<M; k++)
{
for(i=0;i<NL[1];i++) /* From input layer to
first */
{ /* hidden layer. */
net[i]=0.0;
for(j=0;j<NL[0];j++)
net[i]+=w[i+j*NL[1]]*xp[j*M+k];
net[i]+=theta;
E=(float)exp(-(double)net[i]);
y[i]=1.0/(1.0+E);
}
Nt1=NL[1]; Nt2=0;
for(n=2;n<L;n++) /* From layer n-1 to layer n.
*/
{
for(i=0;i<NL[n];i++)
{
m=Nt1+i;
net[m]=0.0;
for(j=0;j<NL[n-1];j++)
net[m]+=w[NS[n-2]+i+j*NL[n]]*y[j+Nt2];
net[m]+=theta;
E=(float)exp(-(double)net[m]);
y[m]=1.0/(1.0+E);
}
Nt1+=NL[n];
Nt2+=NL[n-1];
}
for(i=0;i<NL[L-1];i++) /* Caculating the
error. */
{
error=d[k+i*M]-y[Nt2+i];
q+=error*error;
}
} /*k-loop*/
q/=2 ;
return q;
303
}
/* Declarations shared with the training module (defined elsewhere). */
extern float *df, *w, *net;
/* Fixed OCR/typo: original read "extern *NL,*NL;" (implicit int and a
   duplicated NL); dfun() below indexes both NS[] and NL[], matching the
   earlier "extern int *NS,*NL;" declaration. */
extern int *NS, *NL;
/* Derivative of the logistic activation, expressed through the stored
   activation y[i]: f'(net) = y*(1-y).  Expansion parenthesized so the
   macro is safe inside larger expressions. */
#define fd(i) (y[i]*(1.0-y[i]))
void dfun(float *w, float *df, int N)
{
int i,j,k,m,n,Nt1,Nt2,Nt3,ii;
float E,error,sum;
/* Initialize derivative vector. */
for(i=0;i<N;i++)
df[i]=0.0;
/* Start. */
for(k=0;k<M;k++)
{
/* Forward propagation. */
for(i=0;i<NL[1];i++) /* From input layer to first
*/
{ /* hidden layer. */
net[i]=0.0;
for(j=0;j<NL[0];j++)
net[i]+=w[i+j*NL[1]]*xp[j*M+k];
net[i]+=theta;
E=(float)exp(-(double)net[i]);
y[i]=1.0/(1.0+E);
}
Nt1=NL[1]; Nt2=0;
for(n=2;n<L;n++) /*From layer n-1 to layer n. */
{
for(i=0;i<NL[n];i++)
{
m=Nt1+i;
net[m]=0.0;
for(j=0;j<NL[n-1];j++)
net[m]+=w[NS[n-2]+i+j*NL[n]]*y[j+Nt2];
net[m]+=theta;
E=(float)exp(-(double)net[m]);
/* 304 */
y[m]=1.0/(1.0+E);
}
Nt1+=NL[n];
Nt2+=NL[n-1];
}
Nt1=0;
for(i=1; i<(L-1);i++)
Nt1+=NL[i];
for(i=0; i<NL[L-1]; i++) /* delta's for output
layer. */
{
ii=Nt1+i;
error=d[k+i*M]-y[ii];
delta[ii]=-error*fd(ii);
}
for(m=0;m<(L-2);m++) /* delta's by back
propagation. */
{
Nt2=Nt1-NL[L-2-m];
for(i=0;i<NL[L-2-m];i++)
{
ii=Nt2+i ;
sum=0.0;
for(j=0;j<NL[L-1-m];j++)
sum+=delta[Nt1+j]*w[NS[L-3-m]+j+i*NL[L-1-m]];
delta[ii]=fd(ii)*sum;
}
Nt1=Nt2;
}
for(i=0;i<NL[1];i++)
for(j=0;j<NL[0];j++)
df[i+j*NL[1]]+=delta[i]*xp[k+j*M];
Nt1=NS[0]; Nt2=0;
Nt3=NL[1];
for(m=1;m<(L-1) ;m++)
{
for(i=0;i<NL[m+1];i++)
for(j=0;j<NL[m];j++)
df[Nt1+i+j*NL[m+1]]+=delta[Nt3+i]*y[Nt2+j];
Nt1=NS[m] ;