Medical Image Processing Textbook (Giáo trình xử lý ảnh y tế), Volume 3, Part 6


300 printf("Enter file name >"); scanf("%s",file_name); ind=access(file_name,0); } } fptr=fopen(file_name,"w"); /* Assigning memory to *net, *z, *delta. */ net=(float *)malloc(Nt*sizeof(float)); y=(float *)malloc(Nt*sizeof(float)); delta=(float *)malloc(Nt*sizeof(float)); printf("\nEnter file - name containing training data - ->"); scanf("%s",file_name2); fptr2=fopen(file_name2,"r"); if(fptr2==NULL) { printf("file %s does not exist. ", file_name); exit(1); } /* Determining the size of the data.*/ M=0; ind=1; while(1) { for(i=0;i<NL[0];i++) { if((fscanf(fptr2,"%f ",&xt))==EOF) /*input data. */ { ind=0; break; } } if(ind==0) break; for(i=0;i<NL[L-1];i++) /* desired output. */ fscanf(fptr2,"%d ",&xd); M++; } printf("\n# of data points=%d",M); rewind(fptr2); /* Assigning memory to *xp, *d */ 301 xp=(float *)malloc((M*NL[0])*sizeof(float)); d=(int *)malloc((M*NL[L-1])*sizeof(int)); /* Reading in the data. */ for(i=0; i<M; i++) { for(j=0;j<NL[0];j++) fscanf(fptr2,"%f ",&xp[j*M+i]); for(j=0;j<NL[L-1];j++) fscanf(fptr2,"%d ",&d[j*M+i]); } fclose(fptr2); /*Call the Fletcher-Reeves conj. grad. algorithm.*/ clrscr(); gotoxy(1, 1); printf("Press ESC to exit and save latest update for weights."); conj_grad(fun, dfun, w, N, 1.e-3,1.e-3, 10000); fprintf(fptr, "%d", L); for( i=0; i<L; i++) fprintf(fptr , "%d ", NL[i]); for(i=0; i<N; i++) fprintf(fptr,"%f ",w[i]); fprintf(fptr, "%f ", theta); fclose(fptr); q=fun(w); printf("\nError=%f ", q); printf ( "\n File name used to store weights i s %s" , file_name); printf ( "\n File name for the trai ning data is %s" , file_name2); } extern float *net, *w, *delta, *y ; extern int *d; extern int *NS,*NL; /* Generating the function. */ float fun(float *w) { int i,j,k,m,n,Nt1,Nt2; float q, error, E; 302 q=0.0; for(k=0; k<M; k++) { for(i=0;i<NL[1];i++) /* From input layer to first */ { /* hidden layer. */ net[i]=0.0; for(j=0;j<NL[0];j++) net[i]+=w[i+j*NL[1]]*xp[j*M+k]; net[i]+=theta; E=(float)exp(-(double)net[i]); y[i]=1.0/(1.0+E); } Nt1=NL[1]; Nt2=0; for(n=2;n<L;n++) /* From layer n-1 to layer n. */ { for(i=0;i<NL[n];i++) { m=Nt1+i; net[m]=0.0; for(j=0;j<NL[n-1];j++) net[m]+=w[NS[n-2]+i+j*NL[n]]*y[j+Nt2]; net[m]+=theta; E=(float)exp(-(double)net[m]); y[m]=1.0/(1.0+E); } Nt1+=NL[n]; Nt2+=NL[n-1]; } for(i=0;i<NL[L-1];i++) /* Caculating the error. */ { error=d[k+i*M]-y[Nt2+i]; q+=error*error; } } /*k-loop*/ q/=2 ; return q; 303 } extern float *df,*w,*net; extern *NL,*NL; #define fd(i) y[i]*(1.0-y[i]) /* Define derivative. */ void dfun(float *w, float *df, int N) { int i,j,k,m,n,Nt1,Nt2,Nt3,ii; float E,error,sum; /* Initialize derivative vector. */ for(i=0;i<N;i++) df[i]=0.0; /* Start. */ for(k=0;k<M;k++) { /* Forward propagation. */ for(i=0;i<NL[1];i++) /* From input layer to first */ { /* hidden layer. */ net[i]=0.0; for(j=0;j<NL[0];j++) net[i]+=w[i+j*NL[1]]*xp[j*M+k]; net[i]+=theta; E=(float)exp(-(double)net[i]); y[i]=1.0/(1.0+E); } Nt1=NL[1]; Nt2=0; for(n=2;n<L;n++) /*From layer n-1 to layer n. */ { for(i=0;i<NL[n];i++) { m=Nt1+i; net[m]=0.0; for(j=0;j<NL[n-1];j++) net[m]+=w[NS[n-2]+i+j*NL[n]]*y[j+Nt2]; net[m]+=theta; E=(float)exp(-(double)net[m]); 304 y[m]=1.0/(1.0+E); } Nt1+=NL[n]; Nt2+=NL[n-1]; } Nt1=0; for(i=1; i<(L-1);i++) Nt1+=NL[i]; for(i=0; i<NL[L-1]; i++) /* delta's for output layer. */ { ii=Nt1+i; error=d[k+i*M]-y[ii]; delta[ii]=-error*fd(ii); } for(m=0;m<(L-2);m++) /* delta's by back propagation. 
      for(m=0;m<(L-2);m++)          /* Deltas by back propagation. */
      {
         Nt2=Nt1-NL[L-2-m];
         for(i=0;i<NL[L-2-m];i++)
         {
            ii=Nt2+i;
            sum=0.0;
            for(j=0;j<NL[L-1-m];j++)
               sum+=delta[Nt1+j]*w[NS[L-3-m]+j+i*NL[L-1-m]];
            delta[ii]=fd(ii)*sum;
         }
         Nt1=Nt2;
      }

      /* Accumulate the gradient of the error w.r.t. the weights. */
      for(i=0;i<NL[1];i++)
         for(j=0;j<NL[0];j++)
            df[i+j*NL[1]]+=delta[i]*xp[k+j*M];
      Nt1=NS[0];
      Nt2=0;
      Nt3=NL[1];
      for(m=1;m<(L-1);m++)
      {
         for(i=0;i<NL[m+1];i++)
            for(j=0;j<NL[m];j++)
               df[Nt1+i+j*NL[m+1]]+=delta[Nt3+i]*y[Nt2+j];
         Nt1=NS[m];
         Nt2+=NL[m];
         Nt3+=NL[m+1];
      }
   }  /* k-loop */
}

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <conio.h>

void conj_grad(float (*)(float *), void (*)(float *, float *, int),
               float *, int, float, float, int);
float f(float, float (*)(float *), float *, float *, float *, int);
float fun(float *);
void dfun(float *, float *, int);
void bracket(float, float, float *, float *, float (*)(float *),
             float *, float *, float *, int);
float Brent(float, float, float (*)(float *), float, float *,
            float *, float *, int);

/* Conjugate gradient method.
   fun:     subprogram that returns the value of the function to be
            minimized; its argument is the vector of variables.
   dfun:    subprogram that provides the gradients.  Arguments:
            variables, gradients, number of variables.
   x[]:     contains the variables.  An initial value needs to be supplied.
   N:       number of variables.
   eps1:    overall convergence criterion.
   eps2:    line-search convergence criterion.
   no_iter: maximum number of iterations. */

#define ESC 0x1B

float EPS;    /* square root of machine epsilon. */

void conj_grad(float (*fun)(float *), void (*dfun)(float *, float *, int),
               float *x, int N, float eps1, float eps2, int no_iter)
{
   float *df,*dfp,*xt,*S,q,astar,sum,test,sum1,sum2;
   int i,iter;
   float a,b,tol1;

   /* Estimate machine epsilon. */
   EPS=1.0;
   do
   {
      EPS/=2.0;
      tol1=1.0+EPS;
   } while(tol1>1.0);
   EPS=(float)sqrt((double)EPS);

   df=(float *)malloc(N*sizeof(float));
   dfp=(float *)malloc(N*sizeof(float));
   S=(float *)malloc(N*sizeof(float));
   xt=(float *)malloc(N*sizeof(float));

   dfun(x,df,N);
   for(i=0;i<N;i++)
      S[i]=df[i];
   gotoxy(1,6);
   q=fun(x);
   printf(" Initial value of error function=%f",q);
   iter=0;
   while(iter<no_iter)
   {
      if(kbhit()!=0)
      {
         if(getch()==ESC)
            return;
      }
      iter++;

      /* Test convergence. */
      test=0.0;
      for(i=0;i<N;i++)
         test+=(float)fabs((double)df[i]);
      if(test<eps1)
      {
         printf("\nConvergence by gradient test.");
         break;
      }

      /* If df*S<0.0 restart with the steepest-descent direction. */
      test=1.0;
      for(i=0;i<N;i++)
      {
         if(df[i]*S[i]<0.0)
         {
            test=-1.0;
            break;
         }
      }
      if(test<0.0)
      {
         for(i=0;i<N;i++)
            S[i]=df[i];
      }

      /* Save previous gradient vector. */
      for(i=0;i<N;i++)
         dfp[i]=df[i];

      /* Line search. */
      bracket(0.01, 0.001, &a, &b, fun, x, xt, S, N);
      astar=Brent(a, b, fun, eps2, x, xt, S, N);

      /* Adjust variables. */
      for(i=0;i<N;i++)
         x[i]-=astar*S[i];
      dfun(x,df,N);

      /* Fletcher-Reeves update of the search direction. */
      sum1=sum2=0.0;
      for(i=0;i<N;i++)
      {
         sum1+=dfp[i]*dfp[i];
         sum2+=df[i]*df[i];
      }
      sum=sum2/sum1;
      for(i=0;i<N;i++)
         S[i]=sum*S[i]+df[i];
      q=fun(x);
      gotoxy(1,7);
      printf(" Error function=%f at iteration # %-5d",q,iter);
   }
   printf("\nNumber of iterations = %d \n",iter);
   free(df);
   free(dfp);
   free(S);
   free(xt);
}

/* Function evaluation for the line search. */
float f(float alpha, float (*fun)(float *), float *x, float *xt,
        float *S, int N)
{
   int i;
   float q;

   for(i=0;i<N;i++)
      xt[i]=x[i]-alpha*S[i];
   q=fun(xt);
   return q;
}

/* Function to bracket the minimum of a single-variable function. */
void bracket(float ax, float dx, float *a, float *b,
             float (*fun)(float *), float *x, float *xt, float *s, int N)
{
   float y1,x1,x0,y0,x2,y2;
   int iter;

   x0=ax;
   x1=x0+dx;
   y0=f(x0,fun,x,xt,s,N);
   y1=f(x1,fun,x,xt,s,N);
   if(y1>=y0)
   {
      dx=-dx;
      x1=x0+dx;
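The training file read by the main program above simply lists, for each pattern, NL[0] input values followed by NL[L-1] desired outputs, separated by whitespace; M, the number of patterns, is found by scanning the file once. As an illustration only (the network size and data are assumed here, not taken from the original listing), a 2-2-1 network trained on the XOR problem would use a file such as:

   0.0 0.0  0
   0.0 1.0  1
   1.0 0.0  1
   1.0 1.0  0

conj_grad() then minimizes the error q returned by fun(). The ratio sum2/sum1 computed in its main loop is the Fletcher-Reeves factor ||g_new||^2 / ||g_old||^2, so the new search direction S is the current gradient plus the previous direction scaled by that factor, and the line search along -S determines the step astar.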
