Actual source code: eige.c

/*$Id: eige.c,v 1.35 2001/08/07 03:03:45 balay Exp $*/

#include "src/ksp/ksp/kspimpl.h"

/*@
    KSPComputeExplicitOperator - Computes the explicit preconditioned operator.

    Collective on KSP

    Input Parameter:
.   ksp - the Krylov subspace context

    Output Parameter:
.   mat - the explicit preconditioned operator

    Notes:
    This computation is done by applying the operators to columns of the
    identity matrix.

    Currently, this routine uses a dense matrix format when one process
    is used and a sparse format otherwise.  This routine is costly in general,
    and is recommended for use only with relatively small systems.

    Level: advanced

.keywords: KSP, compute, explicit, operator

.seealso: KSPComputeEigenvaluesExplicitly()
@*/
int KSPComputeExplicitOperator(KSP ksp,Mat *mat)
{
  Vec         in,out;
  int         ierr,i,M,m,size,*rows,start,end;
  Mat         A;
  MPI_Comm    comm;
  PetscScalar *array,zero = 0.0,one = 1.0;

  comm = ksp->comm;

  MPI_Comm_size(comm,&size);

  VecDuplicate(ksp->vec_sol,&in);
  VecDuplicate(ksp->vec_sol,&out);
  VecGetSize(in,&M);
  VecGetLocalSize(in,&m);
  VecGetOwnershipRange(in,&start,&end);
  PetscMalloc((m+1)*sizeof(int),&rows);
  for (i=0; i<m; i++) {rows[i] = start + i;}

  MatCreate(comm,m,m,M,M,mat);
  if (size == 1) {
    MatSetType(*mat,MATSEQDENSE);
    MatSeqDenseSetPreallocation(*mat,PETSC_NULL);
  } else {
    MatSetType(*mat,MATMPIAIJ);
    MatMPIAIJSetPreallocation(*mat,0,PETSC_NULL,0,PETSC_NULL);
  }

  PCGetOperators(ksp->B,&A,PETSC_NULL,PETSC_NULL);

  for (i=0; i<M; i++) {

    /* set in to the i-th column of the identity */
    VecSet(&zero,in);
    VecSetValues(in,1,&i,&one,INSERT_VALUES);
    VecAssemblyBegin(in);
    VecAssemblyEnd(in);

    /* apply the operator, then the preconditioner */
    KSP_MatMult(ksp,A,in,out);
    KSP_PCApply(ksp,ksp->B,out,in);

    /* store the result as the i-th column of the explicit operator */
    VecGetArray(in,&array);
    MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);
    VecRestoreArray(in,&array);

  }
  PetscFree(rows);
  VecDestroy(in);
  VecDestroy(out);
  MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);
  return(0);
}
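/*
   Usage sketch (not part of eige.c): one way the routine above might be called
   once a KSP has its operators set.  This is a minimal sketch for this era of
   PETSc; the Mat A, the KSP ksp, and the error-code variable ierr are assumed
   to be declared and initialized by the caller.  The explicit preconditioned
   operator is formed, printed, and destroyed.

     Mat expmat;
     ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = KSPSetUp(ksp);CHKERRQ(ierr);
     ierr = KSPComputeExplicitOperator(ksp,&expmat);CHKERRQ(ierr);
     ierr = MatView(expmat,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     ierr = MatDestroy(expmat);CHKERRQ(ierr);
*/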

#include "petscblaslapack.h"

/*@
   KSPComputeEigenvaluesExplicitly - Computes all of the eigenvalues of the
   preconditioned operator using LAPACK.

   Collective on KSP

   Input Parameters:
+  ksp - iterative context obtained from KSPCreate()
-  nmax - size of arrays r and c

   Output Parameters:
+  r - real part of computed eigenvalues
-  c - imaginary part of computed eigenvalues

   Notes:
   This approach is very slow but will generally provide accurate eigenvalue
   estimates.  This routine explicitly forms a dense matrix representing
   the preconditioned operator, and thus will run only for relatively small
   problems, say n < 500.

   Many users may just want to use the monitoring routine
   KSPSingularValueMonitor() (which can be set with the option -ksp_singmonitor)
   to print the singular values at each iteration of the linear solve.

   The preconditioner, right-hand-side vector, and solution vector should be
   set before this routine is called, i.e., use KSPSetOperators() and KSPSolve(),
   or KSPSetOperators(), KSPSetRhs(), and KSPSetSolution().

   Level: advanced

.keywords: KSP, compute, eigenvalues, explicitly

.seealso: KSPComputeEigenvalues(), KSPSingularValueMonitor(), KSPComputeExtremeSingularValues(), KSPSetOperators(), KSPSolve(), KSPSetRhs(), KSPSetSolution()
@*/
int KSPComputeEigenvaluesExplicitly(KSP ksp,int nmax,PetscReal *r,PetscReal *c)
{
  Mat          BA;
  int          i,n,ierr,size,rank,dummy;
  MPI_Comm     comm = ksp->comm;
  PetscScalar  *array;
  Mat          A;
  int          m,row,nz,*cols;
  PetscScalar  *vals;

  KSPComputeExplicitOperator(ksp,&BA);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  MatGetSize(BA,&n,&n);
  if (size > 1) { /* assemble matrix on first processor */
    if (!rank) {
      MatCreate(ksp->comm,n,n,n,n,&A);
    } else {
      MatCreate(ksp->comm,0,n,n,n,&A);
    }
    MatSetType(A,MATMPIDENSE);
    MatMPIDenseSetPreallocation(A,PETSC_NULL);
    PetscLogObjectParent(BA,A);

    MatGetOwnershipRange(BA,&row,&dummy);
    MatGetLocalSize(BA,&m,&dummy);
    for (i=0; i<m; i++) {
      MatGetRow(BA,row,&nz,&cols,&vals);
      MatSetValues(A,1,&row,nz,cols,vals,INSERT_VALUES);
      MatRestoreRow(BA,row,&nz,&cols,&vals);
      row++;
    }

    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
    MatGetArray(A,&array);
  } else {
    MatGetArray(BA,&array);
  }

#if defined(PETSC_HAVE_ESSL)
  /* ESSL has a different calling sequence for dgeev() and zgeev() than standard LAPACK */
  if (!rank) {
    PetscScalar sdummy,*cwork;
    PetscReal   *work,*realpart;
    int         clen,idummy,lwork,*perm,zero;

#if !defined(PETSC_USE_COMPLEX)
    clen = n;
#else
    clen = 2*n;
#endif
    PetscMalloc(clen*sizeof(PetscScalar),&cwork);
    idummy = n;
    lwork  = 5*n;
    PetscMalloc(lwork*sizeof(PetscReal),&work);
    PetscMalloc(n*sizeof(PetscReal),&realpart);
    zero   = 0;
    LAgeev_(&zero,array,&n,cwork,&sdummy,&idummy,&idummy,&n,work,&lwork);
    PetscFree(work);

    /* For now we stick with the convention of storing the real and imaginary
       components of evalues separately.  But is this what we really want? */
    PetscMalloc(n*sizeof(int),&perm);

#if !defined(PETSC_USE_COMPLEX)
    for (i=0; i<n; i++) {
      realpart[i] = cwork[2*i];
      perm[i]     = i;
    }
    PetscSortRealWithPermutation(n,realpart,perm);
    for (i=0; i<n; i++) {
      r[i] = cwork[2*perm[i]];
      c[i] = cwork[2*perm[i]+1];
    }
#else
    for (i=0; i<n; i++) {
      realpart[i] = PetscRealPart(cwork[i]);
      perm[i]     = i;
    }
    PetscSortRealWithPermutation(n,realpart,perm);
    for (i=0; i<n; i++) {
      r[i] = PetscRealPart(cwork[perm[i]]);
      c[i] = PetscImaginaryPart(cwork[perm[i]]);
    }
#endif
    PetscFree(perm);
    PetscFree(realpart);
    PetscFree(cwork);
  }
#elif !defined(PETSC_USE_COMPLEX)
  if (!rank) {
    PetscScalar *work;
    PetscReal   *realpart,*imagpart;
    int         idummy,lwork,*perm;

    idummy   = n;
    lwork    = 5*n;
    PetscMalloc(2*n*sizeof(PetscReal),&realpart);
    imagpart = realpart + n;
    PetscMalloc(5*n*sizeof(PetscReal),&work);
#if defined(PETSC_MISSING_LAPACK_GEEV)
    SETERRQ(PETSC_ERR_SUP,"GEEV - LAPACK routine is unavailable\nNot able to provide eigenvalues.");
#else
    {
      PetscScalar sdummy;
      LAgeev_("N","N",&n,array,&n,realpart,imagpart,&sdummy,&idummy,&sdummy,&idummy,work,&lwork,&ierr);
    }
#endif
    if (ierr) SETERRQ1(PETSC_ERR_LIB,"Error in LAPACK routine %d",ierr);
    PetscFree(work);
    PetscMalloc(n*sizeof(int),&perm);
    for (i=0; i<n; i++) { perm[i] = i;}
    PetscSortRealWithPermutation(n,realpart,perm);
    for (i=0; i<n; i++) {
      r[i] = realpart[perm[i]];
      c[i] = imagpart[perm[i]];
    }
    PetscFree(perm);
    PetscFree(realpart);
  }
#else
  if (!rank) {
    PetscScalar *work,*eigs;
    PetscReal   *rwork;
    int         idummy,lwork,*perm;

    idummy   = n;
    lwork    = 5*n;
    PetscMalloc(5*n*sizeof(PetscScalar),&work);
    PetscMalloc(2*n*sizeof(PetscReal),&rwork);
    PetscMalloc(n*sizeof(PetscScalar),&eigs);
#if defined(PETSC_MISSING_LAPACK_GEEV)
    SETERRQ(PETSC_ERR_SUP,"GEEV - LAPACK routine is unavailable\nNot able to provide eigenvalues.");
#else
    {
      PetscScalar sdummy;
      LAgeev_("N","N",&n,array,&n,eigs,&sdummy,&idummy,&sdummy,&idummy,work,&lwork,rwork,&ierr);
    }
#endif
    if (ierr) SETERRQ1(PETSC_ERR_LIB,"Error in LAPACK routine %d",ierr);
    PetscFree(work);
    PetscFree(rwork);
    PetscMalloc(n*sizeof(int),&perm);
    for (i=0; i<n; i++) { perm[i] = i;}
    for (i=0; i<n; i++) { r[i]    = PetscRealPart(eigs[i]);}
    PetscSortRealWithPermutation(n,r,perm);
    for (i=0; i<n; i++) {
      r[i] = PetscRealPart(eigs[perm[i]]);
      c[i] = PetscImaginaryPart(eigs[perm[i]]);
    }
    PetscFree(perm);
    PetscFree(eigs);
  }
#endif
  if (size > 1) {
    MatRestoreArray(A,&array);
    MatDestroy(A);
  } else {
    MatRestoreArray(BA,&array);
  }
  MatDestroy(BA);
  return(0);
}
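/*
   Usage sketch (not part of eige.c): a minimal call sequence for the routine
   above.  It assumes the linear system of global size n has already been set
   up and solved through ksp (e.g. KSPSetOperators() followed by KSPSolve()),
   and that i and ierr are declared by the caller.  As the code above shows,
   r and c are filled only on the first process; PetscPrintf() likewise prints
   only from the first process.

     PetscReal *r,*c;
     ierr = PetscMalloc(n*sizeof(PetscReal),&r);CHKERRQ(ierr);
     ierr = PetscMalloc(n*sizeof(PetscReal),&c);CHKERRQ(ierr);
     ierr = KSPComputeEigenvaluesExplicitly(ksp,n,r,c);CHKERRQ(ierr);
     for (i=0; i<n; i++) {
       ierr = PetscPrintf(PETSC_COMM_WORLD,"eigenvalue %d: %g + %g i\n",i,r[i],c[i]);CHKERRQ(ierr);
     }
     ierr = PetscFree(r);CHKERRQ(ierr);
     ierr = PetscFree(c);CHKERRQ(ierr);
*/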