Actual source code: ramgpetsc.c

/*$Id: ramgpetsc.c,v 1.20 2001/08/24 16:04:09 bsmith Exp $*/

#include "src/mat/impls/aij/seq/aij.h"
#include "ramgfunc.h"
#include "petscfunc.h"
#include "petscksp.h"

/**************************************************************************/
/*                                                                        */
/*  PETSc - amg1r5 interface                                              */
/*  author: Domenico Lahaye (domenico.lahaye@cs.kuleuven.ac.be)           */
/*  May 2000                                                              */
/*  This interface allows amg1r5 to be called as a shell preconditioner. */
/*  amg1r5 is the algebraic multigrid code by John Ruge and Klaus         */
/*  Stueben [1,2].                                                        */
/*  [1] K. St\"{u}ben, "Algebraic Multigrid: An Introduction for Positive */
/*      Definite Problems with Applications", Tech. Rep. 53, German       */
/*      National Research Center for Information Technology (GMD),        */
/*      Schloss Birlinghoven, D-53754 Sankt-Augustin, Germany, March 1999 */
/*  [2] J. Ruge and K. St\"{u}ben, "Algebraic Multigrid" in "Multigrid    */
/*      Methods", S. McCormick, Ed., vol. 3 of Frontiers in Applied       */
/*      Mathematics, pp. 73--130, SIAM, Philadelphia, PA, 1987            */
/*                                                                        */
/**************************************************************************/

/**************************************************************************/
/*                                                                        */
/* Notes on the amg1r5 part of the interface                              */
/* The amg1r5 source code can be obtained from MGNET.                     */
/* Information on the various options in the code can be found at the     */
/* beginning of the source code.                                          */
/*                                                                        */
/**************************************************************************/

/*.. Implementation notes ..*/
/*   i) Memory management: amg1r5 is a Fortran 77 code and thus does not
        have dynamic memory management. Sufficiently large real and integer
        work arrays have to be allocated prior to calling the code.
     ii) We allocate memory for the right-hand side and the approximation
        in the setup phase of the preconditioner, although this is not
        strictly necessary. The setup phase doesn't require any rhs or
        approximation to the solution.                                   */
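
/*.. The RamgShellPC context itself is declared in petscfunc.h (not shown
     in this file). The sketch below is NOT the actual declaration; it is
     an assumption reconstructed from how the fields are used in this file,
     kept inside #if 0 so it is never compiled. ..*/
#if 0
typedef struct {
  double            *A;         /* matrix in RAMG's compressed skyline format */
  int               *IA,*JA;    /* 1-based row pointers and column indices */
  double            *U_APPROX;  /* work array for the approximate solution */
  double            *RHS;       /* work array for the right-hand side */
  int               *IG;        /* RAMG integer work array */
  struct RAMG_PARAM *PARAM;     /* RAMG parameter set, including array sizes */
  PetscTruth        arraysset;  /* PETSC_TRUE once the arrays above are allocated */
} RamgShellPC;
#endif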

/* ------------------------------------------------------------------- */
/*.. RamgShellPCCreate - This routine creates a user-defined
     preconditioner context.

     Output Parameter:
     shell - user-defined preconditioner context..*/

int RamgShellPCCreate(RamgShellPC **shell)
{
   RamgShellPC *newctx;
   int         ierr;

   ierr = PetscNew(RamgShellPC,&newctx);CHKERRQ(ierr);
   newctx->arraysset = PETSC_FALSE;
   *shell            = newctx;
   return(0);
}

/* ------------------------------------------------------------------- */
/*..RamgShellPCSetUp - This routine sets up a user-defined
    ramg preconditioner context.

    Input Parameters:
    shell - user-defined preconditioner context
    pmat  - preconditioner matrix

    Output Parameter:
    shell - fully set up user-defined preconditioner context

    This routine calls the setup phase of RAMG..*/

int RamgShellPCSetUp(RamgShellPC *shell, Mat pmat)
{
   int                numnodes, numnonzero, nnz_count;
   int                j, I, J, ncols_getrow, *cols_getrow, *diag;
   PetscScalar        *vals_getrow;
   MatInfo            info;
   Mat_SeqAIJ         *aij = (Mat_SeqAIJ*)pmat->data;

   /*..RAMG variables..*/
   struct RAMG_PARAM  *ramg_param;
   double             *u_approx, *rhs, *Asky;
   int                *ia, *ja;
   /*..RAMG names for number of unknowns and number of nonzeros..*/
   int                nnu, nna;
   /*..RAMG integer work array..*/
   int                *ig;
   /*..RAMG input parameters..*/
   /*....Class 1 parameters....*/
   int                nda, ndia, ndja, ndu, ndf, ndig, matrix;
   /*....Class 2 parameters....*/
   int                iswtch, iout, iprint;
   /*....Class 3 parameters....*/
   int                levelx, ifirst, ncyc, madapt, nrd, nsolco, nru;
   double             eps;
   /*....Class 4 parameters....*/
   int                nwt, ntr;
   double             ecg1, ecg2, ewt2;
   /*..RAMG output..*/
   int                ierr;

   /*..Get size and number of unknowns of preconditioner matrix..*/
   MatGetSize(pmat, &numnodes, &numnodes);
   MatGetInfo(pmat,MAT_LOCAL,&info);
   numnonzero = (int)info.nz_used;
   /*..Set number of unknowns and nonzeros in RAMG terminology..*/
   nnu    = numnodes;
   nna    = numnonzero;

   /*..Set RAMG Class 1 parameters..*/
   /*
      These are the sizes of all the arrays passed into RAMG.
      They need to be large enough, or RAMG will return an error
      telling how large they should be.
   */
   nda    = 3*nna + 5*nnu + 10;
   ndia   = (int)(2.5*nnu);
   ndja   = nda;
   ndu    = 5*nnu;
   ndf    = ndu;
   ndig   = 8*nnu;

   if (shell->arraysset) {
     PetscFree(shell->A);
     PetscFree(shell->IA);
     PetscFree(shell->JA);
     PetscFree(shell->U_APPROX);
     PetscFree(shell->RHS);
     PetscFree(shell->IG);
     PetscFree(shell->PARAM);
   }

   /*..Allocate memory for RAMG variables..*/
   PetscMalloc(nda *sizeof(double),&Asky);
   PetscMalloc(ndia*sizeof(int),&ia);
   PetscMalloc(ndja*sizeof(int),&ja);
   PetscMalloc(ndu *sizeof(double),&u_approx);
   PetscMalloc(ndf *sizeof(double),&rhs);
   PetscMalloc(ndig*sizeof(int),&ig);

   /*..Store PETSc matrix in compressed skyline format required by RAMG..*/
   nnz_count = 0;
   MatMarkDiagonal_SeqAIJ(pmat);
   diag = aij->diag;

   for (I=0; I<numnodes; I++) {
     ia[I]           = nnz_count + 1;

     /* put in diagonal entry first */
     ja[nnz_count]   = I + 1;
     Asky[nnz_count] = aij->a[diag[I]];
     nnz_count++;

     /* put in off-diagonals */
     ncols_getrow = aij->i[I+1] - aij->i[I];
     vals_getrow  = aij->a + aij->i[I];
     cols_getrow  = aij->j + aij->i[I];
     for (j=0; j<ncols_getrow; j++) {
       J = cols_getrow[j];
       if (J != I) {
         Asky[nnz_count] = vals_getrow[j];
         ja[nnz_count]   = J + 1;
         nnz_count++;
       }
     }
   }
   ia[numnodes] = nnz_count + 1;
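
   /*.. A small worked example of the layout produced above, derived
        directly from the loop: for the 3x3 matrix

              [ 4 -1  0 ]
              [-1  4 -1 ]
              [ 0 -1  4 ]

        stored row by row with the diagonal entry first and 1-based
        indices, the conversion yields

          Asky = { 4, -1,   4, -1, -1,   4, -1 }
          ja   = { 1,  2,   2,  1,  3,   3,  2 }
          ia   = { 1,  3,   6,  8 }

        i.e. row I starts at position ia[I] (1-based) and ia[numnodes]
        points one past the last stored entry. ..*/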

   /*..Allocate memory for RAMG parameters..*/
   PetscNew(struct RAMG_PARAM,&ramg_param);

   /*..Set RAMG parameters..*/
   RamgGetParam(pmat,ramg_param);
   /*..Set remaining RAMG Class 1 parameters..*/
   matrix = (*ramg_param).MATRIX;
   /*..Set RAMG Class 2 parameters..*/
   iswtch = (*ramg_param).ISWTCH;
   iout   = (*ramg_param).IOUT;
   iprint = (*ramg_param).IPRINT;
   /*..Set RAMG Class 3 parameters..*/
   levelx = (*ramg_param).LEVELX;
   ifirst = (*ramg_param).IFIRST;
   ncyc   = (*ramg_param).NCYC;
   eps    = (*ramg_param).EPS;
   madapt = (*ramg_param).MADAPT;
   nrd    = (*ramg_param).NRD;
   nsolco = (*ramg_param).NSOLCO;
   nru    = (*ramg_param).NRU;
   /*..Set RAMG Class 4 parameters..*/
   ecg1   = (*ramg_param).ECG1;
   ecg2   = (*ramg_param).ECG2;
   ewt2   = (*ramg_param).EWT2;
   nwt    = (*ramg_param).NWT;
   ntr    = (*ramg_param).NTR;
   /*..Reset ncyc so that only the setup is performed. This is done by
     setting the last digit of ncyc (the number of cycles performed)
     equal to zero..*/
   ncyc   = 1030;
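
   /*.. Reading the NCYC encoding used in this file (pieced together from
        the comments here and in RamgGetParam(); see the amg1r5 source for
        the authoritative definition): the leading digits "103" select the
        cycling strategy and the trailing digits give the number of cycles.
        So 1031 performs one cycle, 10310 would perform ten, and the 1030
        used here performs zero cycles, i.e. setup only. ..*/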

   PetscLogInfo((PetscObject)pmat,"\n\n");
   PetscLogInfo((PetscObject)pmat,"******************************************\n");
   PetscLogInfo((PetscObject)pmat,"*** RAMG Start Setup                   ***\n");
   PetscLogInfo((PetscObject)pmat,"******************************************\n");
   PetscLogInfo((PetscObject)pmat,"\n\n");

   /*..Call RAMG..*/
   amg1r5_(Asky, ia, ja, u_approx, rhs, ig, &nda, &ndia, &ndja, &ndu,
           &ndf, &ndig, &nnu, &matrix, &iswtch, &iout, &iprint, &levelx,
           &ifirst, &ncyc, &eps, &madapt, &nrd, &nsolco, &nru, &ecg1,
           &ecg2, &ewt2, &nwt, &ntr, &ierr);
   if (ierr) {
     if (ierr > 0 && ierr <= 6) {
       char *name[] = {"A","JA","IA","U","F","IG"};
       (*PetscErrorPrintf)("Error from RAMG setup, not enough array work space provided\n");
       (*PetscErrorPrintf)("Increase the one for %s\n",name[ierr-1]);
       (*PetscErrorPrintf)("A provided %d\n",nda);
       (*PetscErrorPrintf)("JA provided %d\n",ndja);
       (*PetscErrorPrintf)("IA provided %d\n",ndia);
       (*PetscErrorPrintf)("U provided %d\n",ndu);
       (*PetscErrorPrintf)("F provided %d\n",ndf);
       (*PetscErrorPrintf)("IG provided %d\n",ndig);
     }
     if (ierr == -12) {
       (*PetscErrorPrintf)("Error from RAMG setup, could be matrix is symmetric but you have not\n");
       (*PetscErrorPrintf)("indicated it with MatSetOption(mat,MAT_SYMMETRIC); or -matload_symmetric\n");
     }
     if (ierr == 14) {
       (*PetscErrorPrintf)("Error from RAMG setup, diagonal element not positive\n");
     }
     SETERRQ1(PETSC_ERR_LIB,"Error in RAMG setup. Error number %d",ierr);
   }

   PetscLogInfo((PetscObject)pmat,"\n\n");
   PetscLogInfo((PetscObject)pmat,"******************************************\n");
   PetscLogInfo((PetscObject)pmat,"*** RAMG End Setup                     ***\n");
   PetscLogInfo((PetscObject)pmat,"******************************************\n");
   PetscLogInfo((PetscObject)pmat,"\n\n");

   /*..Store RAMG output in PETSc context..*/
   shell->A         = Asky;
   shell->IA        = ia;
   shell->JA        = ja;
   shell->U_APPROX  = u_approx;
   shell->RHS       = rhs;
   shell->IG        = ig;
   shell->PARAM     = ramg_param;
   shell->arraysset = PETSC_TRUE;

   /*..Save Class 1 parameters..*/
   (*ramg_param).NDA    = nda;
   (*ramg_param).NDIA   = ndia;
   (*ramg_param).NDJA   = ndja;
   (*ramg_param).NDU    = ndu;
   (*ramg_param).NDF    = ndf;
   (*ramg_param).NDIG   = ndig;
   (*ramg_param).MATRIX = matrix;

   return(0);
}

/* ------------------------------------------------------------------- */
/*..RamgShellPCApply - This routine applies the AMG code as preconditioner

    Input Parameters:
    ctx - user-defined context, as set by PCShellSetApply()
    r   - input (residual) vector

    Output Parameter:
    z - preconditioned vector

   Notes:
   The PCSHELL preconditioner passes a void pointer as the first input
   argument.  This can be cast to whatever application-defined context
   the user has attached (via PCShellSetApply()).

   ..*/
/*..To apply AMG as a preconditioner we set:                        */
/*        i) the rhs-vector equal to the residual                   */
/*       ii) the start solution equal to zero                       */
/*  Implementation notes:                                           */
/*  For the residual (vector r) we access the values directly with  */
/*  VecGetArray. No explicit memory allocation for vals_getarray    */
/*  is thus needed, and the access is released again with           */
/*  VecRestoreArray. The values in vals_getarray are then copied    */
/*  into the rhs array of the AMG code using PetscMemcpy.           */
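
/*.. For reference, a minimal sketch of how a user would hook these
     routines into a PCSHELL by hand (assuming the PETSc 2.x-era
     PCShellSetApply() calling sequence that matches RamgShellPCApply's
     signature; pmat denotes the preconditioner matrix and is hypothetical
     here). Kept in #if 0 since it is an illustration, not code used by
     this file: ..*/
#if 0
{
  PC          pc;     /* obtained from the solver */
  Mat         pmat;   /* the preconditioner matrix */
  RamgShellPC *shell;
  int         ierr;

  ierr = PCSetType(pc,PCSHELL);CHKERRQ(ierr);
  ierr = RamgShellPCCreate(&shell);CHKERRQ(ierr);      /* create the RAMG context  */
  ierr = RamgShellPCSetUp(shell,pmat);CHKERRQ(ierr);   /* run the RAMG setup phase */
  ierr = PCShellSetApply(pc,RamgShellPCApply,(void*)shell);CHKERRQ(ierr);
}
#endif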

int RamgShellPCApply(void *ctx, Vec r, Vec z)
{
   int               ierr, I, numnodes, *cols;
   RamgShellPC       *shell = (RamgShellPC *) ctx;
   double            *u_approx, *rhs, *Asky, *vals_getarray;
   int               *ia, *ja;
   struct RAMG_PARAM *ramg_param;
   /*..RAMG integer work array..*/
   int               *ig;
   /*..RAMG input parameters..*/
   int               nnu;
   /*....Class 1 parameters....*/
   int               nda, ndia, ndja, ndu, ndf, ndig, matrix;
   /*....Class 2 parameters....*/
   int               iswtch, iout, iprint;
   /*....Class 3 parameters....*/
   int               levelx, ifirst, ncyc, madapt, nrd, nsolco, nru;
   double            eps;
   /*....Class 4 parameters....*/
   int               nwt, ntr;
   double            ecg1, ecg2, ewt2;

   /*..Get numnodes as the size of the input vector r..*/
   VecGetSize(r,&numnodes);
   nnu  = numnodes;

   /*..Get values from context..*/
   Asky       = shell->A;
   ia         = shell->IA;
   ja         = shell->JA;
   u_approx   = shell->U_APPROX;
   rhs        = shell->RHS;
   ig         = shell->IG;
   ramg_param = shell->PARAM;

   /*..Set the rhs of the call to ramg equal to the residual..*/
   VecGetArray(r,&vals_getarray);

   /*..Set rhs of call to ramg..*/
   PetscMemcpy(rhs, vals_getarray, numnodes * sizeof(*rhs));

   /*..Set initial solution of call to ramg to zero..*/
   for (I=0; I<numnodes; I++) {
     u_approx[I] = 0.;
   }

   /*..Set RAMG Class 1 parameters..*/
   nda    = (*ramg_param).NDA;
   ndia   = (*ramg_param).NDIA;
   ndja   = (*ramg_param).NDJA;
   ndu    = (*ramg_param).NDU;
   ndf    = (*ramg_param).NDF;
   ndig   = (*ramg_param).NDIG;
   matrix = (*ramg_param).MATRIX;
   /*..Set RAMG Class 2 parameters..*/
   iswtch = (*ramg_param).ISWTCH;
   iout   = (*ramg_param).IOUT;
   iprint = (*ramg_param).IPRINT;
   /*..Set RAMG Class 3 parameters..*/
   levelx = (*ramg_param).LEVELX;
   ifirst = (*ramg_param).IFIRST;
   ncyc   = (*ramg_param).NCYC;
   eps    = (*ramg_param).EPS;
   madapt = (*ramg_param).MADAPT;
   nrd    = (*ramg_param).NRD;
   nsolco = (*ramg_param).NSOLCO;
   nru    = (*ramg_param).NRU;
   /*..Set RAMG Class 4 parameters..*/
   ecg1   = (*ramg_param).ECG1;
   ecg2   = (*ramg_param).ECG2;
   ewt2   = (*ramg_param).EWT2;
   nwt    = (*ramg_param).NWT;
   ntr    = (*ramg_param).NTR;

   /*..Redefine iswtch to bypass setup and first..*/
   iswtch = 2;

   /*..Call RAMG..*/
   amg1r5_(Asky, ia, ja, u_approx, rhs, ig, &nda, &ndia, &ndja, &ndu,
           &ndf, &ndig, &nnu, &matrix, &iswtch, &iout, &iprint, &levelx,
           &ifirst, &ncyc, &eps, &madapt, &nrd, &nsolco, &nru, &ecg1,
           &ecg2, &ewt2, &nwt, &ntr, &ierr);
   if (ierr) {
     if (ierr == -1) {
       (*PetscErrorPrintf)("Error from RAMG, not enough array work space provided in NDA\n");
       (*PetscErrorPrintf)("NDA provided %d\n",nda);
     }

     SETERRQ1(PETSC_ERR_LIB,"Error from RAMG solve, number %d",ierr);
   }

   /*..Create auxiliary index vector..*/
   PetscMalloc(numnodes * sizeof(int),&cols);
   for (I=0; I<numnodes; I++) {
     cols[I] = I;
   }

   /*..Store values computed by RAMG into the PETSc vector z..*/
   VecSetValues(z,numnodes,cols,u_approx,INSERT_VALUES);
   VecAssemblyBegin(z);
   VecAssemblyEnd(z);

   /*..Restore PETSc rhs vector..*/
   VecRestoreArray(r, &vals_getarray);

   PetscFree(cols);

   return(0);
}

/* ------------------------------------------------------------------- */
/*..RamgShellPCDestroy - This routine destroys a user-defined
    preconditioner context.

    Input Parameter:
    shell - user-defined preconditioner context..*/

int RamgShellPCDestroy(RamgShellPC *shell)
{
  /*..Free PCShell context..*/
  if (shell->arraysset) {
    PetscFree(shell->A);
    PetscFree(shell->IA);
    PetscFree(shell->JA);
    PetscFree(shell->U_APPROX);
    PetscFree(shell->RHS);
    PetscFree(shell->IG);
    PetscFree(shell->PARAM);
  }
  /*..Free the context itself, whether or not the arrays were set..*/
  PetscFree(shell);
  return(0);
}

/* ------------------------------------------------------------------- */
int RamgGetParam(Mat A,struct RAMG_PARAM *ramg_param)
{
  int        cycles;
  PetscTruth flg;

  /*..Set default RAMG parameters..*/
  /*....Class 1 RAMG parameters....*/

  (*ramg_param).MATRIX    = 22;
  if (A->symmetric) {
    (*ramg_param).MATRIX -= 10;
  }

  /*....Class 2 RAMG parameters....*/
  (*ramg_param).ISWTCH    = 4;
  if (PetscLogPrintInfo) {
    (*ramg_param).IOUT    = 13;
  } else { /* no output by default */
    (*ramg_param).IOUT    = 1;
  }
  (*ramg_param).IPRINT    = 10606;
  PetscOptionsGetInt(PETSC_NULL,"-pc_ramg_iswtch",&(*ramg_param).ISWTCH,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-pc_ramg_iout",&(*ramg_param).IOUT,PETSC_NULL);
  /*....Class 3 RAMG parameters....*/
  (*ramg_param).LEVELX    = 0;
  (*ramg_param).IFIRST    = 10;
  /*......note: in the AMG-PETSc interface the number of cycles is required
          to equal one, to assure that in the PCApply routine AMG performs
          only one cycle......*/
  (*ramg_param).NCYC      = 1031;
  PetscOptionsGetInt(PETSC_NULL,"-pc_ramg_cycles",&cycles,&flg);
  if (flg) {
    double scale = pow(10.0,((double)(1 + (int)(log10(1.e-12+(double)cycles)))));
    (*ramg_param).NCYC = (int)(103*scale + cycles);
    PetscLogInfo(0,"RAMG using %d for cycles (number after 103 is number of cycles)",(*ramg_param).NCYC);
  }
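  /*.. Worked example of the encoding above: for -pc_ramg_cycles 25,
       log10(25) ~ 1.4, so scale = 10^(1+1) = 100 and
       NCYC = 103*100 + 25 = 10325, i.e. the digits "103" followed by
       the requested cycle count. ..*/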
  (*ramg_param).MADAPT    = 0;
  PetscOptionsGetInt(PETSC_NULL,"-pc_ramg_madapt",&(*ramg_param).MADAPT,PETSC_NULL);
  (*ramg_param).NRD       = 1234;
  (*ramg_param).NSOLCO    = 2;
  PetscOptionsGetInt(PETSC_NULL,"-pc_ramg_nsolc",&(*ramg_param).NSOLCO,PETSC_NULL);
  (*ramg_param).NRU       = 1256;
  (*ramg_param).EPS       = 1e-12;
  PetscOptionsGetReal(PETSC_NULL,"-pc_ramg_eps",&(*ramg_param).EPS,PETSC_NULL);
  /*....Class 4 RAMG parameters....*/
  (*ramg_param).NWT       = 2;
  PetscOptionsGetInt(PETSC_NULL,"-pc_ramg_nwt",&(*ramg_param).NWT,PETSC_NULL);
  (*ramg_param).NTR       = 0;
  PetscOptionsGetInt(PETSC_NULL,"-pc_ramg_ntr",&(*ramg_param).NTR,PETSC_NULL);
  (*ramg_param).ECG1      = 0.0;
  PetscOptionsGetReal(PETSC_NULL,"-pc_ramg_ecg1",&(*ramg_param).ECG1,PETSC_NULL);
  (*ramg_param).ECG2      = 0.25;
  PetscOptionsGetReal(PETSC_NULL,"-pc_ramg_ecg2",&(*ramg_param).ECG2,PETSC_NULL);
  (*ramg_param).EWT2      = 0.35;
  PetscOptionsGetReal(PETSC_NULL,"-pc_ramg_ewt2",&(*ramg_param).EWT2,PETSC_NULL);

  return(0);
}
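
/*.. Example invocation (the executable name is hypothetical, and the
     "-pc_type ramg" assumes the type is registered under the name ramg;
     the -pc_ramg_* option names are the ones queried above):

       ./myapp -pc_type ramg -pc_ramg_cycles 5 -pc_ramg_eps 1.e-10

     Any option not given on the command line keeps the default set
     in RamgGetParam(). ..*/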

/* -------------------------------------------------------------------------------------*/

#include "src/ksp/pc/pcimpl.h"

static int PCSetUp_RAMG(PC pc)
{
  int ierr;

  ierr = RamgShellPCSetUp((RamgShellPC*)(pc->data),pc->pmat);CHKERRQ(ierr);
  return(0);
}

static int PCApply_RAMG(PC pc,Vec x,Vec y)
{
  int ierr;

  ierr = RamgShellPCApply(pc->data,x,y);CHKERRQ(ierr);
  return(0);
}

static int PCDestroy_RAMG(PC pc)
{
  int ierr;

  ierr = RamgShellPCDestroy((RamgShellPC *)pc->data);CHKERRQ(ierr);
  return(0);
}

/*MC
     PCRAMG - Interface to the amg1r5 algebraic multigrid preconditioner

    amg1r5 is the algebraic multigrid code by John Ruge and Klaus
    Stueben [1,2].
    [1] K. St\"{u}ben, "Algebraic Multigrid: An Introduction for Positive
        Definite Problems with Applications", Tech. Rep. 53, German
        National Research Center for Information Technology (GMD),
        Schloss Birlinghoven, D-53754 Sankt-Augustin, Germany, March 1999
    [2] J. Ruge and K. St\"{u}ben, "Algebraic Multigrid" in "Multigrid
        Methods", S. McCormick, Ed., vol. 3 of Frontiers in Applied
        Mathematics, pp. 73--130, SIAM, Philadelphia, PA, 1987

   Options Database Keys:
+   -pc_ramg_iswtch -
.   -pc_ramg_iout -
.   -pc_ramg_cycles -
.   -pc_ramg_madapt -
.   -pc_ramg_nsolc -
.   -pc_ramg_eps -
.   -pc_ramg_nwt -
.   -pc_ramg_ntr -
.   -pc_ramg_ecg1 -
.   -pc_ramg_ecg2 -
-   -pc_ramg_ewt2 -

       See the amg1r5 documentation for the meaning of these parameters.

   Level: intermediate

  Concepts: algebraic multigrid

   Notes: Only implemented for some matrix formats. Not implemented in parallel.

          Requires symmetric, positive definite matrices.

          amg1r5 is no longer easily available; see http://www.mgnet.org/mgnet-codes-gmd.html.
          It has been replaced by Klaus Stueben's SAMG (represented in PETSc by PCSAMG).

  Contributed by Domenico Lahaye

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCSAMG

M*/
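
/*.. A minimal usage sketch (assuming PCRAMG is registered with the PC
     registry under the name "ramg", which is what PCCreate_RAMG below is
     designed for; kept in #if 0 since it is illustration, not part of
     this file): ..*/
#if 0
{
  PC  pc;   /* obtained from the solver, e.g. SLESGetPC() in PETSc 2.x */
  int ierr;

  ierr = PCSetType(pc,"ramg");CHKERRQ(ierr);   /* or -pc_type ramg on the command line */
}
#endif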

EXTERN_C_BEGIN
int PCCreate_RAMG(PC pc)
{
  int ierr;

  ierr = RamgShellPCCreate((RamgShellPC **)&(pc->data));CHKERRQ(ierr);
  pc->ops->destroy = PCDestroy_RAMG;
  pc->ops->apply   = PCApply_RAMG;
  pc->ops->setup   = PCSetUp_RAMG;
  return(0);
}
EXTERN_C_END

/*
      The AMG code uses a silly timing routine. This captures it.
*/
EXTERN_C_BEGIN
#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define ctime_ CTIME
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define ctime_ ctime
#endif
void ctime_(float *time)
{
  double ltime;
  PetscGetTime(&ltime);
  *time = (float) ltime;
}
EXTERN_C_END