Actual source code: pinit.c

  1: /*$Id: pinit.c,v 1.58 2001/08/10 03:28:54 bsmith Exp $*/
  2: /*
  3:    This file defines the initialization of PETSc, including PetscInitialize()
  4: */

 6:  #include "petsc.h"
 7:  #include "petscsys.h"

  9: EXTERN int PetscLogBegin_Private(void);

 11: /* -----------------------------------------------------------------------------------------*/

 13: extern FILE *petsc_history;

 15: EXTERN int PetscInitialize_DynamicLibraries(void);
 16: EXTERN int PetscFinalize_DynamicLibraries(void);
 17: EXTERN int PetscFListDestroyAll(void);
 18: EXTERN int PetscSequentialPhaseBegin_Private(MPI_Comm,int);
 19: EXTERN int PetscSequentialPhaseEnd_Private(MPI_Comm,int);
 20: EXTERN int PetscLogCloseHistoryFile(FILE **);

 22: /* this is used by the _, __, and ___ macros (see include/petscerror.h) */
 23: int __g0;

 25: /*
 26:        Checks the options database for initializations related to the 
 27:     PETSc components
 28: */
 31: int PetscOptionsCheckInitial_Components(void)
 32: {
 33:   MPI_Comm   comm = PETSC_COMM_WORLD;
 34:   PetscTruth flg1;
 35:   int        ierr;

 38:   /*
 39:      Publishing to the AMS
 40:   */
 41: #if defined(PETSC_HAVE_AMS)
 42:   PetscOptionsHasName(PETSC_NULL,"-ams_publish_objects",&flg1);
 43:   if (flg1) {
 44:     PetscAMSPublishAll = PETSC_TRUE;
 45:   }
 46:   PetscOptionsHasName(PETSC_NULL,"-ams_publish_stack",&flg1);
 47:   if (flg1) {
 48:     PetscStackPublish();
 49:   }
 50: #endif

 52:   PetscOptionsHasName(PETSC_NULL,"-help",&flg1);
 53:   if (flg1) {
 54: #if defined (PETSC_USE_LOG)
 55:     (*PetscHelpPrintf)(comm,"------Additional PETSc component options--------\n");
 56:     (*PetscHelpPrintf)(comm," -log_summary_exclude: <vec,mat,pc,ksp,snes>\n");
 57:     (*PetscHelpPrintf)(comm," -log_info_exclude: <null,vec,mat,pc,ksp,snes,ts>\n");
 58:     (*PetscHelpPrintf)(comm,"-----------------------------------------------\n");
 59: #endif
 60:   }
 61:   return(0);
 62: }

 66: /*@C
 67:       PetscInitializeNoArguments - Calls PetscInitialize() from C/C++ without
 68:         the command line arguments.

 70:    Collective
 71:   
 72:    Level: advanced

 74: .seealso: PetscInitialize(), PetscInitializeFortran()
 75: @*/
 76: int PetscInitializeNoArguments(void)
 77: {
 78:   int ierr,argc = 0;
 79:   char **args = 0;

 81:   PetscFunctionBegin;
 82:   ierr = PetscInitialize(&argc,&args,PETSC_NULL,PETSC_NULL);
 83:   PetscFunctionReturn(ierr);
 84: }
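A minimal usage sketch, not part of pinit.c itself: PetscInitializeNoArguments() is convenient when PETSc is embedded in an application that has no access to the command line. The main() below is illustrative and assumes the usual CHKERRQ() error handling.

#include "petsc.h"

int main(void)
{
  int ierr;

  ierr = PetscInitializeNoArguments();CHKERRQ(ierr);   /* MPI is started here if needed */
  /* ... create and use PETSc objects; no options come from the command line ... */
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}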

 86: EXTERN int        PetscOptionsCheckInitial_Private(void);
 87: extern PetscTruth PetscBeganMPI;

 89: /*
 90:        This function is the MPI reduction operation used to compute the maximum over
 91:    the first entry of each pair and the sum over the second entry.
 92: */
 93: MPI_Op PetscMaxSum_Op = 0;

 95: EXTERN_C_BEGIN
 98: void PetscMaxSum_Local(void *in,void *out,int *cnt,MPI_Datatype *datatype)
 99: {
100:   int *xin = (int *)in,*xout = (int*)out,i,count = *cnt;

102:   PetscFunctionBegin;
103:   if (*datatype != MPI_2INT) {
104:     (*PetscErrorPrintf)("Can only handle MPI_2INT data types");
105:     MPI_Abort(MPI_COMM_WORLD,1);
106:   }

108:   for (i=0; i<count; i++) {
109:     xout[2*i]    = PetscMax(xout[2*i],xin[2*i]);
110:     xout[2*i+1] += xin[2*i+1];
111:   }
112:   PetscStackPop;
113:   return;
114: }
115: EXTERN_C_END

117: /*
118:     Returns the max of the first entry owned by this processor and the
119: sum of the second entry.
120: */
123: int PetscMaxSum(MPI_Comm comm,const int nprocs[],int *max,int *sum)
124: {
125:   int size,rank,ierr,*work;
126: 
128:   MPI_Comm_size(comm,&size);
129:   MPI_Comm_rank(comm,&rank);
130:   PetscMalloc(2*size*sizeof(int),&work);
131:   MPI_Allreduce((void*)nprocs,work,size,MPI_2INT,PetscMaxSum_Op,comm);
132:   *max   = work[2*rank];
133:   *sum   = work[2*rank+1];
134:   PetscFree(work);
135:   return(0);
136: }
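A hedged sketch of how PetscMaxSum() is typically used during communication setup, not part of pinit.c: each process fills one (max-candidate, sum-candidate) pair per destination rank; afterwards rank i holds the maximum of the first entries and the sum of the second entries that all processes contributed for it. The routine name ExampleMaxSum and the array name nsend are illustrative, and the prototype of PetscMaxSum() is assumed to be visible through the PETSc headers.

#include "petsc.h"

int ExampleMaxSum(MPI_Comm comm)
{
  int ierr,size,i,*nsend,maxlen,total;

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMalloc(2*size*sizeof(int),&nsend);CHKERRQ(ierr);
  for (i=0; i<size; i++) {
    nsend[2*i]   = 0;   /* reduced with a max: e.g. length this process sends to rank i */
    nsend[2*i+1] = 0;   /* reduced with a sum: e.g. same length, to obtain the total received */
  }
  /* ... set the pairs for the ranks this process actually sends to ... */
  ierr = PetscMaxSum(comm,nsend,&maxlen,&total);CHKERRQ(ierr);
  /* maxlen = largest single contribution destined for this rank,
     total  = sum of all contributions destined for this rank      */
  ierr = PetscFree(nsend);CHKERRQ(ierr);
  return 0;
}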

138: /* ----------------------------------------------------------------------------*/
139: MPI_Op PetscADMax_Op = 0;

141: EXTERN_C_BEGIN
144: void PetscADMax_Local(void *in,void *out,int *cnt,MPI_Datatype *datatype)
145: {
146:   PetscScalar *xin = (PetscScalar *)in,*xout = (PetscScalar*)out;
147:   int         i,count = *cnt;

149:   PetscFunctionBegin;
150:   if (*datatype != MPIU_2SCALAR) {
151:     (*PetscErrorPrintf)("Can only handle MPIU_2SCALAR data (i.e. double or complex) types");
152:     MPI_Abort(MPI_COMM_WORLD,1);
153:   }

155:   for (i=0; i<count; i++) {
156:     if (PetscRealPart(xout[2*i]) < PetscRealPart(xin[2*i])) {
157:       xout[2*i]   = xin[2*i];
158:       xout[2*i+1] = xin[2*i+1];
159:     }
160:   }

162:   PetscStackPop;
163:   return;
164: }
165: EXTERN_C_END

167: MPI_Op PetscADMin_Op = 0;

169: EXTERN_C_BEGIN
172: void PetscADMin_Local(void *in,void *out,int *cnt,MPI_Datatype *datatype)
173: {
174:   PetscScalar *xin = (PetscScalar *)in,*xout = (PetscScalar*)out;
175:   int         i,count = *cnt;

177:   PetscFunctionBegin;
178:   if (*datatype != MPIU_2SCALAR) {
179:     (*PetscErrorPrintf)("Can only handle MPIU_2SCALAR data (i.e. double or complex) types");
180:     MPI_Abort(MPI_COMM_WORLD,1);
181:   }

183:   for (i=0; i<count; i++) {
184:     if (PetscRealPart(xout[2*i]) > PetscRealPart(xin[2*i])) {
185:       xout[2*i]   = xin[2*i];
186:       xout[2*i+1] = xin[2*i+1];
187:     }
188:   }

190:   PetscStackPop;
191:   return;
192: }
193: EXTERN_C_END
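An illustrative sketch, not found in pinit.c, of what these reduction operations provide once PetscInitialize() below has created PetscADMax_Op and the MPIU_2SCALAR datatype (their declarations are assumed to be visible): an MPI_Allreduce over (value, companion) pairs keeps, for each pair, the value with the largest real part together with its companion entry. The function and variable names are made up.

#include "petsc.h"

int ExampleADMax(MPI_Comm comm,PetscScalar myvalue,PetscScalar companion)
{
  int         ierr;
  PetscScalar in[2],out[2];

  in[0] = myvalue;     /* quantity whose (real part) maximum is sought */
  in[1] = companion;   /* data carried along with the maximizing value */
  ierr  = MPI_Allreduce(in,out,1,MPIU_2SCALAR,PetscADMax_Op,comm);CHKERRQ(ierr);
  /* out[0] = maximal value over all processes, out[1] = its companion */
  return 0;
}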
194: /* ---------------------------------------------------------------------------------------*/

196: #if defined(PETSC_USE_COMPLEX)
197: MPI_Op PetscSum_Op = 0;

199: EXTERN_C_BEGIN
202: void PetscSum_Local(void *in,void *out,int *cnt,MPI_Datatype *datatype)
203: {
204:   PetscScalar *xin = (PetscScalar *)in,*xout = (PetscScalar*)out;
205:   int         i,count = *cnt;

207:   PetscFunctionBegin;
208:   if (*datatype != MPIU_SCALAR) {
209:     (*PetscErrorPrintf)("Can only handle MPIU_SCALAR data (i.e. double or complex) types");
210:     MPI_Abort(MPI_COMM_WORLD,1);
211:   }

213:   for (i=0; i<count; i++) {
214:     xout[i] += xin[i];
215:   }

217:   PetscStackPop;
218:   return;
219: }
220: EXTERN_C_END
221: #endif

223: static int  PetscGlobalArgc   = 0;
224: static char **PetscGlobalArgs = 0;

228: /*@C
229:    PetscGetArgs - Allows you to access the raw command line arguments anywhere
230:      after PetscInitialize() is called but before PetscFinalize().

232:    Not Collective

234:    Output Parameters:
235: +  argc - the number of command line arguments
236: -  args - the command line arguments

238:    Level: intermediate

240:    Notes:
241:       This is usually used to pass the command line arguments into other libraries
242:    that are called internally deep in PETSc or the application.

244:    Concepts: command line arguments
245:    
246: .seealso: PetscFinalize(), PetscInitializeFortran()

248: @*/
249: int PetscGetArgs(int *argc,char ***args)
250: {
252:   if (!PetscGlobalArgs) {
253:     SETERRQ(1,"PetscGetArgs() must be called after PetscInitialize() and before PetscFinalize()");
254:   }
255:   *argc = PetscGlobalArgc;
256:   *args = PetscGlobalArgs;
257:   return(0);
258: }
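A short sketch of the use described in the notes above, not part of pinit.c: a library routine called somewhere below PetscInitialize() retrieves the command line in order to hand it to another package. MyPackageInitialize() is a hypothetical name.

#include "petsc.h"

int MyPackageInitialize(void)
{
  int  ierr,argc;
  char **args;

  ierr = PetscGetArgs(&argc,&args);CHKERRQ(ierr);
  /* pass argc/args on to the other package's own initialization routine */
  return 0;
}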

262: /*@C
263:    PetscInitialize - Initializes the PETSc database and MPI. 
264:    PetscInitialize() calls MPI_Init() if that has yet to be called,
265:    so this routine should always be called near the beginning of 
266:    your program -- usually the very first line! 

268:    Collective on MPI_COMM_WORLD or PETSC_COMM_WORLD if it has been set

270:    Input Parameters:
271: +  argc - the number of command line arguments
272: .  args - the command line arguments
273: .  file - [optional] PETSc database file, defaults to ~username/.petscrc
274:           (use PETSC_NULL for default)
275: -  help - [optional] Help message to print, use PETSC_NULL for no message

277:    Options Database Keys:
278: +  -start_in_debugger [noxterm,dbx,xdb,gdb,...] - Starts program in debugger
279: .  -on_error_attach_debugger [noxterm,dbx,xdb,gdb,...] - Starts debugger when error detected
280: .  -on_error_emacs <machinename> - Causes emacsclient to jump to the error file
281: .  -debugger_nodes [node1,node2,...] - Indicates nodes to start in debugger
282: .  -debugger_pause [sleeptime] (in seconds) - Pauses debugger
283: .  -stop_for_debugger - Print message on how to attach debugger manually to 
284:                         process and wait (-debugger_pause) seconds for attachment
285: .  -trmalloc - Indicates use of PETSc error-checking malloc
286: .  -trmalloc no - Indicates not to use error-checking malloc
287: .  -fp_trap - Stops on floating point exceptions (Note that on the
288:               IBM RS6000 this slows code by at least a factor of 10.)
289: .  -no_signal_handler - Indicates not to trap error signals
290: .  -shared_tmp - indicates /tmp directory is shared by all processors
291: .  -not_shared_tmp - each processor has own /tmp
292: .  -tmp - alternative name of /tmp directory
293: .  -get_total_flops - returns total flops done by all processors
294: -  -get_resident_set_size - Print memory usage at end of run

296:    Options Database Keys for Profiling:
297:    See the Profiling chapter of the users manual for details.
298: +  -log_trace [filename] - Print traces of all PETSc calls
299:         to the screen (useful to determine where a program
300:         hangs without running in the debugger).  See PetscLogTraceBegin().
301: .  -log_info <optional filename> - Prints verbose information to the screen
302: -  -log_info_exclude <null,vec,mat,pc,ksp,snes,ts> - Excludes some of the verbose messages

304:    Environmental Variables:
305: +   PETSC_TMP - alternative tmp directory
306: .   PETSC_SHARED_TMP - tmp is shared by all processes
307: .   PETSC_NOT_SHARED_TMP - each process has its own private tmp
308: .   PETSC_VIEWER_SOCKET_PORT - socket number to use for socket viewer
309: -   PETSC_VIEWER_SOCKET_MACHINE - machine to use for socket viewer to connect to


312:    Level: beginner

314:    Notes:
315:    If for some reason you must call MPI_Init() separately, call
316:    it before PetscInitialize().

318:    Fortran Version:
319:    In Fortran this routine has the format
320: $       call PetscInitialize(file,ierr)

322: +   ierr - error return code
323: -   file - [optional] PETSc database file name, defaults to 
324:            ~username/.petscrc (use PETSC_NULL_CHARACTER for default)
325:            
326:    Important Fortran Note:
327:    In Fortran, you MUST use PETSC_NULL_CHARACTER to indicate a
328:    null character string; you CANNOT just use PETSC_NULL as 
329:    in the C version.  See the users manual for details.


332:    Concepts: initializing PETSc
333:    
334: .seealso: PetscFinalize(), PetscInitializeFortran(), PetscGetArgs()

336: @*/
337: int PetscInitialize(int *argc,char ***args,const char file[],const char help[])
338: {
339:   int        ierr,flag,dummy_tag,size;
340:   PetscTruth flg;
341:   char       hostname[256];

344:   if (PetscInitializeCalled) return(0);

346:   PetscOptionsCreate();

348:   /*
349:      We initialize the program name here (before MPI_Init()) because MPICH has a bug
350:      that sets args[0] on all processors to args[0] from the first processor.
351:   */
352:   if (argc && *argc) {
353:     PetscSetProgramName(**args);
354:   } else {
355:     PetscSetProgramName("Unknown Name");
356:   }


359:   MPI_Initialized(&flag);
360:   if (!flag) {
361:     MPI_Init(argc,args);
362:     PetscBeganMPI = PETSC_TRUE;
363:   }
364:   if (argc && args) {
365:     PetscGlobalArgc = *argc;
366:     PetscGlobalArgs = *args;
367:   }
368:   PetscInitializeCalled = PETSC_TRUE;

370:   /* Also initialize the initial datestamp. Done after init due to a bug in MPICH-GM? */
371:   PetscSetInitialDate();

373:   if (!PETSC_COMM_WORLD) {
374:     PETSC_COMM_WORLD = MPI_COMM_WORLD;
375:   }

377:   MPI_Comm_rank(MPI_COMM_WORLD,&PetscGlobalRank);
378:   MPI_Comm_size(MPI_COMM_WORLD,&PetscGlobalSize);

380: #if defined(PETSC_USE_COMPLEX)
381:   /* 
382:      Initialize the global complex variable; with shared libraries the
383:      constructors for global variables are not called, at least on IRIX,
384:      so it must be set explicitly here.
385:   */
386:   {
387:     PetscScalar ic(0.0,1.0);
388:     PETSC_i = ic;
389:   }
390:   MPI_Type_contiguous(2,MPIU_REAL,&MPIU_COMPLEX);
391:   MPI_Type_commit(&MPIU_COMPLEX);
392:   MPI_Op_create(PetscSum_Local,1,&PetscSum_Op);
393: #endif

395:   /*
396:      Create the PETSc MPI reduction operation that takes the maximum over the
397:      first entry of each pair and the sum over the second entry.
398:   */
399:   MPI_Op_create(PetscMaxSum_Local,1,&PetscMaxSum_Op);

401:   MPI_Type_contiguous(2,MPIU_SCALAR,&MPIU_2SCALAR);
402:   MPI_Type_commit(&MPIU_2SCALAR);
403:   MPI_Op_create(PetscADMax_Local,1,&PetscADMax_Op);
404:   MPI_Op_create(PetscADMin_Local,1,&PetscADMin_Op);

406:   /*
407:      Build the options database and check for user setup requests
408:   */
409:   PetscOptionsInsert(argc,args,file);

411:   /*
412:      Print main application help message
413:   */
414:   PetscOptionsHasName(PETSC_NULL,"-help",&flg);
415:   if (help && flg) {
416:     PetscPrintf(PETSC_COMM_WORLD,help);
417:   }
418:   PetscOptionsCheckInitial_Private();

420:   /* SHOULD PUT IN GUARDS: Make sure logging is initialized, even if we do not print it out */
421:   PetscLogBegin_Private();

423:   /*
424:      Initialize PETSC_COMM_SELF and PETSC_COMM_WORLD as MPI_Comms with the PETSc attribute.
425:
426:      This is delayed until here because PetscMalloc() may not have been
427:      set up earlier.
428:   */
429:   PetscCommDuplicate(MPI_COMM_SELF,&PETSC_COMM_SELF,&dummy_tag);
430:   PetscCommDuplicate(PETSC_COMM_WORLD,&PETSC_COMM_WORLD,&dummy_tag);

432:   /*
433:      Load the dynamic libraries (on machines that support them); this registers all
434:      the solvers etc. (On non-dynamic machines this initializes the PetscDraw and PetscViewer classes.)
435:   */
436:   PetscInitialize_DynamicLibraries();

438:   /*
439:      Log basic information about the parallel run
440:   */
441:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
442:   PetscLogInfo(0,"PetscInitialize:PETSc successfully started: number of processors = %d\n",size);
443:   PetscGetHostName(hostname,256);
444:   PetscLogInfo(0,"PetscInitialize:Running on machine: %s\n",hostname);

446:   ierr = PetscOptionsCheckInitial_Components();CHKERRQ(ierr);

448:   PetscFunctionReturn(0);
449: }
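A typical calling sequence, shown here only as a sketch: PetscInitialize() is the first PETSc call in the program and PetscFinalize() the last. The help string and the option name -my_option are illustrative.

#include "petsc.h"

static char help[] = "Demonstrates PetscInitialize() and PetscFinalize().\n";

int main(int argc,char **args)
{
  int        ierr;
  PetscTruth flg;

  ierr = PetscInitialize(&argc,&args,PETSC_NULL,help);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(PETSC_NULL,"-my_option",&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(PETSC_COMM_WORLD,"-my_option was given\n");CHKERRQ(ierr);
  }
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}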


454: /*@C 
455:    PetscFinalize - Handles options that take effect at the conclusion
456:    of the program and calls MPI_Finalize() (if PetscInitialize() started MPI).

458:    Collective on PETSC_COMM_WORLD

460:    Options Database Keys:
461: +  -options_table - Calls PetscOptionsPrint()
462: .  -options_left - Prints unused options that remain in the database
463: .  -options_left no - Does not print unused options that remain in the database
464: .  -mpidump - Calls PetscMPIDump()
465: .  -trdump - Calls PetscTrDump()
466: .  -trinfo - Prints total memory usage
467: .  -trdebug - Calls malloc_debug(2) to activate memory
468:         allocation diagnostics (used by PETSC_ARCH=sun4, 
469:         BOPT=[g,g_c++,g_complex] only!)
470: -  -trmalloc_log - Prints summary of memory usage

472:    Options Database Keys for Profiling:
473:    See the Profiling chapter of the users manual for details.
474: +  -log_summary [filename] - Prints summary of flop and timing
475:         information to screen. If the filename is specified the
476:         summary is written to the file. (for code compiled with 
477:         PETSC_USE_LOG).  See PetscLogPrintSummary().
478: .  -log_all [filename] - Logs extensive profiling information
479:         (for code compiled with PETSC_USE_LOG). See PetscLogDump(). 
480: .  -log [filename] - Logs basic profiling information (for
481:         code compiled with PETSC_USE_LOG).  See PetscLogDump().
482: .  -log_sync - Log the synchronization in scatters, inner products
483:         and norms
484: -  -log_mpe [filename] - Creates a logfile viewable by the 
485:       utility Upshot/Nupshot (in MPICH distribution)

487:    Level: beginner

489:    Note:
490:    See PetscInitialize() for more general runtime options.

492: .seealso: PetscInitialize(), PetscOptionsPrint(), PetscTrDump(), PetscMPIDump(), PetscEnd()
493: @*/
494: int PetscFinalize(void)
495: {
496:   int            ierr,rank,nopt;
497:   PetscLogDouble rss;
498:   PetscTruth     flg1,flg2,flg3;
499: 

502:   if (!PetscInitializeCalled) {
503:     (*PetscErrorPrintf)("PETSc ERROR: PetscInitialize() must be called before PetscFinalize()\n");
504:     return(0);
505:   }
506:   /* Destroy auxiliary packages */
507:   PetscViewerMathematicaFinalizePackage();
508:   PetscPLAPACKFinalizePackage();

510:   /*
511:      Destroy all the function registration lists created
512:   */
513:   PetscFinalize_DynamicLibraries();


516:   PetscOptionsHasName(PETSC_NULL,"-get_resident_set_size",&flg1);
517:   MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
518:   if (flg1) {
519:     PetscGetResidentSetSize(&rss);
520:     if (rss) {
521:       PetscPrintf(PETSC_COMM_SELF,"[%d] Size of entire process memory %d\n",rank,(int)rss);
522:     } else {
523:       PetscPrintf(PETSC_COMM_SELF,"[%d] OS does not support computing entire process memory\n",rank);
524:     }
525:   }

527: #if defined(PETSC_USE_LOG)
528:   PetscOptionsHasName(PETSC_NULL,"-get_total_flops",&flg1);
529:   if (flg1) {
530:     PetscLogDouble flops = 0;
531:     MPI_Reduce(&_TotalFlops,&flops,1,MPI_DOUBLE,MPI_SUM,0,PETSC_COMM_WORLD);
532:     PetscPrintf(PETSC_COMM_WORLD,"Total flops over all processors %g\n",flops);
533:   }
534: #endif

536:   /*
537:      Free all objects registered with PetscObjectRegisterDestroy() such as
538:     PETSC_VIEWER_XXX_().
539:   */
540:   PetscObjectRegisterDestroyAll();

542: #if defined(PETSC_USE_STACK)
543:   if (PetscStackActive) {
544:     PetscStackDestroy();
545:   }
546: #endif

548: #if defined(PETSC_USE_LOG)
549:   {
550:     char mname[PETSC_MAX_PATH_LEN];
551: #if defined(PETSC_HAVE_MPE)
552:     mname[0] = 0;
553:     PetscOptionsGetString(PETSC_NULL,"-log_mpe",mname,PETSC_MAX_PATH_LEN,&flg1);
554:     if (flg1){
555:       if (mname[0]) {PetscLogMPEDump(mname);}
556:       else          {PetscLogMPEDump(0);}
557:     }
558: #endif
559:     mname[0] = 0;
560:     PetscOptionsGetString(PETSC_NULL,"-log_summary",mname,PETSC_MAX_PATH_LEN,&flg1);
561:     if (flg1) {
562:       if (mname[0])  {PetscLogPrintSummary(PETSC_COMM_WORLD,mname);}
563:       else           {PetscLogPrintSummary(PETSC_COMM_WORLD,0);}
564:     }

566:     mname[0] = 0;
567:     PetscOptionsGetString(PETSC_NULL,"-log_all",mname,PETSC_MAX_PATH_LEN,&flg1);
568:     PetscOptionsGetString(PETSC_NULL,"-log",mname,PETSC_MAX_PATH_LEN,&flg2);
569:     if (flg1 || flg2){
570:       if (mname[0]) PetscLogDump(mname);
571:       else          PetscLogDump(0);
572:     }
573:     PetscLogDestroy();
574:   }
575: #endif
576:   PetscOptionsHasName(PETSC_NULL,"-no_signal_handler",&flg1);
577:   if (!flg1) { PetscPopSignalHandler();}
578:   PetscOptionsHasName(PETSC_NULL,"-mpidump",&flg1);
579:   if (flg1) {
580:     PetscMPIDump(stdout);
581:   }
582:   PetscOptionsHasName(PETSC_NULL,"-trdump",&flg1);
583:   PetscOptionsHasName(PETSC_NULL,"-options_table",&flg2);
584:   if (flg2) {
585:     if (!rank) {PetscOptionsPrint(stdout);}
586:   }

588:   /* to prevent PETSc -options_left from warning */
589:   ierr = PetscOptionsHasName(PETSC_NULL,"-nox_warning",&flg1);CHKERRQ(ierr);
590:   PetscOptionsHasName(PETSC_NULL,"-error_output_stderr",&flg1);

592:   PetscOptionsGetLogical(PETSC_NULL,"-options_left",&flg2,&flg1);
593:   PetscOptionsAllUsed(&nopt);
594:   if (flg2) {
595:     PetscOptionsPrint(stdout);
596:     if (!nopt) {
597:       PetscPrintf(PETSC_COMM_WORLD,"There are no unused options.\n");
598:     } else if (nopt == 1) {
599:       PetscPrintf(PETSC_COMM_WORLD,"There is one unused database option. It is:\n");
600:     } else {
601:       PetscPrintf(PETSC_COMM_WORLD,"There are %d unused database options. They are:\n",nopt);
602:     }
603:   }
604: #if defined(PETSC_USE_BOPT_g)
605:   if (nopt && !flg1 && !flg2) {
606:     PetscPrintf(PETSC_COMM_WORLD,"WARNING! There are options you set that were not used!\n");
607:     PetscPrintf(PETSC_COMM_WORLD,"WARNING! could be spelling mistake, etc!\n");
608:     PetscOptionsLeft();
609:   } else if (nopt && flg2) {
610: #else 
611:   if (nopt && flg2) {
612: #endif
613:     PetscOptionsLeft();
614:   }

616:   PetscOptionsHasName(PETSC_NULL,"-log_history",&flg1);
617:   if (flg1) {
618:     PetscLogCloseHistoryFile(&petsc_history);
619:     petsc_history = 0;
620:   }


623:   /*
624:        Destroy PETSC_COMM_SELF/WORLD as a MPI_Comm with the PETSc 
625:      attribute.
626:   */
627:   PetscCommDestroy(&PETSC_COMM_SELF);
628:   PetscCommDestroy(&PETSC_COMM_WORLD);

630:   /*
631:        Free all the registered create functions, such as KSPList, VecList, SNESList, etc
632:   */
633:   PetscFListDestroyAll();

635:   PetscOptionsHasName(PETSC_NULL,"-trdump",&flg1);
636:   PetscOptionsHasName(PETSC_NULL,"-trinfo",&flg2);
637:   PetscOptionsHasName(PETSC_NULL,"-trmalloc_log",&flg3);
638:   if (flg1) {
639:     char fname[256];
640:     FILE *fd;
641: 
642:     fname[0] = 0;
643:     PetscOptionsGetString(PETSC_NULL,"-trdump",fname,250,&flg1);
644:     if (flg1 && fname[0]) {
645:       char sname[256];

647:       sprintf(sname,"%s_%d",fname,rank);
648:       fd   = fopen(sname,"w"); if (!fd) SETERRQ1(1,"Cannot open log file: %s",sname);
649:       PetscTrDump(fd);
650:       fclose(fd);
651:     } else {
652:       MPI_Comm local_comm;

654:       MPI_Comm_dup(MPI_COMM_WORLD,&local_comm);
655:       PetscSequentialPhaseBegin_Private(local_comm,1);
656:         PetscTrDump(stdout);
657:       PetscSequentialPhaseEnd_Private(local_comm,1);
658:       MPI_Comm_free(&local_comm);
659:     }
660:   } else if (flg2) {
661:     MPI_Comm       local_comm;
662:     PetscLogDouble maxm;

664:     MPI_Comm_dup(MPI_COMM_WORLD,&local_comm);
665:     PetscTrSpace(PETSC_NULL,PETSC_NULL,&maxm);
666:     PetscSequentialPhaseBegin_Private(local_comm,1);
667:       printf("[%d] Maximum memory used %g\n",rank,maxm);
668:     PetscSequentialPhaseEnd_Private(local_comm,1);
669:     MPI_Comm_free(&local_comm);
670:   }
671:   if (flg3) {
672:     char fname[256];
673:     FILE *fd;
674: 
675:     fname[0] = 0;
676:     PetscOptionsGetString(PETSC_NULL,"-trmalloc_log",fname,250,&flg1);
677:     if (flg1 && fname[0]) {
678:       char sname[256];

680:       sprintf(sname,"%s_%d",fname,rank);
681:       fd   = fopen(sname,"w"); if (!fd) SETERRQ1(1,"Cannot open log file: %s",sname);
682:       PetscTrLogDump(fd);
683:       fclose(fd);
684:     } else {
685:       PetscTrLogDump(stdout);
686:     }
687:   }
688:   /* Can be destroyed only after all the options are used */
689:   PetscOptionsDestroy();

691:   PetscGlobalArgc = 0;
692:   PetscGlobalArgs = 0;

694:   PetscLogInfo(0,"PetscFinalize:PETSc successfully ended!\n");
695:   if (PetscBeganMPI) {
696:     MPI_Finalize();
697:   }

699: /*

701:      Note: In certain cases PETSC_COMM_WORLD is never MPI_Comm_free()ed because
702:    the communicator has outstanding requests on it, specifically when the flag
703:    PETSC_HAVE_BROKEN_REQUEST_FREE is set (for the IBM MPI implementation). See
704:    src/vec/utils/vpscat.c. Because of this the memory allocated in PetscCommDuplicate()
705:    is never freed as it should be, and one may obtain messages of the form
706:    [ 1] 8 bytes PetscCommDuplicate() line 645 in src/sys/src/mpiu.c indicating the
707:    memory was not freed.

709: */
710:   PetscClearMalloc();
711:   PetscInitializeCalled = PETSC_FALSE;
712:   PetscFunctionReturn(0);
713: }

715: /*
716:      These routines may be used in code that is to be processed with ADIC
717: */

721: /*@C
722:       PetscGlobalMax - Computes the maximum value over several processors

724:      Collective on MPI_Comm

726:    Input Parameters:
727: +   local - the local value
728: -   comm - the communicator over which the maximum is computed

730:    Output Parameter:
731: .   result - the maximum value
732:   
733:    Level: intermediate

735:    Notes:
736:      These functions are to be used inside user functions that are to be processed with 
737:    ADIC. PETSc will automatically provide differentiated versions of these functions

739: .seealso: PetscGlobalMin(), PetscGlobalSum()
740: @*/
741: int PetscGlobalMax(PetscReal* local,PetscReal* result,MPI_Comm comm)
742: {
743:   return MPI_Allreduce(local,result,1,MPIU_REAL,MPI_MAX,comm);
744: }

748: /*@C
749:       PetscGlobalMin - Computes the minimum value over several processors

751:      Collective on MPI_Comm

753:    Input Parameters:
754: +   local - the local value
755: -   comm - the communicator over which the minimum is computed

757:    Output Parameter:
758: .   result - the minimum value
759:   
760:    Level: intermediate

762:    Notes:
763:      These functions are to be used inside user functions that are to be processed with 
764:    ADIC. PETSc will automatically provide differentiated versions of these functions

766: .seealso: PetscGlobalMax(), PetscGlobalSum()
767: @*/
768: int PetscGlobalMin(PetscReal* local,PetscReal* result,MPI_Comm comm)
769: {
770:   return MPI_Allreduce(local,result,1,MPIU_REAL,MPI_MIN,comm);
771: }

775: /*@C
776:       PetscGlobalSum - Computes the sum over several processors

778:      Collective on MPI_Comm

780:    Input Parameters:
781: +   local - the local value
782: -   comm - the communicator over which the sum is computed

784:    Output Parameter:
785: .   result - the sum
786:   
787:    Level: intermediate

789:    Notes:
790:      These functions are to be used inside user functions that are to be processed with 
791:    ADIC. PETSc will automatically provide differentiated versions of these functions

793: .seealso: PetscGlobalMin(), PetscGlobalMax()
794: @*/
795: int PetscGlobalSum(PetscScalar* local,PetscScalar* result,MPI_Comm comm)
796: {
797:   return MPI_Allreduce(local,result,1,MPIU_SCALAR,PetscSum_Op,comm);
798: }
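A hedged sketch of the intended use described above, not part of pinit.c: inside a user routine (for example one that will later be processed with ADIC), local contributions are combined across the communicator. The routine and variable names are illustrative.

#include "petsc.h"

int ExampleGlobalReductions(MPI_Comm comm,PetscReal localmax,PetscScalar localsum)
{
  int         ierr;
  PetscReal   globalmax;
  PetscScalar globalsum;

  ierr = PetscGlobalMax(&localmax,&globalmax,comm);CHKERRQ(ierr);
  ierr = PetscGlobalSum(&localsum,&globalsum,comm);CHKERRQ(ierr);
  ierr = PetscPrintf(comm,"global max %g  global sum %g\n",(double)globalmax,(double)PetscRealPart(globalsum));CHKERRQ(ierr);
  return 0;
}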