Actual source code: pipeline.c

/*$Id: pipeline.c,v 1.29 2001/09/07 20:08:55 bsmith Exp $*/

/*
   Vector pipeline routines. These routines were contributed by
   Victor Eijkhout while working at UCLA and UTK.

   Note: they are not completely PETScified. For example, the PETSCHEADER in
   the pipeline object is not completely filled in, so the pipeline object
   cannot be used as a true PETSc object. There are also some memory leaks.
   This code attempts to reuse some of the VecScatter internal code and is
   therefore somewhat tricky.
*/

#include "vecimpl.h"
#include "petscsys.h"
#include "src/mat/impls/aij/mpi/mpiaij.h"

typedef int (*PipelineFunction)(int,PetscObject);

struct _p_VecPipeline {
  PETSCHEADER(int)
  VecScatter             scatter;
  PipelineType           pipe_type;   /* duplicated in the subdomain structure */
  VecScatter_MPI_General *upto,*upfrom,*dnto,*dnfrom;
  VecScatter_MPI_General *scatterto,*scatterfrom;
  PipelineFunction       upfn,dnfn;
  PetscObject            aux_data;
  PetscObject            custom_pipe_data;
  int                    setupcalled;
  int                    (*setup)(VecPipeline,PetscObject,PetscObject*);
};

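/*
   VecPipelineCreateUpDown - Makes shallow copies of a scatter's to/from data
   structures. The copies share the request, local-scatter, and value arrays
   of the original scatter but get their own status arrays, so that they can
   later be restricted to the "up" or "down" subset of processors.
*/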
static int VecPipelineCreateUpDown(VecScatter scatter,VecScatter_MPI_General **to,VecScatter_MPI_General **from)
{
  VecScatter_MPI_General *gen_to,*gen_from,*pipe_to,*pipe_from;
  int                    ierr;

  gen_to    = (VecScatter_MPI_General*)scatter->todata;
  gen_from  = (VecScatter_MPI_General*)scatter->fromdata;

  PetscNew(VecScatter_MPI_General,&pipe_to);
  PetscMemzero(pipe_to,sizeof(VecScatter_MPI_General));
  PetscNew(VecScatter_MPI_General,&pipe_from);
  PetscMemzero(pipe_from,sizeof(VecScatter_MPI_General));

  pipe_to->requests   = gen_to->requests;
  pipe_from->requests = gen_from->requests;
  pipe_to->local      = gen_to->local;
  pipe_from->local    = gen_from->local;
  pipe_to->values     = gen_to->values;
  pipe_from->values   = gen_from->values;
  PetscMalloc((gen_to->n+1)*sizeof(MPI_Status),&pipe_to->sstatus);
  PetscMalloc((gen_from->n+1)*sizeof(MPI_Status),&pipe_from->sstatus);

  *to   = pipe_to;
  *from = pipe_from;

  return(0);
}

/* --------------------------------------------------------------*/
/*C
   VecPipelineCreate - Creates a vector pipeline context.
*/
int VecPipelineCreate(MPI_Comm comm,Vec xin,IS ix,Vec yin,IS iy,VecPipeline *newctx)
{
  VecPipeline ctx;
  int         ierr;

  PetscNew(struct _p_VecPipeline,&ctx);
  PetscMemzero(ctx,sizeof(struct _p_VecPipeline));
  ctx->comm = comm;
  VecScatterCreate(xin,ix,yin,iy,&(ctx->scatter));
  VecPipelineSetType(ctx,PIPELINE_SEQUENTIAL,PETSC_NULL);
  ctx->setupcalled = 0;
  ctx->upfn        = 0;
  ctx->dnfn        = 0;

  /*
     Keep a copy of the original scatter fields so we can destroy them later,
     when the pipeline is destroyed
  */
  ctx->scatterto   = (VecScatter_MPI_General *)ctx->scatter->todata;
  ctx->scatterfrom = (VecScatter_MPI_General *)ctx->scatter->fromdata;

  VecPipelineCreateUpDown(ctx->scatter,&(ctx->upto),&(ctx->upfrom));
  VecPipelineCreateUpDown(ctx->scatter,&(ctx->dnto),&(ctx->dnfrom));

  *newctx = ctx;

  return(0);
}

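/*
   VecPipelineSetupSelect - Restricts a scatter's send or receive lists to the
   processors accepted by the given test function: it counts the accepted
   processors, allocates the procs/starts/indices arrays of the pipeline copy,
   and copies over the corresponding entries from the original scatter data.
*/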
static int VecPipelineSetupSelect(VecScatter_MPI_General *gen,VecScatter_MPI_General *pipe,
                                  int (*test)(int,PetscObject),PetscObject pipe_data)
{
  int ierr,i;

  pipe->n = 0;
  for (i=0; i<gen->n; i++) {
    if ((*test)(gen->procs[i],pipe_data)) {
      pipe->n++;
    }
  }

  PetscMalloc((pipe->n+1)*sizeof(int),&pipe->procs);
  PetscMalloc((pipe->n+1)*sizeof(int),&pipe->starts);
  {
    int pipe_size = 1;
    if (gen->n) pipe_size = gen->starts[gen->n]+1;
    PetscMalloc(pipe_size*sizeof(int),&pipe->indices);
  }
  {
    int *starts = gen->starts,*pstarts = pipe->starts;
    int *procs = gen->procs,*pprocs = pipe->procs;
    int *indices = gen->indices,*pindices = pipe->indices;
    int n = 0;

    pstarts[0] = 0;
    for (i=0; i<gen->n; i++) {
      if ((*test)(gen->procs[i],pipe_data)) {
        int j;
        pprocs[n]    = procs[i];
        pstarts[n+1] = pstarts[n] + starts[i+1]-starts[i];
        for (j=0; j<pstarts[n+1]-pstarts[n]; j++) {
          pindices[pstarts[n]+j] = indices[starts[i]+j];
        }
        n++;
      }
    }
  }

  return(0);
}

/*C
   VecPipelineSetup - Sets up a vector pipeline context.
   This call is made implicitly in VecPipelineBegin(), but since it is
   somewhat costly you may want to make it explicitly when timing.
*/
int VecPipelineSetup(VecPipeline ctx)
{
  VecScatter_MPI_General *gen_to,*gen_from;
  int                    ierr;

  if (ctx->setupcalled) return(0);

  (ctx->setup)(ctx,ctx->aux_data,&(ctx->custom_pipe_data));

  gen_to    = (VecScatter_MPI_General*)ctx->scatter->todata;
  gen_from  = (VecScatter_MPI_General*)ctx->scatter->fromdata;

  /* data for PIPELINE_UP */
  VecPipelineSetupSelect(gen_to,ctx->upto,ctx->upfn,ctx->custom_pipe_data);
  VecPipelineSetupSelect(gen_from,ctx->upfrom,ctx->dnfn,ctx->custom_pipe_data);

  /* data for PIPELINE_DOWN */
  VecPipelineSetupSelect(gen_to,ctx->dnto,ctx->dnfn,ctx->custom_pipe_data);
  VecPipelineSetupSelect(gen_from,ctx->dnfrom,ctx->upfn,ctx->custom_pipe_data);

  ctx->setupcalled = 1;

  return(0);
}

/*
   VecPipelineSetType
*/
EXTERN int ProcYes(int proc,PetscObject pipe_info);
EXTERN int ProcUp(int proc,PetscObject pipe_info);
EXTERN int ProcDown(int proc,PetscObject pipe_info);
EXTERN int ProcColorUp(int proc,PetscObject pipe_info);
EXTERN int ProcColorDown(int proc,PetscObject pipe_info);
EXTERN int PipelineSequentialSetup(VecPipeline,PetscObject,PetscObject*);
EXTERN int PipelineRedblackSetup(VecPipeline,PetscObject,PetscObject*);
EXTERN int PipelineMulticolorSetup(VecPipeline,PetscObject,PetscObject*);

int ProcNo(int proc,PetscObject pipe_info);

/*C
   VecPipelineSetType - Sets the type of a vector pipeline. Vector
   pipelines are to be used as

   VecPipelineBegin(<see below for parameters>)
   <do useful work with incoming data>
   VecPipelineEnd(<see below for parameters>)

   Input Parameters:
+  ctx  - vector pipeline object
.  type - vector pipeline type; the choices currently allowed are
     -- PIPELINE_NONE: all processors send and receive simultaneously
     -- PIPELINE_SEQUENTIAL: processors send and receive in ascending
        order of MPI rank
     -- PIPELINE_REDBLACK: even-numbered processors only send; odd-numbered
        processors only receive
     -- PIPELINE_MULTICOLOR: processors are given a color and send/receive
        in ascending order of color
-  x    - auxiliary data; for PIPELINE_MULTICOLOR this should be
          <(PetscObject)pmat> where pmat is the matrix on which the coloring
          is to be based.

.seealso: VecPipelineCreate(), VecPipelineBegin(), VecPipelineEnd().
*/
int VecPipelineSetType(VecPipeline ctx,PipelineType type,PetscObject x)
{
  ctx->pipe_type = type;
  ctx->aux_data  = x;
  if (type == PIPELINE_NONE) {
    ctx->upfn  = &ProcYes;
    ctx->dnfn  = &ProcYes;
    ctx->setup = &PipelineSequentialSetup;
  } else if (type == PIPELINE_SEQUENTIAL) {
    ctx->upfn  = &ProcUp;
    ctx->dnfn  = &ProcDown;
    ctx->setup = &PipelineSequentialSetup;
  } else if (type == PIPELINE_REDBLACK) {
    ctx->upfn  = &ProcColorUp;
    ctx->dnfn  = &ProcColorDown;
    ctx->setup = &PipelineRedblackSetup;
  } else if (type == PIPELINE_MULTICOLOR) {
    ctx->upfn  = &ProcColorUp;
    ctx->dnfn  = &ProcColorDown;
    ctx->setup = &PipelineMulticolorSetup;
  } else {
    SETERRQ1(1,"VecPipelineSetType: unknown or not implemented type %d",(int)type);
  }

  return(0);
}
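
/*
   Illustrative sketch of a typical calling sequence for the pipeline
   routines, assuming a communicator comm, vectors x,y, and index sets ix,iy
   already exist; the work done between Begin and End is a placeholder.

     VecPipeline pipe;

     VecPipelineCreate(comm,x,ix,y,iy,&pipe);
     VecPipelineSetType(pipe,PIPELINE_SEQUENTIAL,PETSC_NULL);
     VecPipelineSetup(pipe);      (optional; VecPipelineBegin() calls it)

     VecPipelineBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,PIPELINE_UP,pipe);
     <use the data received from lower-ranked processors>
     VecPipelineEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,PIPELINE_UP,pipe);

     VecPipelineDestroy(pipe);
*/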

/*
   VecPipelineBegin - Receives data from the processors that come earlier in
   the processor pipeline, scattering from one vector to another.

.seealso: VecPipelineEnd().
*/
int VecPipelineBegin(Vec x,Vec y,InsertMode addv,ScatterMode smode,PipelineDirection pmode,VecPipeline ctx)
{
  if (!ctx->setupcalled) {
    VecPipelineSetup(ctx);
  }

  if (pmode == PIPELINE_UP) {
    ctx->scatter->todata   = ctx->upto;
    ctx->scatter->fromdata = ctx->upfrom;
  } else if (pmode == PIPELINE_DOWN) {
    ctx->scatter->todata   = ctx->dnto;
    ctx->scatter->fromdata = ctx->dnfrom;
  } else SETERRQ1(1,"VecPipelineBegin: unknown or not implemented pipeline mode %d",pmode);

  {
    VecScatter             scat = ctx->scatter;
    VecScatter_MPI_General *gen_to;
    int                    nsends = 0;

    if (smode & SCATTER_REVERSE) {
      gen_to = (VecScatter_MPI_General*)scat->fromdata;
    } else {
      gen_to = (VecScatter_MPI_General*)scat->todata;
    }
    if (ctx->pipe_type != PIPELINE_NONE) {
      nsends    = gen_to->n;
      gen_to->n = 0;
    }
    VecScatterBegin(x,y,addv,smode,scat);
    VecScatterEnd(x,y,addv,smode,scat);
    if (ctx->pipe_type != PIPELINE_NONE) gen_to->n = nsends;
  }
  return(0);
}

/*
   VecPipelineEnd - Sends data to the processors that come later in the
   processor pipeline, scattering from one vector to another.

.seealso: VecPipelineBegin().
*/
int VecPipelineEnd(Vec x,Vec y,InsertMode addv,ScatterMode smode,PipelineDirection pmode,VecPipeline ctx)
{
  VecScatter             scat = ctx->scatter;
  VecScatter_MPI_General *gen_from,*gen_to;
  int                    nsends = 0,nrecvs,ierr;

  if (smode & SCATTER_REVERSE) {
    gen_to   = (VecScatter_MPI_General*)scat->fromdata;
    gen_from = (VecScatter_MPI_General*)scat->todata;
  } else {
    gen_to   = (VecScatter_MPI_General*)scat->todata;
    gen_from = (VecScatter_MPI_General*)scat->fromdata;
  }
  if (ctx->pipe_type == PIPELINE_NONE) {
    nsends    = gen_to->n;
    gen_to->n = 0;
  }
  nrecvs      = gen_from->n;
  gen_from->n = 0;
  VecScatterBegin(x,y,addv,smode,scat);
  VecScatterEnd(x,y,addv,smode,scat);
  if (ctx->pipe_type == PIPELINE_NONE) gen_to->n = nsends;
  gen_from->n = nrecvs;

  return(0);
}

/*
   This destroys the material that is allocated inside the
   VecScatter_MPI_General data structure. It mimics VecScatterDestroy_PtoP(),
   except that it does not destroy the wrapper VecScatter object.
*/
static int VecPipelineDestroy_MPI_General(VecScatter_MPI_General *gen)
{
  if (gen->sstatus) {PetscFree(gen->sstatus);}
  PetscFree(gen);
  return(0);
}

/*C
   VecPipelineDestroy - Destroys a pipeline context created by
   VecPipelineCreate().
*/
int VecPipelineDestroy(VecPipeline ctx)
{
  /* free the VecScatter_MPI_General data structures */
  if (ctx->upto) {
    PetscFree(ctx->upto->procs);
    PetscFree(ctx->upto->starts);
    PetscFree(ctx->upto->indices);
    VecPipelineDestroy_MPI_General(ctx->upto);
  }
  if (ctx->upfrom) {
    PetscFree(ctx->upfrom->procs);
    PetscFree(ctx->upfrom->starts);
    PetscFree(ctx->upfrom->indices);
    VecPipelineDestroy_MPI_General(ctx->upfrom);
  }
  if (ctx->dnto) {
    PetscFree(ctx->dnto->procs);
    PetscFree(ctx->dnto->starts);
    PetscFree(ctx->dnto->indices);
    VecPipelineDestroy_MPI_General(ctx->dnto);
  }
  if (ctx->dnfrom) {
    PetscFree(ctx->dnfrom->procs);
    PetscFree(ctx->dnfrom->starts);
    PetscFree(ctx->dnfrom->indices);
    VecPipelineDestroy_MPI_General(ctx->dnfrom);
  }
  if (ctx->scatterto) {
    PetscFree(ctx->scatterto->values);
    if (ctx->scatterto->local.slots) {PetscFree(ctx->scatterto->local.slots);}
    VecPipelineDestroy_MPI_General(ctx->scatterto);
  }
  if (ctx->scatterfrom) {
    PetscFree(ctx->scatterfrom->values);
    if (ctx->scatterfrom->local.slots) {PetscFree(ctx->scatterfrom->local.slots);}
    VecPipelineDestroy_MPI_General(ctx->scatterfrom);
  }

  if (ctx->custom_pipe_data) {PetscFree(ctx->custom_pipe_data);}
  PetscHeaderDestroy(ctx->scatter);
  PetscFree(ctx);

  return(0);
}

/* >>>> Routines for sequential ordering of processors <<<< */

typedef struct {int rank;} Pipeline_sequential_info;

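/*
   ProcYes/ProcNo - Trivial selection tests: ProcYes accepts every processor
   (used by PIPELINE_NONE); ProcNo accepts none.
*/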
int ProcYes(int proc,PetscObject pipe_info)
{
  PetscFunctionReturn(1);
}

int ProcNo(int proc,PetscObject pipe_info)
{
  return(0);
}

int ProcUp(int proc,PetscObject pipe_info)
{
  int rank = ((Pipeline_sequential_info *)pipe_info)->rank;

  if (rank<proc) {
    PetscFunctionReturn(1);
  } else {
    return(0);
  }
}

int ProcDown(int proc,PetscObject pipe_info)
{
  int rank = ((Pipeline_sequential_info *)pipe_info)->rank;

  if (rank>proc) {
    PetscFunctionReturn(1);
  } else {
    return(0);
  }
}

int PipelineSequentialSetup(VecPipeline vs,PetscObject x,PetscObject *obj)
{
  Pipeline_sequential_info *info;
  int                      ierr;

  PetscNew(Pipeline_sequential_info,&info);
  MPI_Comm_rank(vs->scatter->comm,&(info->rank));
  *obj = (PetscObject)info;

  return(0);
}

/* >>>> Routines for multicolor ordering of processors <<<< */

typedef struct {
  int rank,size,*proc_colors;
} Pipeline_colored_info;

int ProcColorUp(int proc,PetscObject pipe_info)
{
  Pipeline_colored_info *comm_info = (Pipeline_colored_info*)pipe_info;
  int                   rank = comm_info->rank;

  if (comm_info->proc_colors[rank]<comm_info->proc_colors[proc]) {
    PetscFunctionReturn(1);
  } else {
    return(0);
  }
}

int ProcColorDown(int proc,PetscObject pipe_info)
{
  Pipeline_colored_info *comm_info = (Pipeline_colored_info*)pipe_info;
  int                   rank = comm_info->rank;

  if (comm_info->proc_colors[rank]>comm_info->proc_colors[proc]) {
    PetscFunctionReturn(1);
  } else {
    return(0);
  }
}

int PipelineRedblackSetup(VecPipeline vs,PetscObject x,PetscObject *obj)
{
  Pipeline_colored_info *info;
  int                   size,i,ierr;

  PetscNew(Pipeline_colored_info,&info);
  MPI_Comm_rank(vs->scatter->comm,&(info->rank));
  MPI_Comm_size(vs->scatter->comm,&size);
  PetscMalloc(size*sizeof(int),&info->proc_colors);
  for (i=0; i<size; i++) {info->proc_colors[i] = i%2;}
  *obj = (PetscObject)info;

  return(0);
}
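
/*
   For illustration: with the coloring above, processor i receives color i%2,
   so in the PIPELINE_UP phase the selection tests behave as, e.g.,

     rank 0 (color 0), neighbor 1 (color 1): ProcColorUp   -> 1  (0 sends to 1)
     rank 1 (color 1), neighbor 0 (color 0): ProcColorUp   -> 0  (1 sends nothing)
     rank 1 (color 1), neighbor 0 (color 0): ProcColorDown -> 1  (1 receives from 0)

   i.e. even-numbered processors send and odd-numbered processors receive in
   the up phase; the roles reverse in the down phase.
*/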

int PipelineMulticolorSetup(VecPipeline vs,PetscObject x,PetscObject *obj)
{
  Pipeline_colored_info *info;
  Mat                   mat = (Mat) x;
  int                   size,ierr;

  PetscNew(Pipeline_colored_info,&info);
  MPI_Comm_rank(mat->comm,&(info->rank));
  MPI_Comm_size(mat->comm,&size);
  PetscMalloc(size*sizeof(int),&info->proc_colors);
  PetscMemzero(info->proc_colors,size*sizeof(int));

  /* coloring */
  {
    Mat_MPIAIJ *Aij = (Mat_MPIAIJ*)mat->data;
    int *owners = Aij->rowners,*touch = Aij->garray;
    int ntouch = Aij->B->n;
    int *conn,*colr;
    int *colors = info->proc_colors,base = info->rank*size;
    int p,e;

    /* allocate connectivity matrix */
    PetscMalloc(size*size*sizeof(int),&conn);
    PetscMalloc(size*sizeof(int),&colr);
    PetscMemzero(conn,size*size*sizeof(int));

    /* fill in local row of connectivity matrix */
    p = 0; e = 0;
  loop:
    while (touch[e]>=owners[p+1]) {
      p++;
#if defined(PETSC_DEBUG)
      if (p>=size) SETERRQ(1,p,"Processor overflow");
#endif
    }
    conn[base+p] = 1;
    if (p==size-1) ;
    else {
      while (touch[e]<owners[p+1]) {
        e++;
        if (e>=ntouch) goto exit;
      }
      goto loop;
    }
  exit:
    /* distribute to establish local copies of the full connectivity matrix */
    MPI_Allgather(conn+base,size,MPI_INT,conn,size,MPI_INT,mat->comm);

    base = size;
    /* PetscPrintf(mat->comm,"Coloring: 0->0"); */
    for (p=1; p<size; p++) {
      int q,hi=-1,nc=0;
      PetscMemzero(colr,size*sizeof(int));
      for (q=0; q<p; q++) { /* inspect colors of all connected previous procs */
        if (conn[base+q] /* should be transposed! */) {
          if (!colr[colors[q]]) {
            nc++;
            colr[colors[q]] = 1;
            if (colors[q]>hi) hi = colors[q];
          }
        }
      }
      if (hi+1!=nc) {
        nc = 0;
        while (colr[nc]) nc++;
      }
      colors[p] = nc;
      /* PetscPrintf(mat->comm,",%d->%d",p,colors[p]); */
      base = base+size;
    }
    /* PetscPrintf(mat->comm,"\n"); */
    PetscFree(conn);
    PetscFree(colr);
  }
  *obj = (PetscObject)info;

  return(0);
}
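
/*
   For clarity, the loop above amounts to a greedy coloring of the processor
   connectivity graph; a rough restatement (MAX_PROCS is a hypothetical fixed
   bound, conn is the size*size 0/1 connectivity matrix gathered above, and
   colors[0] is already 0):

     for (p=1; p<size; p++) {
       int q,used[MAX_PROCS] = {0};
       for (q=0; q<p; q++)
         if (conn[p*size+q]) used[colors[q]] = 1;   (colors taken by neighbors)
       for (q=0; used[q]; q++) ;                    (first free color)
       colors[p] = q;
     }

   Each processor, taken in rank order, receives the smallest color not
   already used by a lower-ranked processor it is connected to.
*/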

int VecPipelineView(VecPipeline pipe,PetscViewer viewer)
{
  MPI_Comm comm = pipe->comm;
  int      ierr;

  PetscPrintf(comm,">> Vector Pipeline <<\n");
  if (!pipe->setupcalled) {PetscPrintf(comm,"Not yet set up\n");}
  PetscPrintf(comm,"Pipeline type: %d\n",(int)pipe->pipe_type);
  PetscPrintf(comm,"based on scatter:\n");
  /*  VecScatterView(pipe->scatter,viewer); */
  PetscPrintf(comm,"Up function %p\n",pipe->upfn);
  PetscPrintf(comm,"Dn function %p\n",pipe->dnfn);

  return(0);
}