Actual source code: da1.c

/*$Id: da1.c,v 1.129 2001/09/07 20:12:17 bsmith Exp $*/

/* 
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell, 6/30/95.
*/

#include "src/dm/da/daimpl.h"

#if defined(PETSC_HAVE_AMS)
EXTERN_C_BEGIN
EXTERN int AMSSetFieldBlock_DA(AMS_Memory,char *,Vec);
EXTERN_C_END
#endif

int DAView_1d(DA da,PetscViewer viewer)
{
  int        rank;
  PetscTruth isascii,isdraw;

  MPI_Comm_rank(da->comm,&rank);

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
  if (isascii) {
    PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %d m %d w %d s %d\n",rank,da->M,
                                       da->m,da->w,da->s);
    PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %d %d\n",da->xs,da->xe);
    PetscViewerFlush(viewer);
  } else if (isdraw) {
    PetscDraw  draw;
    double     ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
    int        base;
    char       node[10];
    PetscTruth isnull;

    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
    PetscDrawSynchronizedClear(draw);

    /* first processor draws all node lines */
    if (!rank) {
      int xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      /* ADIC doesn't like doubles in a for loop */
      for (xmin_tmp=0; xmin_tmp < (int)da->M; xmin_tmp++) {
        PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
      }

      xmin = 0.0; xmax = da->M - 1;
      PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
      PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w) - 1;
    PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

    /* put in the index numbers */
    base = da->base / da->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",base++);
      PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
  }
  return(0);
}

EXTERN int DAPublish_Petsc(PetscObject);

/*@C
   DACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  wrap - type of periodicity the array should have, if any; use
          either DA_NONPERIODIC or DA_XPERIODIC
.  M - global dimension of the array (use -M to indicate that it may be set to a different value
            from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  lc - array containing the number of nodes in the X direction on each processor,
        or PETSC_NULL. If non-null, it must have one entry per processor in comm.
-  s - stencil width

   Output Parameter:
.  inra - the resulting distributed array object

   Options Database Keys:
+  -da_view - calls DAView() at the conclusion of DACreate1d()
.  -da_grid_x <nx> - number of grid points in the x direction; can be set only if M < 0
-  -da_view_draw - calls DAView() with a draw viewer at the conclusion of DACreate1d()

   Level: beginner

   Notes:
   If you are having problems with running out of memory, then run with the option -da_noao.

   The array data itself is NOT stored in the DA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
   and DACreateLocalVector(), and calls to VecDuplicate() if more are needed.

.keywords: distributed array, create, one-dimensional

.seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(),
          DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(),
          DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView()

@*/
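/*
   A minimal usage sketch (illustrative, not part of the original file): create
   a DA for a nonperiodic 1d grid of 64 nodes with one degree of freedom per
   node and stencil width 1, then obtain its global vector. The grid size 64
   is an arbitrary choice for the example.

      DA  da;
      Vec g;

      DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,64,1,1,PETSC_NULL,&da);
      DACreateGlobalVector(da,&g);
      ...
      VecDestroy(g);
      DADestroy(da);
*/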
int DACreate1d(MPI_Comm comm,DAPeriodicType wrap,int M,int dof,int s,int *lc,DA *inra)
{
  int        rank,size,xs,xe,x,Xs,Xe,start,end,m;
  int        i,*idx,nn,left,refine_x = 2,tM = M;
  PetscTruth flg1,flg2;
  DA         da;
  Vec        local,global;
  VecScatter ltog,gtol;
  IS         to,from;

  *inra = 0;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  DMInitializePackage(PETSC_NULL);
#endif

  if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %d",dof);
  if (s < 0)   SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %d",s);

  PetscOptionsBegin(comm,PETSC_NULL,"1d DA Options","DA");
    if (M < 0) {
      tM = -M;
      PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate1d",tM,&tM,PETSC_NULL);
    }
    PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DACreate1d",refine_x,&refine_x,PETSC_NULL);
  PetscOptionsEnd();
  M = tM;
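  /* A negative M served only as a flag that the grid size may be overridden
     from the options database; from here on M holds the actual global size */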

  PetscHeaderCreate(da,_p_DA,struct _DAOps,DA_COOKIE,0,"DA",comm,DADestroy,DAView);
  PetscLogObjectCreate(da);
  da->bops->publish           = DAPublish_Petsc;
  da->ops->createglobalvector = DACreateGlobalVector;
  da->ops->getinterpolation   = DAGetInterpolation;
  da->ops->getcoloring        = DAGetColoring;
  da->ops->getmatrix          = DAGetMatrix;
  da->ops->refine             = DARefine;
  PetscLogObjectMemory(da,sizeof(struct _p_DA));
  da->dim        = 1;
  da->interptype = DA_Q1;
  da->refine_x   = refine_x;
  PetscMalloc(dof*sizeof(char*),&da->fieldname);
  PetscMemzero(da->fieldname,dof*sizeof(char*));
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  m = size;

  if (M < m)     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %d %d",m,M);
  if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %d %d",M-1,s);

  /* 
     Determine the locally owned region: xs is the first local node number,
     x is the number of local nodes.
  */
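  /*
     Worked example (illustrative): with M = 10 nodes on m = 3 processors and
     the default distribution below, rank 0 gets x = 4 nodes (xs = 0) while
     ranks 1 and 2 get x = 3 nodes each (xs = 4 and xs = 7); the M % m = 1
     leftover node goes to the first rank.
  */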
  if (!lc) {
    PetscOptionsHasName(PETSC_NULL,"-da_partition_blockcomm",&flg1);
    PetscOptionsHasName(PETSC_NULL,"-da_partition_nodes_at_end",&flg2);
    if (flg1) {        /* block comm type distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* the leftover nodes are evenly distributed across the last ranks */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else {           /* the leftover nodes are evenly distributed across the first ranks */
      /* regular PETSc distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) { xs = rank*(M/m) + M % m; }
      else                 { xs = rank*(M/m) + rank; }
    }
  } else {
    x  = lc[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lc[i];
    }
    /* verify that the data the user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lc[i];
    }
    if (left != M) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lc across processors not equal to M: %d %d",left,M);
    }
  }

  /* From now on x,s,xs,xe,Xs,Xe are exact locations in the array, measured in array entries */
  x  *= dof;
  s  *= dof;  /* NOTE: here we change s to be the absolute stencil distance in entries */
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region */
  if (wrap == DA_XPERIODIC) {
    Xs = xs - s;
    Xe = xe + s;
  } else {
    if ((xs-s) >= 0)     Xs = xs-s; else Xs = 0;
    if ((xe+s) <= M*dof) Xe = xe+s; else Xe = M*dof;
  }
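  /*
     Illustrative example: continuing with M = 10, dof = 1, s = 1, the rank
     owning xs = 4 and x = 3 has xe = 7; without wrapping its ghost region is
     Xs = 3, Xe = 8, i.e. one ghost node on each side.
  */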

  /* allocate the base parallel and sequential vectors */
  da->Nlocal = x;
  VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
  VecSetBlockSize(global,dof);
  da->nlocal = (Xe-Xs);
  VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
  VecSetBlockSize(local,dof);

  /* create the local-to-global vector scatter context */
  /* local to global inserts the non-ghost point region into the global vector */
  VecGetOwnershipRange(global,&start,&end);
  ISCreateStride(comm,x,start,1,&to);
  ISCreateStride(comm,x,xs-Xs,1,&from);
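  /* xs-Xs is the offset of the first owned entry inside the ghosted local
     vector, so the stride above skips over the left ghost region */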
  VecScatterCreate(local,from,global,to,&ltog);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,ltog);
  ISDestroy(from);
  ISDestroy(to);

  /* create the global-to-local vector scatter context */
  /* global to local must also retrieve the ghost points */
  ISCreateStride(comm,(Xe-Xs),0,1,&to);

  PetscMalloc((x+2*s)*sizeof(int),&idx);
  PetscLogObjectMemory(da,(x+2*s)*sizeof(int));

  nn = 0;
  if (wrap == DA_XPERIODIC) {    /* handle all cases with wrapping first */

    for (i=0; i<s; i++) {  /* left ghost points */
      if ((xs-s+i) >= 0) { idx[nn++] = xs-s+i; }
      else               { idx[nn++] = M*dof+(xs-s+i); }
    }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }  /* non-ghost points */

    for (i=0; i<s; i++) { /* right ghost points */
      if ((xe+i) < M*dof) { idx[nn++] = xe+i; }
      else                { idx[nn++] = (xe+i) - M*dof; }
    }
  } else {      /* now handle all cases without wrapping */

    if (s <= xs) { for (i=0; i<s;  i++) { idx[nn++] = xs - s + i; } }
    else         { for (i=0; i<xs; i++) { idx[nn++] = i; } }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }

    if ((xe+s) <= M*dof) { for (i=0;  i<s;       i++) { idx[nn++] = xe+i; } }
    else                 { for (i=xe; i<(M*dof); i++) { idx[nn++] = i; } }
  }
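  /*
     Illustrative example: with M*dof = 10, s = 2, and DA_XPERIODIC, the rank
     owning entries 0..4 (xs = 0, xe = 5) builds
         idx = {8,9, 0,1,2,3,4, 5,6}
     so its left ghost points wrap around to the end of the global array.
  */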

  ISCreateGeneral(comm,nn,idx,&from);
  VecScatterCreate(global,from,local,to,&gtol);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,gtol);
  ISDestroy(to);
  ISDestroy(from);
  VecDestroy(local);
  VecDestroy(global);

  da->M  = M;  da->N  = 1;  da->m  = m;   da->n = 1;
  da->xs = xs; da->xe = xe; da->ys = 0;   da->ye = 1; da->zs = 0; da->ze = 1;
  da->Xs = Xs; da->Xe = Xe; da->Ys = 0;   da->Ye = 1; da->Zs = 0; da->Ze = 1;
  da->P  = 1;  da->p  = 1;  da->w  = dof; da->s = s/dof;
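  /* s was scaled by dof above, so s/dof restores the stencil width in nodes */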

  da->gtol         = gtol;
  da->ltog         = ltog;
  da->idx          = idx;
  da->Nl           = nn;
  da->base         = xs;
  da->ops->view    = DAView_1d;
  da->wrap         = wrap;
  da->stencil_type = DA_STENCIL_STAR;

  /* 
     Set the local-to-global ordering in the global vector; this allows the use
     of VecSetValuesLocal().
  */
  ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
  ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
  PetscLogObjectParent(da,da->ltogmap);

  da->ltol = PETSC_NULL;
  da->ao   = PETSC_NULL;

  PetscOptionsHasName(PETSC_NULL,"-da_view",&flg1);
  if (flg1) {DAView(da,PETSC_VIEWER_STDOUT_(da->comm));}
  PetscOptionsHasName(PETSC_NULL,"-da_view_draw",&flg1);
  if (flg1) {DAView(da,PETSC_VIEWER_DRAW_(da->comm));}
  PetscOptionsHasName(PETSC_NULL,"-help",&flg1);
  if (flg1) {DAPrintHelp(da);}
  *inra = da;
  PetscPublishAll(da);
  return(0);
}